{
  "status": "ok",
  "message-type": "work",
  "message-version": "1.0.0",
  "message": {
    "indexed": {"date-parts": [[2024, 9, 6]], "date-time": "2024-09-06T22:28:50Z", "timestamp": 1725661730487},
    "reference-count": 43,
    "publisher": "IEEE",
    "license": [
      {"start": {"date-parts": [[2021, 10, 1]], "date-time": "2021-10-01T00:00:00Z", "timestamp": 1633046400000}, "content-version": "vor", "delay-in-days": 0, "URL": "https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},
      {"start": {"date-parts": [[2021, 10, 1]], "date-time": "2021-10-01T00:00:00Z", "timestamp": 1633046400000}, "content-version": "stm-asf", "delay-in-days": 0, "URL": "https:\/\/doi.org\/10.15223\/policy-029"},
      {"start": {"date-parts": [[2021, 10, 1]], "date-time": "2021-10-01T00:00:00Z", "timestamp": 1633046400000}, "content-version": "stm-asf", "delay-in-days": 0, "URL": "https:\/\/doi.org\/10.15223\/policy-037"}
    ],
    "funder": [
      {"DOI": "10.13039\/501100001602", "name": "Science Foundation Ireland", "doi-asserted-by": "publisher", "id": [{"id": "10.13039\/501100001602", "id-type": "DOI", "asserted-by": "publisher"}]},
      {"DOI": "10.13039\/501100001602", "name": "Science Foundation Ireland", "doi-asserted-by": "publisher", "id": [{"id": "10.13039\/501100001602", "id-type": "DOI", "asserted-by": "publisher"}]}
    ],
    "content-domain": {"domain": [], "crossmark-restriction": false},
    "short-container-title": [],
    "published-print": {"date-parts": [[2021, 10]]},
    "DOI": "10.1109\/swc50871.2021.00024",
    "type": "proceedings-article",
    "created": {"date-parts": [[2021, 11, 18]], "date-time": "2021-11-18T22:20:05Z", "timestamp": 1637274005000},
    "page": "107-114",
    "source": "Crossref",
    "is-referenced-by-count": 2,
    "title": ["Globe2Train: A Framework for Distributed ML Model Training Using IoT Devices Across the Globe"],
    "prefix": "10.1109",
    "author": [
      {"given": "Bharath", "family": "Sudharsan", "sequence": "first", "affiliation": []},
      {"given": "John G.", "family": "Breslin", "sequence": "additional", "affiliation": []},
      {"given": "Muhammad", "family": "Intizar Ali", "sequence": "additional", "affiliation": []}
    ],
    "member": "263",
    "reference": [
      {"doi-asserted-by": "publisher", "key": "ref39", "DOI": "10.1109\/TAC.1986.1104412"},
      {"year": "0", "author": "waleed", "article-title": "Dataset", "key": "ref38"},
      {"year": "2017", "author": "kumar", "journal-title": "34th International Conference on Machine Learning (ICML)", "article-title": "Resource-efficient machine learning in 2 KB RAM for the Internet of Things", "key": "ref33"},
      {"doi-asserted-by": "publisher", "key": "ref32", "DOI": "10.1109\/GLOCOM.2016.7842181"},
      {"year": "2020", "author": "keshavarz", "article-title": "Sefr: A fast linear-time classifier for ultra-low power devices", "key": "ref31"},
      {"doi-asserted-by": "publisher", "key": "ref37", "DOI": "10.1109\/SWC50871.2021.00023"},
      {"doi-asserted-by": "publisher", "key": "ref36", "DOI": "10.1109\/IJCNN52387.2021.9533927"},
      {"doi-asserted-by": "publisher", "DOI": "10.1007\/978-3-030-86517-7_2"},
      {"doi-asserted-by": "publisher", "key": "ref10", "DOI": "10.1016\/j.ijrobp.2017.04.021"},
      {"year": "2011", "author": "recht", "journal-title": "Neural Information Processing Systems (NIPS)", "article-title": "Hogwild: A lock-free approach to parallelizing stochastic gradient descent", "key": "ref40"},
      {"doi-asserted-by": "publisher", "key": "ref11", "DOI": "10.1145\/3450268.3453513"},
      {"year": "2018", "author": "dai", "article-title": "Understanding the impact of staleness in distributed machine learning", "key": "ref12"},
      {"year": "2018", "author": "tang", "article-title": "D2: Decentralized training over decentralized data", "key": "ref13"},
      {"year": "2018", "author": "lian", "journal-title": "International Conference on Machine Learning (ICML)", "article-title": "Adpsgd: Asynchronous decentralized parallel stochastic gradient descent", "key": "ref14"},
      {"year": "2017", "author": "wen", "journal-title": "Neural Information Processing Systems (NIPS)", "article-title": "Terngrad: Ternary gradients to reduce communication in distributed deep learning"},
      {"doi-asserted-by": "publisher", "DOI": "10.1109\/MIC.2021.3053711"},
      {"year": "2014", "author": "seide", "journal-title": "Interspeech", "article-title": "1-bit stochastic gradient descent and its application to data-parallel distributed training of speech DNNs", "key": "ref17"},
      {"year": "2016", "author": "alistarh", "article-title": "Qsgd: Randomized quantization for communication-optimal stochastic gradient descent", "key": "ref18"},
      {"year": "2016", "author": "zhou", "article-title": "Dorefa-net: Training low bitwidth convolutional neural networks with low bitwidth gradients"},
      {"article-title": "Mnasnet: Platform-aware neural architecture search for mobile", "key": "ref28"},
      {"doi-asserted-by": "publisher", "key": "ref4", "DOI": "10.1109\/ACCESS.2021.3063291"},
      {"year": "2017", "author": "huang", "article-title": "Condensenet: An efficient densenet using learned group convolutions", "key": "ref27"},
      {"year": "2017", "author": "lin", "article-title": "Deep gradient compression: Reducing the communication bandwidth for distributed training", "key": "ref3"},
      {"doi-asserted-by": "publisher", "key": "ref6", "DOI": "10.1145\/3410992.3411005"},
      {"year": "2020", "author": "briggs", "article-title": "A review of privacy-preserving federated learning for private IoT analytics", "key": "ref29"},
      {"doi-asserted-by": "publisher", "key": "ref5", "DOI": "10.1109\/INFOCOM.2018.8486403"},
      {"year": "2021", "article-title": "Empower your team to quickly, easily and cost-effectively integrate AI into your projects", "key": "ref8"},
      {"year": "2021", "article-title": "Training kinetics in 15 minutes: Large-scale distributed training on videos", "key": "ref2"},
      {"doi-asserted-by": "publisher", "key": "ref9", "DOI": "10.1016\/j.radonc.2016.10.002"},
      {"year": "2019", "author": "zhu", "article-title": "Distributed training across the world", "key": "ref1"},
      {"doi-asserted-by": "publisher", "DOI": "10.21437\/Interspeech.2015-354"},
      {"doi-asserted-by": "publisher", "key": "ref22", "DOI": "10.1109\/IJCNN.2019.8852172"},
      {"year": "2018", "author": "chen", "journal-title": "AAAI Conference", "article-title": "Adacomp: Adaptive residual gradient compression for data-parallel distributed training", "key": "ref21"},
      {"year": "2012", "author": "dean", "journal-title": "Neural Information Processing Systems (NIPS)", "article-title": "Large scale distributed deep networks", "key": "ref42"},
      {"doi-asserted-by": "publisher", "key": "ref24", "DOI": "10.1109\/JIOT.2021.3098166"},
      {"article-title": "Taming the wild: A unified analysis of Hogwild-style algorithms", "key": "ref41"},
      {"doi-asserted-by": "publisher", "key": "ref23", "DOI": "10.1109\/ALLERTON.2016.7852343"},
      {"journal-title": "Neural Information Processing Systems (NIPS)", "article-title": "Asynchronous parallel stochastic gradient for nonconvex optimization", "key": "ref43"},
      {"doi-asserted-by": "publisher", "key": "ref25", "DOI": "10.1109\/WF-IoT51360.2021.9595024"}
    ],
    "event": {
      "name": "2021 IEEE SmartWorld, Ubiquitous Intelligence & Computing, Advanced & Trusted Computing, Scalable Computing & Communications, Internet of People and Smart City Innovation (SmartWorld\/SCALCOM\/UIC\/ATC\/IOP\/SCI)",
      "start": {"date-parts": [[2021, 10, 18]]},
      "location": "Atlanta, GA, USA",
      "end": {"date-parts": [[2021, 10, 21]]}
    },
    "container-title": ["2021 IEEE SmartWorld, Ubiquitous Intelligence & Computing, Advanced & Trusted Computing, Scalable Computing & Communications, Internet of People and Smart City Innovation (SmartWorld\/SCALCOM\/UIC\/ATC\/IOP\/SCI)"],
    "original-title": [],
    "link": [{"URL": "http:\/\/xplorestaging.ieee.org\/ielx7\/9604213\/9604467.pdf?arnumber=9604467", "content-type": "unspecified", "content-version": "vor", "intended-application": "similarity-checking"}],
    "deposited": {"date-parts": [[2022, 5, 10]], "date-time": "2022-05-10T16:51:17Z", "timestamp": 1652201477000},
    "score": 1,
    "resource": {"primary": {"URL": "https:\/\/ieeexplore.ieee.org\/document\/9604467\/"}},
    "subtitle": [],
    "short-title": [],
    "issued": {"date-parts": [[2021, 10]]},
    "references-count": 43,
    "URL": "http:\/\/dx.doi.org\/10.1109\/swc50871.2021.00024",
    "relation": {},
    "subject": [],
    "published": {"date-parts": [[2021, 10]]}
  }
}
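The record above is a Crossref REST API work object. As a minimal sketch of how such a record can be retrieved and read, the Python snippet below fetches the same DOI from the public Crossref endpoint and prints a few of the fields shown above; the use of the third-party requests package and the choice of printed fields are assumptions for illustration, not part of the record itself.

# Minimal sketch: fetch the Crossref work object for the DOI in the record above
# and read a few of its fields. Assumes network access and the `requests` package.
import requests

DOI = "10.1109/swc50871.2021.00024"  # DOI from the record above

resp = requests.get(f"https://api.crossref.org/works/{DOI}", timeout=30)
resp.raise_for_status()
work = resp.json()["message"]  # the "message" object, as in the record above

print(work["title"][0])                       # paper title
print(work["DOI"], work.get("page"))          # DOI and page range
print("cited by:", work.get("is-referenced-by-count"))

# Reference entries are partial; some carry a DOI, others only bibliographic text.
for ref in work.get("reference", []):
    print(ref.get("key"), ref.get("DOI") or ref.get("article-title"))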