The package embeds an HBDK compiler performance report (an HTML/React viewer page) for the compiled model torch_jit_subgraph_0; the viewer markup and the full per-layer table are not reproduced here. Key figures from the report:

- Target: BPU march BAYES, 1 core, HBDK version 3.48.6, compiled with --O3 (NHWC input/output layout, input sources pyramid and ddr)
- Inputs: onnx::Pad_0 6x256x704x3 and points_calibrated_quantized 4x100x100x2, 6 frames per run
- Throughput: 77.65 FPS, latency 77.27 ms per run
- Compute: ~9.25G effective BPU OPs per frame (~55.52G per run)
- DDR traffic: 24.671 MB per frame, 148.026 MB per run (~1915.7 MB/s)
efficient-fastbev 27.8FPS
Updated 2024-02-21 · 42.57 MB ZIP archive
"efficient-fastbev 27.8FPS" 这个标题和描述可能指的是一个计算机视觉领域的项目或软件,特别关注于提升Bird's Eye View (BEV)图像处理的速度。在自动驾驶、交通监控和智能城市等领域,BEV图像是一种常用的技术,它能将3D空间的数据转换成俯视图,便于理解和分析车辆周围环境。这里的"27.8FPS"意味着该系统能够在每秒处理27.8帧的BEV图像,这是一个关于实时性能的关键指标。
"efficient"和"fastbev"标签进一步强调了这个项目的核心特性:效率和快速处理BEV图像。这通常涉及到高效的算法设计,可能是通过优化计算过程、减少不必要的计算或利用特定硬件加速来实现。高效可能意味着该系统在保持高性能的同时,对计算资源的需求较低,这对于资源有限的设备(如车载计算平台)尤其重要。
在压缩包子文件的文件名称列表中只给出了"efficient",这可能是一个项目的文件夹或者代码库的一部分,包含了实现高效BEV转换的源代码、数据集、配置文件等资源。为了深入了解这个项目,我们需要查看这些文件内容,包括但不限于:
1. **Source code**: the algorithm implementation, possibly written in C++, Python, or another language, that converts 3D data into BEV images.
2. **Model definitions**: configuration files for machine-learning models, for example TensorFlow or PyTorch network structures and weights.
3. **Datasets**: the BEV images and corresponding 3D point clouds needed to train and evaluate the models.
4. **README or documentation**: a project overview plus installation, usage, and performance-evaluation instructions.
5. **Configuration files**: runtime parameter settings that affect the algorithm's speed and accuracy.
6. **Test scripts**: automated tests that validate the algorithm's correctness and performance.
7. **Performance benchmarks**: benchmarking code for evaluating the system, for example the code used to compute FPS (a minimal sketch of such a measurement follows this list).
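As referenced in item 7, a throughput measurement typically looks like the following minimal sketch; `model`, `example_input`, and the iteration counts are placeholders rather than names taken from the project.

```python
import time
import torch

def measure_fps(model, example_input, warmup=10, iters=100):
    """Rough frames-per-second measurement for a single input batch."""
    model.eval()
    with torch.no_grad():
        for _ in range(warmup):          # warm up caches / kernel autotuning
            model(example_input)
        if torch.cuda.is_available():
            torch.cuda.synchronize()     # make sure queued GPU work is done
        start = time.perf_counter()
        for _ in range(iters):
            model(example_input)
        if torch.cuda.is_available():
            torch.cuda.synchronize()
        elapsed = time.perf_counter() - start
    return iters / elapsed               # inferences per second
```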
To understand and use "efficient-fastbev" in depth, one would download these files, install the dependencies as documented, and run the code to see its performance first-hand. Developers who want to improve or customize it will also need to understand the key algorithms and data structures in the source, and studying how it exploits hardware acceleration (e.g. GPUs) is central to its efficiency.
In summary, "efficient-fastbev" is a system dedicated to efficient BEV image processing, targeting processing speeds of 27.8 FPS and above for scenarios that require real-time handling of large amounts of 3D data. Its algorithms and implementation strategy are valuable study material for computer vision, autonomous-driving technology, and performance-optimization methods in general.