-rw-r--r--.gitmodules2
-rw-r--r--README.md2
-rw-r--r--firmware/fx2/b100/CMakeLists.txt6
-rw-r--r--firmware/fx2/usrp1/CMakeLists.txt6
-rw-r--r--firmware/fx3/b200/b200_main.c10
-rw-r--r--firmware/fx3/b200/b200_main.h2
-rw-r--r--firmware/usrp3/lib/CMakeLists.txt1
-rw-r--r--firmware/usrp3/n230/CMakeLists.txt7
-rw-r--r--firmware/usrp3/n230/n230_eth_handlers.c2
-rw-r--r--firmware/usrp3/n230/n230_fw_comm_protocol.c (renamed from firmware/usrp3/lib/fw_comm_protocol.c)2
-rwxr-xr-xfirmware/usrp3/x300/x300_aurora_bist.py40
m---------fpga-src0
-rw-r--r--host/CMakeLists.txt27
-rw-r--r--host/cmake/Modules/CodeCoverage.cmake197
-rw-r--r--host/cmake/Modules/UHDConfigVersion.cmake.in12
-rw-r--r--host/cmake/Modules/UHDGlobalDefs.cmake4
-rw-r--r--host/cmake/Modules/UHDLog.cmake72
-rw-r--r--host/cmake/Modules/UHDPackage.cmake9
-rw-r--r--host/cmake/Modules/UHDVersion.cmake32
-rw-r--r--host/docs/general.dox2
-rw-r--r--host/docs/logging.dox89
-rw-r--r--host/docs/uhd.dox1
-rw-r--r--host/docs/usrp_e3x0.dox1
-rw-r--r--host/examples/gpio.cpp2
-rw-r--r--host/examples/init_usrp/CMakeLists.txt19
-rw-r--r--host/examples/rx_samples_to_file.cpp26
-rw-r--r--host/examples/test_dboard_coercion.cpp12
-rw-r--r--host/examples/test_messages.cpp5
-rw-r--r--host/examples/tx_samples_from_file.cpp28
-rw-r--r--host/examples/tx_waveforms.cpp1
-rw-r--r--host/include/config.h.in8
-rw-r--r--host/include/uhd/CMakeLists.txt2
-rw-r--r--host/include/uhd/cal/CMakeLists.txt23
-rw-r--r--host/include/uhd/cal/container.hpp104
-rw-r--r--host/include/uhd/cal/power_container.hpp79
-rw-r--r--host/include/uhd/device.hpp12
-rw-r--r--host/include/uhd/device3.hpp8
-rw-r--r--host/include/uhd/device_deprecated.ipp201
-rw-r--r--host/include/uhd/image_loader.hpp1
-rw-r--r--host/include/uhd/rfnoc/block_ctrl_base.hpp5
-rw-r--r--host/include/uhd/rfnoc/node_ctrl_base.hpp2
-rw-r--r--host/include/uhd/rfnoc/node_ctrl_base.ipp2
-rw-r--r--host/include/uhd/rfnoc/radio_ctrl.hpp205
-rw-r--r--host/include/uhd/transport/nirio/nirio_fifo.ipp1
-rw-r--r--host/include/uhd/transport/nirio/nirio_quirks.h4
-rw-r--r--host/include/uhd/transport/nirio/rpc/rpc_client.hpp4
-rw-r--r--host/include/uhd/transport/udp_simple.hpp5
-rw-r--r--host/include/uhd/transport/udp_zero_copy.hpp2
-rw-r--r--host/include/uhd/types/filters.hpp4
-rw-r--r--host/include/uhd/types/ranges.hpp6
-rw-r--r--host/include/uhd/types/sid.hpp42
-rw-r--r--host/include/uhd/usrp/subdev_spec.hpp6
-rw-r--r--host/include/uhd/utils/CMakeLists.txt2
-rw-r--r--host/include/uhd/utils/assert_has.ipp2
-rw-r--r--host/include/uhd/utils/atomic.hpp137
-rw-r--r--host/include/uhd/utils/fp_compare_delta.ipp2
-rw-r--r--host/include/uhd/utils/fp_compare_epsilon.ipp2
-rw-r--r--host/include/uhd/utils/log.hpp290
-rw-r--r--host/include/uhd/utils/log_add.hpp46
-rw-r--r--host/include/uhd/utils/msg.hpp79
-rw-r--r--host/include/uhd/utils/safe_call.hpp2
-rw-r--r--host/include/uhd/version.hpp.in2
-rw-r--r--host/lib/CMakeLists.txt7
-rw-r--r--host/lib/cal/CMakeLists.txt32
-rw-r--r--host/lib/cal/interpolation.hpp71
-rw-r--r--host/lib/cal/interpolation.ipp199
-rw-r--r--host/lib/cal/power_container_impl.cpp80
-rw-r--r--host/lib/cal/power_container_impl.hpp77
-rw-r--r--host/lib/convert/convert_fc32_item32.cpp2
-rw-r--r--host/lib/convert/convert_impl.cpp19
-rw-r--r--host/lib/convert/convert_pack_sc12.cpp2
-rw-r--r--host/lib/convert/convert_unpack_sc12.cpp2
-rw-r--r--host/lib/device.cpp21
-rw-r--r--host/lib/device3.cpp8
-rw-r--r--host/lib/experts/expert_container.cpp21
-rw-r--r--host/lib/experts/expert_nodes.hpp9
-rw-r--r--host/lib/image_loader.cpp9
-rw-r--r--host/lib/property_tree.cpp11
-rw-r--r--host/lib/rfnoc/block_ctrl_base.cpp60
-rw-r--r--host/lib/rfnoc/block_ctrl_base_factory.cpp10
-rw-r--r--host/lib/rfnoc/blockdef_xml_impl.cpp41
-rw-r--r--host/lib/rfnoc/ctrl_iface.cpp116
-rw-r--r--host/lib/rfnoc/ctrl_iface.hpp12
-rw-r--r--host/lib/rfnoc/ddc_block_ctrl_impl.cpp8
-rw-r--r--host/lib/rfnoc/dma_fifo_block_ctrl_impl.cpp8
-rw-r--r--host/lib/rfnoc/duc_block_ctrl_impl.cpp8
-rw-r--r--host/lib/rfnoc/graph_impl.cpp8
-rw-r--r--host/lib/rfnoc/legacy_compat.cpp57
-rw-r--r--host/lib/rfnoc/nocscript/block_iface.cpp26
-rw-r--r--host/lib/rfnoc/nocscript/expression.cpp7
-rw-r--r--host/lib/rfnoc/nocscript/function_table.cpp1
-rw-r--r--host/lib/rfnoc/node_ctrl_base.cpp4
-rw-r--r--host/lib/rfnoc/radio_ctrl_impl.cpp130
-rw-r--r--host/lib/rfnoc/radio_ctrl_impl.hpp31
-rw-r--r--host/lib/rfnoc/rx_stream_terminator.cpp23
-rw-r--r--host/lib/rfnoc/sink_block_ctrl_base.cpp6
-rw-r--r--host/lib/rfnoc/sink_node_ctrl.cpp6
-rw-r--r--host/lib/rfnoc/source_block_ctrl_base.cpp16
-rw-r--r--host/lib/rfnoc/source_node_ctrl.cpp6
-rw-r--r--host/lib/rfnoc/tick_node_ctrl.cpp2
-rw-r--r--host/lib/rfnoc/tx_stream_terminator.cpp6
-rw-r--r--host/lib/rfnoc/tx_stream_terminator.hpp4
-rw-r--r--host/lib/rfnoc/utils.hpp4
-rw-r--r--host/lib/rfnoc/xports.hpp2
-rw-r--r--host/lib/transport/if_addrs.cpp4
-rw-r--r--host/lib/transport/libusb1_base.cpp19
-rw-r--r--host/lib/transport/libusb1_zero_copy.cpp9
-rw-r--r--host/lib/transport/nirio/rpc/rpc_client.cpp22
-rw-r--r--host/lib/transport/nirio_zero_copy.cpp8
-rw-r--r--host/lib/transport/super_recv_packet_handler.hpp33
-rw-r--r--host/lib/transport/super_send_packet_handler.hpp3
-rw-r--r--host/lib/transport/tcp_zero_copy.cpp4
-rw-r--r--host/lib/transport/udp_common.hpp12
-rw-r--r--host/lib/transport/udp_simple.cpp6
-rw-r--r--host/lib/transport/udp_wsa_zero_copy.cpp24
-rw-r--r--host/lib/transport/udp_zero_copy.cpp17
-rw-r--r--host/lib/transport/xport_benchmarker.hpp2
-rw-r--r--host/lib/transport/zero_copy_flow_ctrl.cpp7
-rw-r--r--host/lib/transport/zero_copy_recv_offload.cpp4
-rw-r--r--host/lib/types/byte_vector.cpp3
-rw-r--r--host/lib/types/device_addr.cpp21
-rw-r--r--host/lib/types/mac_addr.cpp5
-rw-r--r--host/lib/types/ranges.cpp23
-rw-r--r--host/lib/types/sid.cpp10
-rw-r--r--host/lib/uhd.rc.in4
-rw-r--r--host/lib/usrp/b100/b100_impl.cpp28
-rw-r--r--host/lib/usrp/b100/clock_ctrl.cpp23
-rw-r--r--host/lib/usrp/b100/codec_ctrl.cpp6
-rw-r--r--host/lib/usrp/b100/io_impl.cpp6
-rw-r--r--host/lib/usrp/b100/usb_zero_copy_wrapper.cpp3
-rw-r--r--host/lib/usrp/b200/b200_iface.cpp34
-rw-r--r--host/lib/usrp/b200/b200_image_loader.cpp5
-rw-r--r--host/lib/usrp/b200/b200_impl.cpp63
-rw-r--r--host/lib/usrp/b200/b200_io_impl.cpp18
-rw-r--r--host/lib/usrp/b200/b200_uart.cpp5
-rw-r--r--host/lib/usrp/common/CMakeLists.txt1
-rw-r--r--host/lib/usrp/common/ad9361_ctrl.cpp6
-rw-r--r--host/lib/usrp/common/ad9361_driver/ad9361_client.h4
-rw-r--r--host/lib/usrp/common/ad9361_driver/ad9361_device.cpp34
-rw-r--r--host/lib/usrp/common/ad936x_manager.cpp21
-rw-r--r--host/lib/usrp/common/ad936x_manager.hpp1
-rw-r--r--host/lib/usrp/common/adf4001_ctrl.cpp2
-rw-r--r--host/lib/usrp/common/adf435x.hpp17
-rw-r--r--host/lib/usrp/common/apply_corrections.cpp15
-rw-r--r--host/lib/usrp/common/async_packet_handler.hpp17
-rw-r--r--host/lib/usrp/common/constrained_device_args.hpp2
-rw-r--r--host/lib/usrp/common/fifo_ctrl_excelsior.cpp6
-rw-r--r--host/lib/usrp/common/fx2_ctrl.cpp14
-rw-r--r--host/lib/usrp/common/max287x.hpp17
-rw-r--r--host/lib/usrp/common/recv_packet_demuxer.cpp4
-rw-r--r--host/lib/usrp/common/recv_packet_demuxer_3000.hpp41
-rw-r--r--host/lib/usrp/common/validate_subdev_spec.cpp9
-rw-r--r--host/lib/usrp/cores/dma_fifo_core_3000.cpp2
-rw-r--r--host/lib/usrp/cores/i2c_core_100.cpp4
-rw-r--r--host/lib/usrp/cores/i2c_core_100_wb32.cpp4
-rw-r--r--host/lib/usrp/cores/i2c_core_200.cpp4
-rw-r--r--host/lib/usrp/cores/radio_ctrl_core_3000.cpp16
-rw-r--r--host/lib/usrp/cores/rx_dsp_core_200.cpp4
-rw-r--r--host/lib/usrp/cores/rx_dsp_core_3000.cpp6
-rw-r--r--host/lib/usrp/cores/rx_vita_core_3000.cpp4
-rw-r--r--host/lib/usrp/cores/spi_core_100.cpp4
-rw-r--r--host/lib/usrp/cores/spi_core_3000.cpp2
-rw-r--r--host/lib/usrp/cores/time_core_3000.cpp10
-rw-r--r--host/lib/usrp/cores/tx_dsp_core_200.cpp4
-rw-r--r--host/lib/usrp/cores/tx_dsp_core_3000.cpp4
-rw-r--r--host/lib/usrp/dboard/db_basic_and_lf.cpp2
-rw-r--r--host/lib/usrp/dboard/db_cbx.cpp6
-rw-r--r--host/lib/usrp/dboard/db_dbsrx.cpp70
-rw-r--r--host/lib/usrp/dboard/db_dbsrx2.cpp37
-rw-r--r--host/lib/usrp/dboard/db_rfx.cpp22
-rw-r--r--host/lib/usrp/dboard/db_sbx_common.cpp16
-rw-r--r--host/lib/usrp/dboard/db_sbx_common.hpp2
-rw-r--r--host/lib/usrp/dboard/db_sbx_version3.cpp6
-rw-r--r--host/lib/usrp/dboard/db_sbx_version4.cpp6
-rw-r--r--host/lib/usrp/dboard/db_tvrx.cpp28
-rw-r--r--host/lib/usrp/dboard/db_tvrx2.cpp128
-rw-r--r--host/lib/usrp/dboard/db_twinrx.cpp2
-rw-r--r--host/lib/usrp/dboard/db_ubx.cpp32
-rw-r--r--host/lib/usrp/dboard/db_unknown.cpp7
-rw-r--r--host/lib/usrp/dboard/db_wbx_common.cpp10
-rw-r--r--host/lib/usrp/dboard/db_wbx_version2.cpp12
-rw-r--r--host/lib/usrp/dboard/db_wbx_version3.cpp12
-rw-r--r--host/lib/usrp/dboard/db_wbx_version4.cpp12
-rw-r--r--host/lib/usrp/dboard/db_xcvr2450.cpp24
-rw-r--r--host/lib/usrp/dboard/twinrx/twinrx_ctrl.cpp6
-rw-r--r--host/lib/usrp/dboard/twinrx/twinrx_experts.cpp5
-rw-r--r--host/lib/usrp/dboard_eeprom.cpp8
-rw-r--r--host/lib/usrp/dboard_manager.cpp27
-rw-r--r--host/lib/usrp/device3/device3_impl.cpp32
-rw-r--r--host/lib/usrp/device3/device3_impl.hpp7
-rw-r--r--host/lib/usrp/device3/device3_io_impl.cpp146
-rw-r--r--host/lib/usrp/e100/clock_ctrl.cpp25
-rw-r--r--host/lib/usrp/e100/codec_ctrl.cpp6
-rw-r--r--host/lib/usrp/e100/e100_ctrl.cpp9
-rw-r--r--host/lib/usrp/e100/e100_impl.cpp38
-rw-r--r--host/lib/usrp/e100/e100_mmap_zero_copy.cpp46
-rw-r--r--host/lib/usrp/e100/fpga_downloader.cpp17
-rw-r--r--host/lib/usrp/e100/io_impl.cpp6
-rw-r--r--host/lib/usrp/e300/e300_common.cpp7
-rw-r--r--host/lib/usrp/e300/e300_common.hpp2
-rw-r--r--host/lib/usrp/e300/e300_defaults.hpp2
-rw-r--r--host/lib/usrp/e300/e300_fifo_config.cpp19
-rw-r--r--host/lib/usrp/e300/e300_impl.cpp80
-rw-r--r--host/lib/usrp/e300/e300_impl.hpp3
-rw-r--r--host/lib/usrp/e300/e300_io_impl.cpp17
-rw-r--r--host/lib/usrp/e300/e300_network.cpp56
-rw-r--r--host/lib/usrp/e300/e300_remote_codec_ctrl.cpp2
-rw-r--r--host/lib/usrp/e300/e300_sysfs_hooks.cpp2
-rw-r--r--host/lib/usrp/gps_ctrl.cpp48
-rw-r--r--host/lib/usrp/gpsd_iface.cpp2
-rw-r--r--host/lib/usrp/mboard_eeprom.cpp3
-rw-r--r--host/lib/usrp/multi_usrp.cpp108
-rw-r--r--host/lib/usrp/n230/CMakeLists.txt1
-rw-r--r--host/lib/usrp/n230/n230_clk_pps_ctrl.cpp9
-rw-r--r--host/lib/usrp/n230/n230_eeprom_manager.cpp2
-rw-r--r--host/lib/usrp/n230/n230_frontend_ctrl.cpp2
-rw-r--r--host/lib/usrp/n230/n230_fw_comm_protocol.h (renamed from host/lib/usrp/common/fw_comm_protocol.h)6
-rw-r--r--host/lib/usrp/n230/n230_fw_ctrl_iface.cpp (renamed from host/lib/usrp/common/usrp3_fw_ctrl_iface.cpp)41
-rw-r--r--host/lib/usrp/n230/n230_fw_ctrl_iface.hpp (renamed from host/lib/usrp/common/usrp3_fw_ctrl_iface.hpp)14
-rw-r--r--host/lib/usrp/n230/n230_impl.cpp36
-rw-r--r--host/lib/usrp/n230/n230_resource_manager.cpp28
-rw-r--r--host/lib/usrp/n230/n230_resource_manager.hpp8
-rw-r--r--host/lib/usrp/n230/n230_stream_manager.cpp3
-rw-r--r--host/lib/usrp/n230/n230_uart.cpp2
-rw-r--r--host/lib/usrp/subdev_spec.cpp15
-rw-r--r--host/lib/usrp/usrp1/codec_ctrl.cpp26
-rw-r--r--host/lib/usrp/usrp1/io_impl.cpp20
-rw-r--r--host/lib/usrp/usrp1/usrp1_calc_mux.hpp6
-rw-r--r--host/lib/usrp/usrp1/usrp1_iface.cpp20
-rw-r--r--host/lib/usrp/usrp1/usrp1_impl.cpp42
-rw-r--r--host/lib/usrp/usrp1/usrp1_impl.hpp3
-rw-r--r--host/lib/usrp/usrp2/codec_ctrl.cpp3
-rw-r--r--host/lib/usrp/usrp2/dboard_iface.cpp2
-rw-r--r--host/lib/usrp/usrp2/io_impl.cpp40
-rw-r--r--host/lib/usrp/usrp2/n200_image_loader.cpp4
-rw-r--r--host/lib/usrp/usrp2/usrp2_fifo_ctrl.cpp2
-rw-r--r--host/lib/usrp/usrp2/usrp2_iface.cpp7
-rw-r--r--host/lib/usrp/usrp2/usrp2_impl.cpp51
-rw-r--r--host/lib/usrp/usrp_c.cpp3
-rw-r--r--host/lib/usrp/x300/x300_adc_ctrl.cpp1
-rw-r--r--host/lib/usrp/x300/x300_adc_dac_utils.cpp24
-rw-r--r--host/lib/usrp/x300/x300_clock_ctrl.cpp5
-rw-r--r--host/lib/usrp/x300/x300_dac_ctrl.cpp5
-rw-r--r--host/lib/usrp/x300/x300_dboard_iface.cpp2
-rw-r--r--host/lib/usrp/x300/x300_fw_ctrl.cpp22
-rw-r--r--host/lib/usrp/x300/x300_fw_uart.cpp5
-rw-r--r--host/lib/usrp/x300/x300_image_loader.cpp2
-rw-r--r--host/lib/usrp/x300/x300_impl.cpp180
-rw-r--r--host/lib/usrp/x300/x300_impl.hpp9
-rw-r--r--host/lib/usrp/x300/x300_io_impl.cpp6
-rw-r--r--host/lib/usrp/x300/x300_mb_eeprom.cpp2
-rw-r--r--host/lib/usrp/x300/x300_radio_ctrl_impl.cpp330
-rw-r--r--host/lib/usrp/x300/x300_radio_ctrl_impl.hpp19
-rw-r--r--host/lib/usrp_clock/multi_usrp_clock.cpp5
-rw-r--r--host/lib/usrp_clock/octoclock/octoclock_eeprom.cpp1
-rw-r--r--host/lib/usrp_clock/octoclock/octoclock_image_loader.cpp2
-rw-r--r--host/lib/usrp_clock/octoclock/octoclock_impl.cpp44
-rw-r--r--host/lib/usrp_clock/usrp_clock_c.cpp3
-rw-r--r--host/lib/utils/CMakeLists.txt1
-rw-r--r--host/lib/utils/csv.cpp3
-rw-r--r--host/lib/utils/gain_group.cpp17
-rw-r--r--host/lib/utils/load_modules.cpp3
-rw-r--r--host/lib/utils/log.cpp511
-rw-r--r--host/lib/utils/msg.cpp130
-rw-r--r--host/lib/utils/paths.cpp9
-rw-r--r--host/lib/utils/tasks.cpp18
-rw-r--r--host/lib/utils/thread_priority.cpp6
-rw-r--r--host/lib/utils/thread_priority_c.cpp2
-rw-r--r--host/lib/version.cpp13
-rw-r--r--host/tests/CMakeLists.txt3
-rw-r--r--host/tests/addr_test.cpp1
-rw-r--r--host/tests/blockdef_test.cpp1
-rw-r--r--host/tests/cal_container_test.cpp200
-rw-r--r--host/tests/convert_test.cpp23
-rw-r--r--host/tests/device3_test.cpp1
-rw-r--r--host/tests/graph_search_test.cpp6
-rw-r--r--host/tests/log_test.cpp (renamed from host/tests/msg_test.cpp)33
-rw-r--r--host/tests/nocscript_expr_test.cpp1
-rw-r--r--host/tests/nocscript_ftable_test.cpp1
-rw-r--r--host/tests/nocscript_parser_test.cpp1
-rw-r--r--host/tests/ranges_test.cpp9
-rw-r--r--host/tests/sid_t_test.cpp3
-rw-r--r--host/tests/subdev_spec_test.cpp9
-rw-r--r--host/tests/time_spec_test.cpp1
-rw-r--r--host/utils/CMakeLists.txt2
-rw-r--r--host/utils/b100_eeprom.h15
-rw-r--r--host/utils/converter_benchmark.cpp6
-rw-r--r--host/utils/fx2_init_eeprom.cpp53
-rw-r--r--host/utils/latency/include/Responder.hpp5
-rw-r--r--host/utils/latency/lib/Responder.cpp62
-rw-r--r--host/utils/octoclock_firmware_burner.cpp381
-rw-r--r--host/utils/uhd_cal_rx_iq_balance.cpp3
-rw-r--r--host/utils/uhd_cal_tx_dc_offset.cpp3
-rw-r--r--host/utils/uhd_cal_tx_iq_balance.cpp2
-rw-r--r--host/utils/uhd_find_devices.cpp50
-rw-r--r--host/utils/uhd_usrp_probe.cpp95
-rw-r--r--host/utils/usrp1_eeprom.h15
-rw-r--r--host/utils/usrp_cal_utils.hpp5
-rw-r--r--host/utils/usrp_e3x0_network_mode.cpp5
-rwxr-xr-xhost/utils/usrp_n2xx_net_burner.py486
-rwxr-xr-xhost/utils/usrp_n2xx_net_burner_gui.py248
-rw-r--r--host/utils/usrp_n2xx_simple_net_burner.cpp634
-rw-r--r--host/utils/usrp_x3xx_fpga_burner.cpp485
-rwxr-xr-xtools/dissectors/make-dissector-reg.py18
-rw-r--r--tools/gr-usrptest/CMakeLists.txt198
-rw-r--r--tools/gr-usrptest/MANIFEST.md16
-rw-r--r--tools/gr-usrptest/README.md18
-rw-r--r--tools/gr-usrptest/apps/CMakeLists.txt26
-rwxr-xr-xtools/gr-usrptest/apps/usrp_phasealignment.py72
-rwxr-xr-xtools/gr-usrptest/apps/usrp_selftest.py17
-rw-r--r--tools/gr-usrptest/cmake/Modules/CMakeParseArgumentsCopy.cmake138
-rw-r--r--tools/gr-usrptest/cmake/Modules/FindCppUnit.cmake39
-rw-r--r--tools/gr-usrptest/cmake/Modules/FindGnuradioRuntime.cmake36
-rw-r--r--tools/gr-usrptest/cmake/Modules/GrMiscUtils.cmake528
-rw-r--r--tools/gr-usrptest/cmake/Modules/GrPlatform.cmake54
-rw-r--r--tools/gr-usrptest/cmake/Modules/GrPython.cmake241
-rw-r--r--tools/gr-usrptest/cmake/Modules/GrSwig.cmake251
-rw-r--r--tools/gr-usrptest/cmake/Modules/GrTest.cmake143
-rw-r--r--tools/gr-usrptest/cmake/Modules/UseSWIG.cmake304
-rw-r--r--tools/gr-usrptest/cmake/Modules/usrptestConfig.cmake30
-rw-r--r--tools/gr-usrptest/cmake/cmake_uninstall.cmake.in32
-rw-r--r--tools/gr-usrptest/docs/CMakeLists.txt35
-rw-r--r--tools/gr-usrptest/docs/README.usrptest11
-rw-r--r--tools/gr-usrptest/docs/doxygen/CMakeLists.txt52
-rw-r--r--tools/gr-usrptest/docs/doxygen/Doxyfile.in1922
-rw-r--r--tools/gr-usrptest/docs/doxygen/Doxyfile.swig_doc.in1890
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/__init__.py82
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/base.py219
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/doxyindex.py301
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/generated/__init__.py7
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/generated/compound.py503
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/generated/compoundsuper.py8342
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/generated/index.py77
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/generated/indexsuper.py523
-rw-r--r--tools/gr-usrptest/docs/doxygen/doxyxml/text.py56
-rw-r--r--tools/gr-usrptest/docs/doxygen/other/group_defs.dox7
-rw-r--r--tools/gr-usrptest/docs/doxygen/other/main_page.dox10
-rw-r--r--tools/gr-usrptest/docs/doxygen/swig_doc.py328
-rwxr-xr-xtools/gr-usrptest/examples/lv_control_example.py81
-rw-r--r--tools/gr-usrptest/examples/phase_diff_x310_ubx_example.grc3296
-rw-r--r--tools/gr-usrptest/grc/CMakeLists.txt23
-rw-r--r--tools/gr-usrptest/grc/usrptest_measurement_sink_f.xml38
-rw-r--r--tools/gr-usrptest/grc/usrptest_phase_calc_ccf.xml20
-rw-r--r--tools/gr-usrptest/include/usrptest/CMakeLists.txt26
-rw-r--r--tools/gr-usrptest/include/usrptest/api.h33
-rw-r--r--tools/gr-usrptest/include/usrptest/measurement_sink_f.h61
-rw-r--r--tools/gr-usrptest/lib/CMakeLists.txt59
-rw-r--r--tools/gr-usrptest/lib/measurement_sink_f_impl.cc124
-rw-r--r--tools/gr-usrptest/lib/measurement_sink_f_impl.h62
-rw-r--r--tools/gr-usrptest/lib/qa_usrptest.cc36
-rw-r--r--tools/gr-usrptest/lib/qa_usrptest.h38
-rw-r--r--tools/gr-usrptest/lib/test_usrptest.cc48
-rw-r--r--tools/gr-usrptest/python/CMakeLists.txt49
-rw-r--r--tools/gr-usrptest/python/__init__.py35
-rw-r--r--tools/gr-usrptest/python/build_utils.py226
-rw-r--r--tools/gr-usrptest/python/build_utils_codes.py52
-rw-r--r--tools/gr-usrptest/python/flowgraphs/CMakeLists.txt28
-rw-r--r--tools/gr-usrptest/python/flowgraphs/__init__.py14
-rw-r--r--tools/gr-usrptest/python/flowgraphs/phasealignment_fg.py119
-rw-r--r--tools/gr-usrptest/python/flowgraphs/selftest_fg.py122
-rw-r--r--tools/gr-usrptest/python/functions.py157
-rw-r--r--tools/gr-usrptest/python/labview_control/CMakeLists.txt28
-rw-r--r--tools/gr-usrptest/python/labview_control/__init__.py14
-rw-r--r--tools/gr-usrptest/python/labview_control/lv_control.py87
-rw-r--r--tools/gr-usrptest/python/phase_calc_ccf.py47
-rwxr-xr-xtools/gr-usrptest/python/qa_measurement_sink_f.py41
-rw-r--r--tools/gr-usrptest/python/rts_tests/CMakeLists.txt33
-rw-r--r--tools/gr-usrptest/python/rts_tests/__init__.py14
-rwxr-xr-xtools/gr-usrptest/python/rts_tests/test_phasealignment.py174
-rw-r--r--tools/gr-usrptest/python/setup.py13
-rw-r--r--tools/gr-usrptest/swig/CMakeLists.txt65
-rw-r--r--tools/gr-usrptest/swig/usrptest_swig.i16
-rw-r--r--tools/kitchen_sink/kitchen_sink.cpp1
373 files changed, 26482 insertions, 4902 deletions
diff --git a/.gitmodules b/.gitmodules
index 473a21289..c85c089b3 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
[submodule "fpga-src"]
path = fpga-src
url = https://github.com/EttusResearch/fpga.git
- branch = maint
+ branch = master
diff --git a/README.md b/README.md
index 1aa57bc64..d9e99970e 100644
--- a/README.md
+++ b/README.md
@@ -74,7 +74,7 @@ building applications that link against UHD.
__images/__
This contains the package builder for FPGA and firmware images.
-We provide other tools to downloade image packages, the scripts in here
+We provide other tools to download image packages, the scripts in here
are mainly relevant for UHD maintainers and -developers.
__tools/__
diff --git a/firmware/fx2/b100/CMakeLists.txt b/firmware/fx2/b100/CMakeLists.txt
index 438aa9207..18bc2080a 100644
--- a/firmware/fx2/b100/CMakeLists.txt
+++ b/firmware/fx2/b100/CMakeLists.txt
@@ -84,5 +84,11 @@ add_custom_target(b100_eeprom ALL
COMMAND ${PYTHON_EXECUTABLE} ${BUILD_EEPROM} -r2 b100_boot.bin b100_eeprom.bin
)
+add_custom_target(b100_eeprom_header ALL
+ DEPENDS b100_eeprom
+ COMMAND xxd -i b100_eeprom.bin ${CMAKE_SOURCE_DIR}/../../host/utils/b100_eeprom.h
+ COMMAND sed -i 's/char/const char/' ${CMAKE_SOURCE_DIR}/../../host/utils/b100_eeprom.h
+)
+
add_executable(b100_boot ${eeprom1p_sources})
target_link_libraries(b100_boot libb100)
diff --git a/firmware/fx2/usrp1/CMakeLists.txt b/firmware/fx2/usrp1/CMakeLists.txt
index 6607bc7f2..8b5389535 100644
--- a/firmware/fx2/usrp1/CMakeLists.txt
+++ b/firmware/fx2/usrp1/CMakeLists.txt
@@ -80,5 +80,11 @@ add_custom_target(usrp1_eeprom ALL
COMMAND ${PYTHON_EXECUTABLE} ${BUILD_EEPROM} -r1 usrp1_boot.bin usrp1_eeprom.bin
)
+add_custom_target(usrp1_eeprom_header ALL
+ DEPENDS usrp1_eeprom
+ COMMAND xxd -i usrp1_eeprom.bin ${CMAKE_SOURCE_DIR}/../../host/utils/usrp1_eeprom.h
+ COMMAND sed -i 's/char/const char/' ${CMAKE_SOURCE_DIR}/../../host/utils/usrp1_eeprom.h
+)
+
add_executable(usrp1_boot ${eeprom1_sources})
target_link_libraries(usrp1_boot libusrp1)
diff --git a/firmware/fx3/b200/b200_main.c b/firmware/fx3/b200/b200_main.c
index e552d3177..e5c55d6a7 100644
--- a/firmware/fx3/b200/b200_main.c
+++ b/firmware/fx3/b200/b200_main.c
@@ -228,7 +228,7 @@ typedef struct Config {
int enable_as_superspeed; // 1
int pport_drive_strength; // CY_U3P_DS_THREE_QUARTER_STRENGTH
int dma_buffer_size; // [USB3] (max)
- int dma_buffer_count; // [USB3] 1
+ int dma_buffer_count; // [USB3] 2
int manual_dma; // 0
int sb_baud_div; // 434*2
} CONFIG, *PCONFIG;
@@ -244,8 +244,8 @@ static CONFIG g_config = {
0, // disable_usb2
1, // enable_as_superspeed
CY_U3P_DS_THREE_QUARTER_STRENGTH, // pport_drive_strength
- 64512, // dma_buffer_size 2**16-1, then aligned to next page boundary
- 1, // dma_buffer_count
+ 16*1024, // dma_buffer_size - optimized value from Cypress AN86947
+ 2, // dma_buffer_count - optimized value from Cypress AN86947
0, // manual_dma
434*2 // sb_baud_div
};
@@ -1175,7 +1175,7 @@ void b200_fw_start(void) {
data_buffer_size_from_host = data_buffer_size;
g_vendor_req_buff_size = USB3_VREQ_BUF_SIZE; // Max 512
- num_packets_per_burst = USB3_PACKETS_PER_BURST*1+4*0; // 16
+ num_packets_per_burst = USB3_PACKETS_PER_BURST; // 8
break;
case CY_U3P_NOT_CONNECTED:
@@ -1529,7 +1529,7 @@ void event_usb_callback (CyU3PUsbEventType_t event_type, uint16_t event_data) {
* of this function is to register that the event happened at all, so that the
* application thread knows it can proceed.
*
- * This function is also responsible for receiving vendor requests, and trigging
+ * This function is also responsible for receiving vendor requests, and triggering
* the appropriate RTOS event to wake up the vendor request handler thread.
*/
CyBool_t usb_setup_callback(uint32_t data0, uint32_t data1) {
diff --git a/firmware/fx3/b200/b200_main.h b/firmware/fx3/b200/b200_main.h
index 9fe8b9511..453e7034a 100644
--- a/firmware/fx3/b200/b200_main.h
+++ b/firmware/fx3/b200/b200_main.h
@@ -41,7 +41,7 @@
#define MASK_GPIO_FPGA_SB_SCL (uint32_t)(1 << (GPIO_FPGA_SB_SCL - 0))
#define MASK_GPIO_FPGA_SB_SDA (uint32_t)(1 << (GPIO_FPGA_SB_SDA - 0))
-#define USB3_PACKETS_PER_BURST (16)
+#define USB3_PACKETS_PER_BURST (8) // Optimized value from Cypress AN86947
#define USB2_PACKETS_PER_BURST (1)
#define DMA_SIZE_INFINITE (0)
diff --git a/firmware/usrp3/lib/CMakeLists.txt b/firmware/usrp3/lib/CMakeLists.txt
index 9d9ee3c6c..cd1a82c3f 100644
--- a/firmware/usrp3/lib/CMakeLists.txt
+++ b/firmware/usrp3/lib/CMakeLists.txt
@@ -30,7 +30,6 @@ add_library(usrp3fw STATIC
print_addrs.c
link_state_route_proto.c
cron.c
- fw_comm_protocol.c
flash/spi_flash.c
flash/spif_spsn_s25flxx.c
)
diff --git a/firmware/usrp3/n230/CMakeLists.txt b/firmware/usrp3/n230/CMakeLists.txt
index 6247477f0..5787fbb7d 100644
--- a/firmware/usrp3/n230/CMakeLists.txt
+++ b/firmware/usrp3/n230/CMakeLists.txt
@@ -19,7 +19,12 @@
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${CMAKE_SOURCE_DIR}/../../host/lib/usrp/n230)
-list(APPEND n230_sources n230_eeprom.c n230_eth_handlers.c n230_init.c n230_main.c)
+list(APPEND n230_sources
+ n230_eeprom.c
+ n230_fw_comm_protocol.c
+ n230_eth_handlers.c
+ n230_init.c
+ n230_main.c)
########################################################################
set(GEN_OUTPUTS_BIN_SIZE 0x7fff)
diff --git a/firmware/usrp3/n230/n230_eth_handlers.c b/firmware/usrp3/n230/n230_eth_handlers.c
index b291bb39f..f5c319681 100644
--- a/firmware/usrp3/n230/n230_eth_handlers.c
+++ b/firmware/usrp3/n230/n230_eth_handlers.c
@@ -22,7 +22,7 @@
#include <u3_net_stack.h>
#include <print_addrs.h>
#include <trace.h>
-#include "../../../host/lib/usrp/common/fw_comm_protocol.h"
+#include "../../../host/lib/usrp/n230/n230_fw_comm_protocol.h"
#include "../../../host/lib/usrp/n230/n230_fw_defs.h"
#include "../n230/n230_fw_host_iface.h"
#include "../../../host/lib/usrp/n230/n230_eeprom.h"
diff --git a/firmware/usrp3/lib/fw_comm_protocol.c b/firmware/usrp3/n230/n230_fw_comm_protocol.c
index 0cc931a76..d6f6dff5a 100644
--- a/firmware/usrp3/lib/fw_comm_protocol.c
+++ b/firmware/usrp3/n230/n230_fw_comm_protocol.c
@@ -15,7 +15,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include "../../../host/lib/usrp/common/fw_comm_protocol.h"
+#include "../../../host/lib/usrp/n230/n230_fw_comm_protocol.h"
#include <trace.h>
#include <string.h> //memcmp
diff --git a/firmware/usrp3/x300/x300_aurora_bist.py b/firmware/usrp3/x300/x300_aurora_bist.py
index f5e119b66..5f7a22a3b 100755
--- a/firmware/usrp3/x300/x300_aurora_bist.py
+++ b/firmware/usrp3/x300/x300_aurora_bist.py
@@ -43,8 +43,9 @@ SFP_TYPE_AURORA = 2
MAC_REG_CTRL = 0
MAC_REG_STATUS = 0
MAC_REG_OVERRUNS = 4
-MAC_REG_BIST_SAMPS = 8
-MAC_REG_BIST_ERRORS = 12
+MAC_REG_CHECKSUM_ERRS = 8
+MAC_REG_BIST_SAMPS = 12
+MAC_REG_BIST_ERRORS = 16
MAC_STATUS_LINK_UP_MSK = 0x00000001
MAC_STATUS_HARD_ERR_MSK = 0x00000002
@@ -52,19 +53,16 @@ MAC_STATUS_SOFT_ERR_MSK = 0x00000004
MAC_STATUS_BIST_LOCKED_MSK = 0x00000008
MAC_STATUS_BIST_LATENCY_MSK = 0x000FFFF0
MAC_STATUS_BIST_LATENCY_OFFSET = 4
-MAC_STATUS_CHECKSUM_ERRS_MSK = 0xFFF00000
-MAC_STATUS_CHECKSUM_ERRS_OFFSET = 20
MAC_CTRL_BIST_CHECKER_EN = 0x00000001
MAC_CTRL_BIST_GEN_EN = 0x00000002
MAC_CTRL_BIST_LOOPBACK_EN = 0x00000004
-MAC_CTRL_PHY_RESET = 0x00000100
-MAC_CTRL_BIST_RATE_MSK = 0x000000F8
+MAC_CTRL_PHY_RESET = 0x00000200
+MAC_CTRL_BIST_RATE_MSK = 0x000001F8
MAC_CTRL_BIST_RATE_OFFSET = 3
-AURORA_CLK_RATE = 156.25e6
BUS_CLK_RATE = 166.66e6
-BIST_MAX_TIME_LIMIT = math.floor(pow(2,48)/AURORA_CLK_RATE)-1
+BIST_MAX_TIME_LIMIT = math.floor(pow(2,48)/BUS_CLK_RATE)-1
########################################################################
# utils
@@ -83,10 +81,11 @@ def get_aurora_info(ctrl):
return (aur_port, link_up)
def get_rate_setting(rate):
- for div in range(2,32):
- if (rate < 8e-6 * BUS_CLK_RATE * (1.0 - 1.0/div)):
- return (div-1, 8e-6 * BUS_CLK_RATE * (1.0 - 1.0/(div-1)))
- return (0, 8e-6 * BUS_CLK_RATE)
+ RATE_RES_BITS = 6
+ rate_sett = int((1.0*rate/(8e-6*BUS_CLK_RATE))*pow(2, RATE_RES_BITS))-1 # Round rate down
+ rate_sett = numpy.clip(rate_sett, 0, pow(2, RATE_RES_BITS)-1)
+ coerced_rate = ((1.0+rate_sett)/pow(2, RATE_RES_BITS))*(8e-6*BUS_CLK_RATE)
+ return (rate_sett, coerced_rate)
def run_ber_loopback_bist(ctrls, duration, rate_tuple):
(rate_sett, rate) = rate_tuple
@@ -107,10 +106,7 @@ def run_ber_loopback_bist(ctrls, duration, rate_tuple):
# Put the slave in loopback mode and the master in BIST mode
if 'slave' in ctrls:
ctrls['slave'].poke(SLA_MAC_REG_BASE + MAC_REG_CTRL, MAC_CTRL_BIST_LOOPBACK_EN)
- if rate_sett == 0:
- master_ctrl = MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
- else:
- master_ctrl = ((rate_sett - 1) << MAC_CTRL_BIST_RATE_OFFSET)|MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
+ master_ctrl = (rate_sett<<MAC_CTRL_BIST_RATE_OFFSET)|MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
ctrls['master'].poke(MST_MAC_REG_BASE + MAC_REG_CTRL, master_ctrl)
start_time = datetime.datetime.now()
# Wait and check if BIST locked
@@ -153,7 +149,7 @@ def run_ber_loopback_bist(ctrls, duration, rate_tuple):
print('[INFO] BIST Complete!')
print('- Elapsed Time = ' + str(time_diff))
print('- Max BER (Bit Error Ratio) = %.4g (%d errors out of %d)'%((mst_errors+1)/mst_samps,mst_errors,mst_samps))
- print('- Max Roundtrip Latency = %.1fus'%(1e6*mst_latency_cyc/AURORA_CLK_RATE))
+ print('- Max Roundtrip Latency = %.1fus'%(1e6*mst_latency_cyc/BUS_CLK_RATE))
print('- Approx Throughput = %.0fMB/s'%((8e-6*mst_samps)/time_diff.total_seconds()))
else:
print('[ERROR] BIST Failed!')
@@ -187,10 +183,7 @@ def run_latency_loopback_bist(ctrls, duration, rate_tuple):
# Put the slave in loopback mode and the master in BIST mode
if 'slave' in ctrls:
ctrls['slave'].poke(SLA_MAC_REG_BASE + MAC_REG_CTRL, MAC_CTRL_BIST_LOOPBACK_EN)
- if rate_sett == 0:
- master_ctrl = MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
- else:
- master_ctrl = ((rate_sett - 1) << MAC_CTRL_BIST_RATE_OFFSET)|MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
+ master_ctrl = (rate_sett<<MAC_CTRL_BIST_RATE_OFFSET)|MAC_CTRL_BIST_GEN_EN|MAC_CTRL_BIST_CHECKER_EN
start_time = datetime.datetime.now()
latencies = []
@@ -216,7 +209,7 @@ def run_latency_loopback_bist(ctrls, duration, rate_tuple):
ctrls['master'].poke(MST_MAC_REG_BASE + MAC_REG_CTRL, 0)
# Compute latency
mst_latency_cyc = 16.0*((mst_status & MAC_STATUS_BIST_LATENCY_MSK) >> MAC_STATUS_BIST_LATENCY_OFFSET)
- mst_latency_us = 1e6*mst_latency_cyc/AURORA_CLK_RATE
+ mst_latency_us = 1e6*mst_latency_cyc/BUS_CLK_RATE
latencies.append(mst_latency_us)
except KeyboardInterrupt:
print('[WARNING] Operation cancelled by user.')
@@ -233,7 +226,7 @@ def run_latency_loopback_bist(ctrls, duration, rate_tuple):
print('- Elapsed Time = ' + str(stop_time - start_time))
print('- Roundtrip Latency Mean = %.2fus'%(numpy.mean(latencies)))
print('- Roundtrip Latency Stdev = %.2fus'%(numpy.std(latencies)))
- # Turn off BIST loopback
+ # Turn off BIST loopback
time.sleep(0.5)
if 'slave' in ctrls:
ctrls['slave'].poke(SLA_MAC_REG_BASE + MAC_REG_CTRL, 0)
@@ -304,4 +297,3 @@ if __name__=='__main__':
run_ber_loopback_bist(ctrls, options.duration, get_rate_setting(options.rate))
else:
run_latency_loopback_bist(ctrls, options.duration, get_rate_setting(options.rate))
-
diff --git a/fpga-src b/fpga-src
-Subproject e69dc9e014773aa5ed70d0757908d5de784a2de
+Subproject 0821320bdf59756dc8f29243db18f0e8111aa70
diff --git a/host/CMakeLists.txt b/host/CMakeLists.txt
index 069a0c973..6e4e7d6cc 100644
--- a/host/CMakeLists.txt
+++ b/host/CMakeLists.txt
@@ -80,9 +80,25 @@ ELSEIF(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
ENDIF()
ENDIF()
+IF(CMAKE_VERSION VERSION_LESS "3.1")
+ IF(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU")
+ SET(CMAKE_CXX_FLAGS "--std=gnu++11 ${CMAKE_CXX_FLAGS}")
+ ELSEIF(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
+ IF("${IS_APPLE}" STREQUAL "")
+ SET(CMAKE_CXX_FLAGS "-std=c++11 ${CMAKE_CXX_FLAGS}")
+ ELSE()
+ SET(CMAKE_CXX_FLAGS "-std=c++11 -stdlib=libc++ ${CMAKE_CXX_FLAGS}")
+ ENDIF()
+ ENDIF()
+ELSE()
+ SET(CMAKE_CXX_STANDARD 11)
+ENDIF()
+
IF(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD" AND ${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
SET(CMAKE_EXE_LINKER_FLAGS "-lthr ${CMAKE_EXE_LINKER_FLAGS}")
+ SET(CMAKE_CXX_FLAGS "-stdlib=libc++ ${CMAKE_CXX_FLAGS}")
ENDIF()
+
########################################################################
# Packaging Variables
########################################################################
@@ -177,7 +193,7 @@ INCLUDE(CheckCXXCompilerFlag)
MACRO(UHD_ADD_OPTIONAL_CXX_COMPILER_FLAG flag have)
CHECK_CXX_COMPILER_FLAG(${flag} ${have})
IF(${have})
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${flag}")
+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${flag}")
ENDIF(${have})
ENDMACRO(UHD_ADD_OPTIONAL_CXX_COMPILER_FLAG)
@@ -195,7 +211,7 @@ IF(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU" OR
${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
IF(STRIP_BINARIES)
IF(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -s")
+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -s")
ENDIF(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
ENDIF(STRIP_BINARIES)
ADD_DEFINITIONS(-Wall)
@@ -209,6 +225,12 @@ IF(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU" OR
UHD_ADD_OPTIONAL_CXX_COMPILER_FLAG(-fvisibility=hidden HAVE_VISIBILITY_HIDDEN)
UHD_ADD_OPTIONAL_CXX_COMPILER_FLAG(-fvisibility-inlines-hidden HAVE_VISIBILITY_INLINES_HIDDEN)
ENDIF(NOT WIN32)
+ IF(${CMAKE_BUILD_TYPE} STREQUAL "Coverage")
+ include(CodeCoverage)
+ setup_target_for_coverage(coverage "ctest || return 0" coverage) # never fail ctest, always generate coverage report
+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pedantic -pthread -g -O0 -fprofile-arcs -ftest-coverage" CACHE STRING "Flags used by the C++ compiler during Coverage builds." FORCE)
+ SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -pthread -g -O0 -fprofile-arcs -ftest-coverage" CACHE STRING "Flags used by the C compiler during Coverage builds." FORCE)
+ ENDIF()
ENDIF()
IF(MSVC)
@@ -289,6 +311,7 @@ MESSAGE(STATUS "Boost libraries: ${Boost_LIBRARIES}")
# Note: RFNoC never gets fully disabled, but the public APIs do
SET(ENABLE_RFNOC OFF CACHE BOOL "Export RFNoC includes and symbols")
INCLUDE(UHDGlobalDefs)
+INCLUDE(UHDLog)
########################################################################
# Check Python Modules
diff --git a/host/cmake/Modules/CodeCoverage.cmake b/host/cmake/Modules/CodeCoverage.cmake
new file mode 100644
index 000000000..a0b0ef526
--- /dev/null
+++ b/host/cmake/Modules/CodeCoverage.cmake
@@ -0,0 +1,197 @@
+# Copyright (c) 2012 - 2015, Lars Bilke
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+#
+#
+# 2012-01-31, Lars Bilke
+# - Enable Code Coverage
+#
+# 2013-09-17, Joakim Söderberg
+# - Added support for Clang.
+# - Some additional usage instructions.
+#
+# USAGE:
+
+# 0. (Mac only) If you use Xcode 5.1 make sure to patch geninfo as described here:
+# http://stackoverflow.com/a/22404544/80480
+#
+# 1. Copy this file into your cmake modules path.
+#
+# 2. Add the following line to your CMakeLists.txt:
+# INCLUDE(CodeCoverage)
+#
+# 3. Set compiler flags to turn off optimization and enable coverage:
+# SET(CMAKE_CXX_FLAGS "-g -O0 -fprofile-arcs -ftest-coverage")
+# SET(CMAKE_C_FLAGS "-g -O0 -fprofile-arcs -ftest-coverage")
+#
+# 3. Use the function SETUP_TARGET_FOR_COVERAGE to create a custom make target
+# which runs your test executable and produces a lcov code coverage report:
+# Example:
+# SETUP_TARGET_FOR_COVERAGE(
+# my_coverage_target # Name for custom target.
+# test_driver # Name of the test driver executable that runs the tests.
+# # NOTE! This should always have a ZERO as exit code
+# # otherwise the coverage generation will not complete.
+# coverage # Name of output directory.
+# )
+#
+# 4. Build a Debug build:
+# cmake -DCMAKE_BUILD_TYPE=Debug ..
+# make
+# make my_coverage_target
+#
+#
+
+# Check prereqs
+FIND_PROGRAM( GCOV_PATH gcov )
+FIND_PROGRAM( LCOV_PATH lcov )
+FIND_PROGRAM( GENHTML_PATH genhtml )
+FIND_PROGRAM( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/tests)
+
+IF(NOT GCOV_PATH)
+ MESSAGE(FATAL_ERROR "gcov not found! Aborting...")
+ENDIF() # NOT GCOV_PATH
+
+IF("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
+ IF("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 3)
+ MESSAGE(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
+ ENDIF()
+ELSEIF(NOT CMAKE_COMPILER_IS_GNUCXX)
+ MESSAGE(FATAL_ERROR "Compiler is not GNU gcc! Aborting...")
+ENDIF() # CHECK VALID COMPILER
+
+SET(CMAKE_CXX_FLAGS_COVERAGE
+ "-g -O0 --coverage -fprofile-arcs -ftest-coverage"
+ CACHE STRING "Flags used by the C++ compiler during coverage builds."
+ FORCE )
+SET(CMAKE_C_FLAGS_COVERAGE
+ "-g -O0 --coverage -fprofile-arcs -ftest-coverage"
+ CACHE STRING "Flags used by the C compiler during coverage builds."
+ FORCE )
+SET(CMAKE_EXE_LINKER_FLAGS_COVERAGE
+ ""
+ CACHE STRING "Flags used for linking binaries during coverage builds."
+ FORCE )
+SET(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
+ ""
+ CACHE STRING "Flags used by the shared libraries linker during coverage builds."
+ FORCE )
+MARK_AS_ADVANCED(
+ CMAKE_CXX_FLAGS_COVERAGE
+ CMAKE_C_FLAGS_COVERAGE
+ CMAKE_EXE_LINKER_FLAGS_COVERAGE
+ CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
+
+IF ( NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR CMAKE_BUILD_TYPE STREQUAL "Coverage"))
+ MESSAGE( WARNING "Code coverage results with an optimized (non-Debug) build may be misleading" )
+ENDIF() # NOT CMAKE_BUILD_TYPE STREQUAL "Debug"
+
+
+# Param _targetname The name of the new custom make target
+# Param _testrunner The name of the target which runs the tests.
+# MUST return ZERO always, even on errors.
+# If not, no coverage report will be created!
+# Param _outputname lcov output is generated as _outputname.info
+# HTML report is generated in _outputname/index.html
+# Optional fourth parameter is passed as arguments to _testrunner
+# Pass them in list form, e.g.: "-j;2" for -j 2
+FUNCTION(SETUP_TARGET_FOR_COVERAGE _targetname _testrunner _outputname)
+
+ IF(NOT LCOV_PATH)
+ MESSAGE(FATAL_ERROR "lcov not found! Aborting...")
+ ENDIF() # NOT LCOV_PATH
+
+ IF(NOT GENHTML_PATH)
+ MESSAGE(FATAL_ERROR "genhtml not found! Aborting...")
+ ENDIF() # NOT GENHTML_PATH
+
+ SET(coverage_info "${CMAKE_BINARY_DIR}/${_outputname}.info")
+ SET(coverage_cleaned "${coverage_info}.cleaned")
+
+ SEPARATE_ARGUMENTS(test_command UNIX_COMMAND "${_testrunner}")
+
+ # Setup target
+ ADD_CUSTOM_TARGET(${_targetname}
+
+ # Cleanup lcov
+ ${LCOV_PATH} --directory . --zerocounters
+
+ # Run tests
+ COMMAND ${test_command} ${ARGV3}
+
+ # Capturing lcov counters and generating report
+ COMMAND ${LCOV_PATH} --directory . --capture --output-file ${coverage_info}
+ COMMAND ${LCOV_PATH} --remove ${coverage_info} 'tests/*' '/usr/*' --output-file ${coverage_cleaned}
+ COMMAND ${GENHTML_PATH} -o ${_outputname} ${coverage_cleaned}
+ COMMAND ${CMAKE_COMMAND} -E remove ${coverage_info} ${coverage_cleaned}
+
+ WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
+ COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."
+ )
+
+ # Show info where to find the report
+ ADD_CUSTOM_COMMAND(TARGET ${_targetname} POST_BUILD
+ COMMAND ;
+ COMMENT "Open ./${_outputname}/index.html in your browser to view the coverage report."
+ )
+
+ENDFUNCTION() # SETUP_TARGET_FOR_COVERAGE
+
+# Param _targetname The name of the new custom make target
+# Param _testrunner The name of the target which runs the tests
+# Param _outputname cobertura output is generated as _outputname.xml
+# Optional fourth parameter is passed as arguments to _testrunner
+# Pass them in list form, e.g.: "-j;2" for -j 2
+FUNCTION(SETUP_TARGET_FOR_COVERAGE_COBERTURA _targetname _testrunner _outputname)
+
+ IF(NOT PYTHON_EXECUTABLE)
+ MESSAGE(FATAL_ERROR "Python not found! Aborting...")
+ ENDIF() # NOT PYTHON_EXECUTABLE
+
+ IF(NOT GCOVR_PATH)
+ MESSAGE(FATAL_ERROR "gcovr not found! Aborting...")
+ ENDIF() # NOT GCOVR_PATH
+
+ ADD_CUSTOM_TARGET(${_targetname}
+
+ # Run tests
+ ${_testrunner} ${ARGV3}
+
+ # Running gcovr
+ COMMAND ${GCOVR_PATH} -x -r ${CMAKE_SOURCE_DIR} -e '${CMAKE_SOURCE_DIR}/tests/' -o ${_outputname}.xml
+ WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
+ COMMENT "Running gcovr to produce Cobertura code coverage report."
+ )
+
+ # Show info where to find the report
+ ADD_CUSTOM_COMMAND(TARGET ${_targetname} POST_BUILD
+ COMMAND ;
+ COMMENT "Cobertura code coverage report saved in ${_outputname}.xml."
+ )
+
+ENDFUNCTION() # SETUP_TARGET_FOR_COVERAGE_COBERTURA
diff --git a/host/cmake/Modules/UHDConfigVersion.cmake.in b/host/cmake/Modules/UHDConfigVersion.cmake.in
index 549798324..c792b5bf8 100644
--- a/host/cmake/Modules/UHDConfigVersion.cmake.in
+++ b/host/cmake/Modules/UHDConfigVersion.cmake.in
@@ -29,13 +29,13 @@ set(ENV{UHD_CONFIG_VERSION_USED} TRUE)
# version values as set in cmake/Modules/UHDVersion.cmake, placed
# statically in here to avoid using Python all over again.
-SET(MAJOR_VERSION @TRIMMED_VERSION_MAJOR@)
-SET(API_VERSION @TRIMMED_VERSION_API@)
-SET(ABI_VERSION @TRIMMED_VERSION_ABI@)
-SET(PATCH_VERSION @TRIMMED_VERSION_PATCH@)
+SET(MAJOR_VERSION @UHD_VERSION_MAJOR@)
+SET(API_VERSION @UHD_VERSION_API@)
+SET(ABI_VERSION @UHD_VERSION_ABI@)
+SET(PATCH_VERSION @UHD_VERSION_PATCH@)
SET(DEVEL_VERSION @UHD_VERSION_DEVEL@)
-SET(PACKAGE_VERSION @TRIMMED_UHD_VERSION@)
+SET(PACKAGE_VERSION @UHD_VERSION@)
SET(ENV{UHD_PACKAGE_VERSION} ${PACKAGE_VERSION})
# There is a bug in CMake whereby calling "find_package(FOO)" within
@@ -175,4 +175,4 @@ IF(${PACKAGE_FIND_VERSION} VERSION_EQUAL ${PACKAGE_VERSION})
ENDIF()
# Undo our patch-version-number hack
-SET(PACKAGE_VERSION @TRIMMED_UHD_VERSION@)
+SET(PACKAGE_VERSION @UHD_VERSION@)
diff --git a/host/cmake/Modules/UHDGlobalDefs.cmake b/host/cmake/Modules/UHDGlobalDefs.cmake
index 70d1a654b..ada6371fb 100644
--- a/host/cmake/Modules/UHDGlobalDefs.cmake
+++ b/host/cmake/Modules/UHDGlobalDefs.cmake
@@ -24,9 +24,9 @@ CHECK_CXX_SYMBOL_EXISTS(log2 cmath HAVE_LOG2)
## Macros for the version number
IF(UHD_VERSION_DEVEL)
- MATH(EXPR UHD_VERSION_ADDED "1000000 * ${TRIMMED_VERSION_MAJOR} + 10000 * ${TRIMMED_VERSION_API} + 100 * ${TRIMMED_VERSION_ABI} + 99")
+ MATH(EXPR UHD_VERSION_ADDED "1000000 * ${UHD_VERSION_MAJOR} + 10000 * ${UHD_VERSION_API} + 100 * ${UHD_VERSION_ABI} + 99")
ELSE()
- MATH(EXPR UHD_VERSION_ADDED "1000000 * ${TRIMMED_VERSION_MAJOR} + 10000 * ${TRIMMED_VERSION_API} + 100 * ${TRIMMED_VERSION_ABI} + ${TRIMMED_VERSION_PATCH}")
+ MATH(EXPR UHD_VERSION_ADDED "1000000 * ${UHD_VERSION_MAJOR} + 10000 * ${UHD_VERSION_API} + 100 * ${UHD_VERSION_ABI} + ${UHD_VERSION_PATCH}")
ENDIF(UHD_VERSION_DEVEL)
## RFNoC
diff --git a/host/cmake/Modules/UHDLog.cmake b/host/cmake/Modules/UHDLog.cmake
new file mode 100644
index 000000000..4bc1daf13
--- /dev/null
+++ b/host/cmake/Modules/UHDLog.cmake
@@ -0,0 +1,72 @@
+########################################################################
+# Logging Variables
+########################################################################
+IF(CMAKE_BUILD_TYPE STREQUAL "Debug")
+ SET(UHD_LOG_MIN_LEVEL "debug" CACHE STRING "Set UHD log level to {trace, debug, info, warning, error, fatal}")
+ SET(UHD_LOG_CONSOLE_DISABLE "OFF" CACHE BOOL "Disable UHD logging to stderr")
+ SET(UHD_LOG_FILE_LEVEL "trace" CACHE STRING "SET UHD file logging level to {trace, debug, info, warning, error, fatal}")
+ SET(UHD_LOG_CONSOLE_LEVEL "debug" CACHE STRING "SET UHD file logging level to {trace, debug, info, warning, error, fatal}")
+ELSE()
+ SET(UHD_LOG_MIN_LEVEL "debug" CACHE STRING "Set UHD log level to {trace, debug, info, warning, error, fatal}")
+ SET(UHD_LOG_CONSOLE_DISABLE "OFF" CACHE BOOL "Disable UHD logging to stderr")
+ SET(UHD_LOG_FILE_LEVEL "info" CACHE STRING "SET UHD file logging level to {trace, debug, info, warning, error, fatal}")
+ SET(UHD_LOG_CONSOLE_LEVEL "info" CACHE STRING "SET UHD file logging level to {trace, debug, info, warning, error, fatal}")
+ENDIF()
+
+FUNCTION(UHD_LOG_LEVEL_CONVERT ARG1 ARG2)
+ string(TOLOWER "${ARG1}" LOG_LEVEL_LOWER)
+ IF(LOG_LEVEL_LOWER STREQUAL "trace")
+ ADD_DEFINITIONS(-D${ARG2}=0)
+ ELSEIF(LOG_LEVEL_LOWER STREQUAL "debug")
+ ADD_DEFINITIONS(-D${ARG2}=1)
+ ELSEIF(LOG_LEVEL_LOWER STREQUAL "info")
+ ADD_DEFINITIONS(-D${ARG2}=2)
+ ELSEIF(LOG_LEVEL_LOWER STREQUAL "warning")
+ ADD_DEFINITIONS(-D${ARG2}=3)
+ ELSEIF(LOG_LEVEL_LOWER STREQUAL "error")
+ ADD_DEFINITIONS(-D${ARG2}=4)
+ ELSEIF(LOG_LEVEL_LOWER STREQUAL "fatal")
+ ADD_DEFINITIONS(-D${ARG2}=5)
+ ELSE()
+ ADD_DEFINITIONS(-D${ARG2}=${ARG1})
+ ENDIF()
+ENDFUNCTION()
+
+UHD_LOG_LEVEL_CONVERT(${UHD_LOG_MIN_LEVEL} "UHD_LOG_MIN_LEVEL")
+UHD_LOG_LEVEL_CONVERT(${UHD_LOG_CONSOLE_LEVEL} "UHD_LOG_CONSOLE_LEVEL")
+UHD_LOG_LEVEL_CONVERT(${UHD_LOG_FILE_LEVEL} "UHD_LOG_FILE_LEVEL")
+
+IF(UHD_LOG_CONSOLE_DISABLE)
+ ADD_DEFINITIONS(-DUHD_LOG_CONSOLE_DISABLE)
+ELSE()
+ IF(UHD_LOG_CONSOLE_TIME)
+ ADD_DEFINITIONS(-DUHD_LOG_CONSOLE_TIME)
+ ENDIF()
+ IF(UHD_LOG_CONSOLE_THREAD)
+ ADD_DEFINITIONS(-DUHD_LOG_CONSOLE_THREAD)
+ ENDIF()
+ IF(UHD_LOG_CONSOLE_SRC)
+ ADD_DEFINITIONS(-DUHD_LOG_CONSOLE_SRC)
+ ENDIF()
+ENDIF()
+
+SET(UHD_LOG_FASTPATH_DISABLE "OFF" CACHE BOOL "Disable printing of fastpath logging symbols to stderr (DOSU)")
+IF(UHD_LOG_FASTPATH_DISABLE)
+ ADD_DEFINITIONS(-DUHD_LOG_FASTPATH_DISABLE)
+ENDIF()
+
+IF(MSVC OR CYGWIN)
+ SET(UHD_LOG_CONSOLE_COLOR "OFF" CACHE BOOL "Enable color output on the terminal")
+ELSE()
+ SET(UHD_LOG_CONSOLE_COLOR "ON" CACHE BOOL "Enable color output on the terminal")
+ENDIF()
+
+IF(UHD_LOG_CONSOLE_COLOR)
+ ADD_DEFINITIONS(-DUHD_LOG_CONSOLE_COLOR)
+ENDIF()
+
+SET(UHD_LOG_FILE "" CACHE FILE "Set UHD log file to a file in an existing directory")
+IF(NOT UHD_LOG_FILE STREQUAL "")
+ ADD_DEFINITIONS(-DUHD_LOG_FILE=${UHD_LOG_FILE})
+ENDIF()
+
diff --git a/host/cmake/Modules/UHDPackage.cmake b/host/cmake/Modules/UHDPackage.cmake
index 3b8b69ab5..138abacb9 100644
--- a/host/cmake/Modules/UHDPackage.cmake
+++ b/host/cmake/Modules/UHDPackage.cmake
@@ -55,13 +55,12 @@ ENDIF()
########################################################################
# Setup package file name
########################################################################
-
IF(DEBIAN AND LIBUHD_PKG)
- SET(CPACK_PACKAGE_FILE_NAME "libuhd${UHD_VERSION_MAJOR}_${TRIMMED_UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
+ SET(CPACK_PACKAGE_FILE_NAME "libuhd${UHD_VERSION_MAJOR}_${UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
ELSEIF(DEBIAN AND LIBUHDDEV_PKG)
- SET(CPACK_PACKAGE_FILE_NAME "libuhd-dev_${TRIMMED_UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
+ SET(CPACK_PACKAGE_FILE_NAME "libuhd-dev_${UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
ELSEIF(DEBIAN AND UHDHOST_PKG)
- SET(CPACK_PACKAGE_FILE_NAME "uhd-host_${TRIMMED_UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
+ SET(CPACK_PACKAGE_FILE_NAME "uhd-host_${UHD_VERSION}_${CMAKE_SYSTEM_PROCESSOR}" CACHE INTERNAL "")
ELSE()
IF(DEBIAN OR REDHAT)
FIND_PROGRAM(LSB_RELEASE_EXECUTABLE lsb_release)
@@ -124,7 +123,7 @@ SET(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_SOURCE_DIR}/LICENSE)
# Setup CPack Source
########################################################################
-SET(CPACK_SOURCE_PACKAGE_FILE_NAME "uhd-${TRIMMED_UHD_VERSION}" CACHE INTERNAL "")
+SET(CPACK_SOURCE_PACKAGE_FILE_NAME "uhd-${UHD_VERSION}" CACHE INTERNAL "")
SET(CPACK_SOURCE_IGNORE_FILES "\\\\.git*;\\\\.swp$")
########################################################################
diff --git a/host/cmake/Modules/UHDVersion.cmake b/host/cmake/Modules/UHDVersion.cmake
index 217769beb..d557fdd29 100644
--- a/host/cmake/Modules/UHDVersion.cmake
+++ b/host/cmake/Modules/UHDVersion.cmake
@@ -27,11 +27,11 @@ FIND_PACKAGE(Git QUIET)
# - Increment patch for bugfixes and docs
# - set UHD_VERSION_DEVEL to true for master and development branches
########################################################################
-SET(UHD_VERSION_MAJOR 003)
-SET(UHD_VERSION_API 010)
-SET(UHD_VERSION_ABI 001)
-SET(UHD_VERSION_PATCH 001)
-SET(UHD_VERSION_DEVEL FALSE)
+SET(UHD_VERSION_MAJOR 3)
+SET(UHD_VERSION_API 11)
+SET(UHD_VERSION_ABI 0)
+SET(UHD_VERSION_PATCH git)
+SET(UHD_VERSION_DEVEL TRUE)
########################################################################
# If we're on a development branch, we skip the patch version
@@ -74,28 +74,6 @@ IF(GIT_FOUND)
ENDIF(GIT_FOUND)
########################################################################
-# Set up trimmed version numbers for DLL resource files and packages
-########################################################################
-FUNCTION(DEPAD_NUM input_num output_num)
- EXECUTE_PROCESS(
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
- COMMAND ${PYTHON_EXECUTABLE} -c "print(\"${input_num}\".lstrip(\"0\") or 0)"
- OUTPUT_VARIABLE depadded_num OUTPUT_STRIP_TRAILING_WHITESPACE
- )
- SET(${output_num} ${depadded_num} PARENT_SCOPE)
-ENDFUNCTION(DEPAD_NUM)
-
-DEPAD_NUM(${UHD_VERSION_MAJOR} TRIMMED_VERSION_MAJOR)
-DEPAD_NUM(${UHD_VERSION_API} TRIMMED_VERSION_API)
-DEPAD_NUM(${UHD_VERSION_ABI} TRIMMED_VERSION_ABI)
-IF(UHD_VERSION_DEVEL)
- SET(TRIMMED_VERSION_PATCH ${UHD_VERSION_PATCH})
-ELSE(UHD_VERSION_DEVEL)
- DEPAD_NUM(${UHD_VERSION_PATCH} TRIMMED_VERSION_PATCH)
-ENDIF(UHD_VERSION_DEVEL)
-SET(TRIMMED_UHD_VERSION "${TRIMMED_VERSION_MAJOR}.${TRIMMED_VERSION_API}.${TRIMMED_VERSION_ABI}.${TRIMMED_VERSION_PATCH}")
-
-########################################################################
# Version information discovery through git log
########################################################################
diff --git a/host/docs/general.dox b/host/docs/general.dox
index ff407a304..06a8887c9 100644
--- a/host/docs/general.dox
+++ b/host/docs/general.dox
@@ -219,7 +219,7 @@ registered at a time. Make **register_handler** your first call into
the UHD library:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
-#include <uhd/utils/msg.hpp>
+
void my_handler(uhd::msg::type_t type, const std::string &msg){
//handle the message...
diff --git a/host/docs/logging.dox b/host/docs/logging.dox
new file mode 100644
index 000000000..12ccbd789
--- /dev/null
+++ b/host/docs/logging.dox
@@ -0,0 +1,89 @@
+/*! \page page_logging UHD Logging
+
+During operation, UHD creates logging messages which are handled by its logging
+subsystem. Everything logging-related is defined in
+include/uhd/utils/log.hpp. See also \ref loghpp_logging.
+
+UHD itself never prints anything to stdout, which means that stdout can be used
+for applications using UHD. Actual logging messages are handled by separate
+logging backends. By default, UHD has two backends: A *console* backend, which
+prints logging messages to stderr (more accurately, to `std::clog`), and
+a *file* backend, which writes all logging messages into a log file.
+
+
+\section logging_levels Log levels
+
+UHD defines the following log levels (see also \ref loghpp_levels):
+
+- Trace: Typical trace messages are along the lines of "this function is
+ currently being executed" or "this is the current state of my local scope, the
+ following variables have the following values". Trace messages are numerous,
+ and should usually be safe to ignore, unless some tricky debugging is
+ happening which requires knowing exactly what UHD is doing when and where and
+ how.
+- Debug: Messages should have this level when it's likely the message can be
+ useful for debugging erroneous behaviour. A good reason to use debug level
+ messages is when user input is in the mix.
+- Info: Whenever a message is supposed to be seen by the user but indicates
+ regular behaviour, it's 'info'. Messages at this level are rare.
+- Warning: Anything that indicates something could be wrong, but operation can
+ continue for now, is a warning.
+- Error: If something goes wrong, it's an error message. This is typically
+ accompanied by termination of the program.
+- Fatal: SOMETHING'S REALLY WRONG
+
+Whenever a log level is set, it means all log levels with higher severity are
+also printed. Example: If the log level is set to 'debug', it won't print
+messages that are at 'trace' severity, but it will print 'info' and all the
+other levels.
+
+There are multiple ways to set the log level:
+
+- Minimum level: This is a compile-time option. At compile-time, it disables
+ all log messages permanently that are below this log level.
+- Global level: The global level can be changed at runtime, and applies to all
+ backends.
+- Per-backend level: For example, the logfile and the console output could have
+ different logging levels.
+
+Log levels are evaluated in this order. If the minimum level is 'debug', no
+'trace' message will ever be printed. If the global level is 'info', no 'debug'
+message will ever be printed -- but this can be changed at runtime. Finally,
+if a message has passed both the minimum and global levels, it is handled by
+the individual backends if their log levels are appropriately set.
+
+For more info on how to set the individual log levels, see
+include/uhd/utils/log.hpp.
+
+\section logging_macros Logging Macros
+
+When log messages are generated, the appropriate logging macros should always
+be used. There are two types of macros:
+
+~~~~~~~~~~~~~~{.cpp}
+UHD_LOG_DEBUG("component", "message");
+UHD_LOGGER_DEBUG("component") << "message";
+~~~~~~~~~~~~~~
+
+The difference is that the former style is completely compiled out when the
+minimum log level is set accordingly, whereas the latter is not, but offers
+more flexibility through the streaming operators.
+Use the former if speed matters.
+
+The macros require three pieces of information:
+
+- The log level. This is done by choosing the appropriate macro, e.g.,
+ `UHD_LOGGER_DEBUG` vs. `UHD_LOGGER_INFO`.
+- The component the log message originates from. This makes it easier to filter
+ log messages.
+- The log message itself.
+
+\section logging_backends Logging Backends
+
+Anything that acts upon a log message is called a backend. UHD defines two by
+default: A logfile, and a console backend. More backends can be added by
+calling uhd::log::add_logger().
+
+*/
+// vim:ft=doxygen:
+
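As an aside (not part of this changeset): a minimal sketch of how the macros documented in logging.dox above would be used from application code. Only UHD_LOG_DEBUG, UHD_LOGGER_INFO, uhd::log::add_logger() and the header uhd/utils/log.hpp are taken from the documentation; the component name "MYAPP", the sample values and the surrounding main() are hypothetical.
~~~~~~~~~~~~~~{.cpp}
#include <uhd/utils/log.hpp>

int main()
{
    // Fixed-string form: compiled out entirely when UHD_LOG_MIN_LEVEL is set
    // above 'debug' at compile time. "MYAPP" is a hypothetical component name.
    UHD_LOG_DEBUG("MYAPP", "initializing device");

    // Streaming form: always compiled in, but more flexible.
    const double rate = 1e6;
    UHD_LOGGER_INFO("MYAPP") << "Requested sample rate: " << rate << " Sps";

    // Additional backends (beyond the default console and file backends)
    // would be registered via uhd::log::add_logger(), as noted above.
    return 0;
}
~~~~~~~~~~~~~~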
diff --git a/host/docs/uhd.dox b/host/docs/uhd.dox
index 5474d42e2..099bbe415 100644
--- a/host/docs/uhd.dox
+++ b/host/docs/uhd.dox
@@ -14,6 +14,7 @@ Some additional pages on developing UHD are also available here:
\li \subpage page_stream
\li \subpage page_rtp
\li \subpage page_semver
+\li \subpage page_logging
\li \subpage page_rdtesting
*/
diff --git a/host/docs/usrp_e3x0.dox b/host/docs/usrp_e3x0.dox
index 929b9f635..f2b184e4e 100644
--- a/host/docs/usrp_e3x0.dox
+++ b/host/docs/usrp_e3x0.dox
@@ -150,6 +150,7 @@ for the E3XX if you are doing custom GNU Radio development work.
\code{.sh}
$ mkdir build-arm
+$ cd build-arm
$ cmake -Wno-dev -DCMAKE_TOOLCHAIN_FILE=../cmake/Toolchains/oe-sdk_cross.cmake \
    -DCMAKE_INSTALL_PREFIX=/usr -DENABLE_GR_VOCODER=OFF -DENABLE_GR_ATSC=OFF \
    -DENABLE_GR_DTV=OFF -DENABLE_DOXYGEN=OFF ../
\endcode
diff --git a/host/examples/gpio.cpp b/host/examples/gpio.cpp
index 02c73e96b..e47b2ab27 100644
--- a/host/examples/gpio.cpp
+++ b/host/examples/gpio.cpp
@@ -129,7 +129,7 @@ void output_reg_values(
for (int i = num_bits - 1; i >= 0; i--)
std::cout << (boost::format(" %2d") % i);
std::cout << std::endl;
- BOOST_FOREACH(std::string &attr, attrs)
+ for(std::string &attr: attrs)
{
std::cout << (boost::format("%10s:%s")
% attr % to_bit_string(uint32_t(usrp->get_gpio_attr(bank, attr)), num_bits))
diff --git a/host/examples/init_usrp/CMakeLists.txt b/host/examples/init_usrp/CMakeLists.txt
index 4ce51125f..139f9b853 100644
--- a/host/examples/init_usrp/CMakeLists.txt
+++ b/host/examples/init_usrp/CMakeLists.txt
@@ -46,6 +46,25 @@ find_package(UHD "3.8.0" REQUIRED)
#find_package(UHD 3.8.1 EXACT REQUIRED)
### Configure Compiler ########################################################
+IF(CMAKE_VERSION VERSION_LESS "3.1")
+ IF(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU")
+ SET(CMAKE_CXX_FLAGS "--std=gnu++11 ${CMAKE_CXX_FLAGS}")
+ ELSEIF(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
+ IF("${IS_APPLE}" STREQUAL "")
+ SET(CMAKE_CXX_FLAGS "-std=c++11 ${CMAKE_CXX_FLAGS}")
+ ELSE()
+ SET(CMAKE_CXX_FLAGS "-std=c++11 -stdlib=libc++ ${CMAKE_CXX_FLAGS}")
+ ENDIF()
+ ENDIF()
+ELSE()
+ SET(CMAKE_CXX_STANDARD 11)
+ENDIF()
+
+IF(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD" AND ${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
+ SET(CMAKE_EXE_LINKER_FLAGS "-lthr ${CMAKE_EXE_LINKER_FLAGS}")
+ SET(CMAKE_CXX_FLAGS "-stdlib=libc++ ${CMAKE_CXX_FLAGS}")
+ENDIF()
+
include_directories(
${Boost_INCLUDE_DIRS}
${UHD_INCLUDE_DIRS}
diff --git a/host/examples/rx_samples_to_file.cpp b/host/examples/rx_samples_to_file.cpp
index 934dce586..444fd155c 100644
--- a/host/examples/rx_samples_to_file.cpp
+++ b/host/examples/rx_samples_to_file.cpp
@@ -37,6 +37,7 @@ template<typename samp_type> void recv_to_file(
uhd::usrp::multi_usrp::sptr usrp,
const std::string &cpu_format,
const std::string &wire_format,
+ const std::string &channel,
const std::string &file,
size_t samps_per_buff,
unsigned long long num_requested_samples,
@@ -50,6 +51,9 @@ template<typename samp_type> void recv_to_file(
unsigned long long num_total_samps = 0;
//create a receive streamer
uhd::stream_args_t stream_args(cpu_format,wire_format);
+ std::vector<size_t> channel_nums;
+ channel_nums.push_back(boost::lexical_cast<size_t>(channel));
+ stream_args.channels = channel_nums;
uhd::rx_streamer::sptr rx_stream = usrp->get_rx_stream(stream_args);
uhd::rx_metadata_t md;
@@ -209,7 +213,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
uhd::set_thread_priority_safe();
//variables to be set by po
- std::string args, file, type, ant, subdev, ref, wirefmt;
+ std::string args, file, type, ant, subdev, ref, wirefmt, channel;
size_t total_num_samps, spb;
double rate, freq, gain, bw, total_time, setup_time;
@@ -229,9 +233,10 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
("gain", po::value<double>(&gain), "gain for the RF chain")
("ant", po::value<std::string>(&ant), "antenna selection")
("subdev", po::value<std::string>(&subdev), "subdevice specification")
+ ("channel", po::value<std::string>(&channel)->default_value("0"), "which channel to use")
("bw", po::value<double>(&bw), "analog frontend filter bandwidth in Hz")
("ref", po::value<std::string>(&ref)->default_value("internal"), "reference source (internal, external, mimo)")
- ("wirefmt", po::value<std::string>(&wirefmt)->default_value("sc16"), "wire format (sc8 or sc16)")
+ ("wirefmt", po::value<std::string>(&wirefmt)->default_value("sc16"), "wire format (sc8, sc16 or s16)")
("setup", po::value<double>(&setup_time)->default_value(1.0), "seconds of setup time")
("progress", "periodically display short-term bandwidth")
("stats", "show average bandwidth on exit")
@@ -329,12 +334,19 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
}
#define recv_to_file_args(format) \
- (usrp, format, wirefmt, file, spb, total_num_samps, total_time, bw_summary, stats, null, enable_size_map, continue_on_bad_packet)
+ (usrp, format, wirefmt, channel, file, spb, total_num_samps, total_time, bw_summary, stats, null, enable_size_map, continue_on_bad_packet)
//recv to file
- if (type == "double") recv_to_file<std::complex<double> >recv_to_file_args("fc64");
- else if (type == "float") recv_to_file<std::complex<float> >recv_to_file_args("fc32");
- else if (type == "short") recv_to_file<std::complex<short> >recv_to_file_args("sc16");
- else throw std::runtime_error("Unknown type " + type);
+ if (wirefmt == "s16") {
+ if (type == "double") recv_to_file<double>recv_to_file_args("f64");
+ else if (type == "float") recv_to_file<float>recv_to_file_args("f32");
+ else if (type == "short") recv_to_file<short>recv_to_file_args("s16");
+ else throw std::runtime_error("Unknown type " + type);
+ } else {
+ if (type == "double") recv_to_file<std::complex<double> >recv_to_file_args("fc64");
+ else if (type == "float") recv_to_file<std::complex<float> >recv_to_file_args("fc32");
+ else if (type == "short") recv_to_file<std::complex<short> >recv_to_file_args("sc16");
+ else throw std::runtime_error("Unknown type " + type);
+ }
//finished
std::cout << std::endl << "Done!" << std::endl << std::endl;
diff --git a/host/examples/test_dboard_coercion.cpp b/host/examples/test_dboard_coercion.cpp
index 81c45fcb3..0119f39cd 100644
--- a/host/examples/test_dboard_coercion.cpp
+++ b/host/examples/test_dboard_coercion.cpp
@@ -102,7 +102,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
std::cout << boost::format("\nTesting %s coercion...") % type << std::endl;
- BOOST_FOREACH(const uhd::range_t &range, freq_ranges){
+ for(const uhd::range_t &range: freq_ranges){
double freq_begin = range.start();
double freq_end = range.stop();
@@ -152,7 +152,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
bool has_sensor = (std::find(dboard_sensor_names.begin(), dboard_sensor_names.end(), "lo_locked")) != dboard_sensor_names.end();
- BOOST_FOREACH(double freq, freqs){
+ for(double freq: freqs){
//Testing for successful frequency tune
if(type == "TX") usrp->set_tx_freq(freq,chan);
@@ -212,7 +212,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
//Testing for successful gain tune
- BOOST_FOREACH(double gain, gains){
+ for(double gain: gains){
if(type == "TX") usrp->set_tx_gain(gain,chan);
else usrp->set_rx_gain(gain,chan);
@@ -266,7 +266,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
}
else{
results += "USRP did not successfully tune to the following frequencies: ";
- BOOST_FOREACH(double bad_freq, bad_tune_freqs){
+ for(double bad_freq: bad_tune_freqs){
if(bad_freq != *bad_tune_freqs.begin()) results += ", ";
results += MHz_str(bad_freq);
}
@@ -282,7 +282,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
}
else{
results += "LO did not lock at the following frequencies: ";
- BOOST_FOREACH(double bad_freq, no_lock_freqs){
+ for(double bad_freq: no_lock_freqs){
if(bad_freq != *no_lock_freqs.begin()) results += ", ";
results += MHz_str(bad_freq);
}
@@ -298,7 +298,7 @@ std::string coercion_test(uhd::usrp::multi_usrp::sptr usrp, std::string type, in
}
else{
results += "USRP did not successfully set gain under the following circumstances:";
- BOOST_FOREACH(double_pair bad_pair, bad_gain_vals){
+ for(double_pair bad_pair: bad_gain_vals){
double bad_freq = bad_pair.first;
double bad_gain = bad_pair.second;
results += str(boost::format("\nFrequency: %s, Gain: %5.2f") % MHz_str(bad_freq) % bad_gain);
diff --git a/host/examples/test_messages.cpp b/host/examples/test_messages.cpp
index 43b035d19..8a50e4c85 100644
--- a/host/examples/test_messages.cpp
+++ b/host/examples/test_messages.cpp
@@ -23,7 +23,6 @@
#include <uhd/usrp/multi_usrp.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/program_options.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/format.hpp>
#include <cstdlib>
@@ -330,7 +329,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
//init result counts
uhd::dict<std::string, size_t> failures, successes;
- BOOST_FOREACH(const std::string &key, tests.keys()){
+ for(const std::string &key: tests.keys()){
failures[key] = 0;
successes[key] = 0;
}
@@ -352,7 +351,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
//print the result summary
bool any_failure = false;
std::cout << std::endl << "Summary:" << std::endl << std::endl;
- BOOST_FOREACH(const std::string &key, tests.keys()){
+ for(const std::string &key: tests.keys()){
std::cout << boost::format(
"%s -> %3u successes, %3u failures"
) % key % successes[key] % failures[key] << std::endl;
diff --git a/host/examples/tx_samples_from_file.cpp b/host/examples/tx_samples_from_file.cpp
index cc7e963d5..b09efe454 100644
--- a/host/examples/tx_samples_from_file.cpp
+++ b/host/examples/tx_samples_from_file.cpp
@@ -33,17 +33,11 @@ static bool stop_signal_called = false;
void sig_int_handler(int){stop_signal_called = true;}
template<typename samp_type> void send_from_file(
- uhd::usrp::multi_usrp::sptr usrp,
- const std::string &cpu_format,
- const std::string &wire_format,
+ uhd::tx_streamer::sptr tx_stream,
const std::string &file,
size_t samps_per_buff
){
- //create a transmit streamer
- uhd::stream_args_t stream_args(cpu_format, wire_format);
- uhd::tx_streamer::sptr tx_stream = usrp->get_tx_stream(stream_args);
-
uhd::tx_metadata_t md;
md.start_of_burst = false;
md.end_of_burst = false;
@@ -69,7 +63,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
uhd::set_thread_priority_safe();
//variables to be set by po
- std::string args, file, type, ant, subdev, ref, wirefmt;
+ std::string args, file, type, ant, subdev, ref, wirefmt, channel;
size_t spb;
double rate, freq, gain, bw, delay, lo_off;
@@ -91,6 +85,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
("ref", po::value<std::string>(&ref)->default_value("internal"), "reference source (internal, external, mimo)")
("wirefmt", po::value<std::string>(&wirefmt)->default_value("sc16"), "wire format (sc8 or sc16)")
("delay", po::value<double>(&delay)->default_value(0.0), "specify a delay between repeated transmission of file")
+ ("channel", po::value<std::string>(&channel)->default_value("0"), "which channel to use")
("repeat", "repeatedly transmit file")
("int-n", "tune USRP with integer-n tuning")
;
@@ -186,11 +181,22 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
std::cout << "Press Ctrl + C to stop streaming..." << std::endl;
}
+ //create a transmit streamer
+ std::string cpu_format;
+ std::vector<size_t> channel_nums;
+ if (type == "double") cpu_format = "fc64";
+ else if (type == "float") cpu_format = "fc32";
+ else if (type == "short") cpu_format = "sc16";
+ uhd::stream_args_t stream_args(cpu_format, wirefmt);
+ channel_nums.push_back(boost::lexical_cast<size_t>(channel));
+ stream_args.channels = channel_nums;
+ uhd::tx_streamer::sptr tx_stream = usrp->get_tx_stream(stream_args);
+
//send from file
do{
- if (type == "double") send_from_file<std::complex<double> >(usrp, "fc64", wirefmt, file, spb);
- else if (type == "float") send_from_file<std::complex<float> >(usrp, "fc32", wirefmt, file, spb);
- else if (type == "short") send_from_file<std::complex<short> >(usrp, "sc16", wirefmt, file, spb);
+ if (type == "double") send_from_file<std::complex<double> >(tx_stream, file, spb);
+ else if (type == "float") send_from_file<std::complex<float> >(tx_stream, file, spb);
+ else if (type == "short") send_from_file<std::complex<short> >(tx_stream, file, spb);
else throw std::runtime_error("Unknown type " + type);
if(repeat and delay != 0.0) boost::this_thread::sleep(boost::posix_time::milliseconds(delay));
diff --git a/host/examples/tx_waveforms.cpp b/host/examples/tx_waveforms.cpp
index b2a8f944c..d596d9ed4 100644
--- a/host/examples/tx_waveforms.cpp
+++ b/host/examples/tx_waveforms.cpp
@@ -23,7 +23,6 @@
#include <uhd/exception.hpp>
#include <boost/program_options.hpp>
#include <boost/math/special_functions/round.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/thread.hpp>
#include <boost/lexical_cast.hpp>
diff --git a/host/include/config.h.in b/host/include/config.h.in
index 8e72a1f00..0e8982488 100644
--- a/host/include/config.h.in
+++ b/host/include/config.h.in
@@ -18,10 +18,10 @@
#cmakedefine HAVE_LOG2
/* Version macros */
-#cmakedefine UHD_VERSION_MAJOR ${TRIMMED_VERSION_MAJOR}
-#cmakedefine UHD_VERSION_API ${TRIMMED_VERSION_API}
-#cmakedefine UHD_VERSION_ABI ${TRIMMED_VERSION_ABI}
-#cmakedefine UHD_VERSION_PATCH ${TRIMMED_VERSION_PATCH}
+#cmakedefine UHD_VERSION_MAJOR ${UHD_VERSION_MAJOR}
+#cmakedefine UHD_VERSION_API ${UHD_VERSION_API}
+#cmakedefine UHD_VERSION_ABI ${UHD_VERSION_ABI}
+#cmakedefine UHD_VERSION_PATCH ${UHD_VERSION_PATCH}
#cmakedefine ENABLE_USB
#ifndef UHD_VERSION
#cmakedefine UHD_VERSION @UHD_VERSION_ADDED@
diff --git a/host/include/uhd/CMakeLists.txt b/host/include/uhd/CMakeLists.txt
index e31ff80a0..805db13de 100644
--- a/host/include/uhd/CMakeLists.txt
+++ b/host/include/uhd/CMakeLists.txt
@@ -18,6 +18,7 @@
ADD_SUBDIRECTORY(rfnoc)
ADD_SUBDIRECTORY(transport)
ADD_SUBDIRECTORY(types)
+ADD_SUBDIRECTORY(cal)
ADD_SUBDIRECTORY(usrp)
ADD_SUBDIRECTORY(usrp_clock)
ADD_SUBDIRECTORY(utils)
@@ -33,7 +34,6 @@ UHD_INSTALL(FILES
convert.hpp
deprecated.hpp
device.hpp
- device_deprecated.ipp
exception.hpp
property_tree.ipp
property_tree.hpp
diff --git a/host/include/uhd/cal/CMakeLists.txt b/host/include/uhd/cal/CMakeLists.txt
new file mode 100644
index 000000000..14107ee53
--- /dev/null
+++ b/host/include/uhd/cal/CMakeLists.txt
@@ -0,0 +1,23 @@
+#
+# Copyright 2016 Ettus Research
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+UHD_INSTALL(FILES
+ container.hpp
+ power_container.hpp
+ DESTINATION ${INCLUDE_DIR}/uhd/cal
+ COMPONENT headers
+)
diff --git a/host/include/uhd/cal/container.hpp b/host/include/uhd/cal/container.hpp
new file mode 100644
index 000000000..e4f418311
--- /dev/null
+++ b/host/include/uhd/cal/container.hpp
@@ -0,0 +1,104 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#ifndef INCLUDED_UHD_CAL_CONTAINER_HPP
+#define INCLUDED_UHD_CAL_CONTAINER_HPP
+
+#include <uhd/config.hpp>
+#include <boost/serialization/serialization.hpp>
+#include <boost/serialization/vector.hpp>
+#include <boost/serialization/string.hpp>
+#include <boost/serialization/map.hpp>
+#include <boost/archive/text_iarchive.hpp>
+#include <boost/archive/text_oarchive.hpp>
+#include <boost/shared_ptr.hpp>
+
+namespace uhd {
+namespace cal {
+
+class base_container {
+public:
+ typedef std::map<std::string, std::string> metadata_t;
+ typedef boost::shared_ptr<base_container> sptr;
+};
+
+/*!
+ * An interface for creating and managing a generic calibration
+ * data container.
+ *
+ * These containers are used to represent N dimensional data structures
+ * in order to accommodate a mapping from multi-variable input to a scalar
+ * value (e.g. gain, frequency, temperature -> power level [dBm]).
+ *
+ * The container only supports mapping inputs of the same type.
+ *
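+ * A minimal usage sketch, based on the power_container specialization
+ * declared in uhd/cal/power_container.hpp (the numeric values below are
+ * made up for illustration only):
+ *
+ * \code
+ * uhd::cal::power_container::sptr cal = uhd::cal::power_container::make();
+ * std::vector<double> args;
+ * args.push_back(10.0); // gain
+ * args.push_back(1e9);  // frequency
+ * cal->add(-25.0, args);              // record a measured power level [dBm]
+ * double power_dbm = cal->get(args);  // look up the stored mapping
+ * \endcode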
+ */
+template<typename in_type, typename out_type>
+class UHD_API cal_container : public base_container {
+public:
+ typedef std::map<in_type, out_type> container_t;
+
+ /*!
+ * Get the mapping from an input to an output
+ * from the calibration container.
+ *
+ * \param args input values
+ * \returns the output of the mapping (a scalar value)
+ * \throws uhd::assertion_error if the dimensions of the input args
+ * are incorrect for this container
+ */
+ virtual out_type get(const in_type &args) = 0;
+
+ /*!
+ * Add a data point to the container.
+ * This function records a mapping R^n -> R between an input vector
+ * and output scalar.
+ *
+ * \param output the output of the data point mapping
+ * \param args input values
+ */
+ virtual void add(const out_type output, const in_type &args) = 0;
+
+ /*!
+ * Associate some metadata with the container.
+ *
+ * \param data a map of metadata (string -> string).
+ */
+ virtual void add_metadata(const metadata_t &data) = 0;
+
+ /*!
+ * Retrieve metadata from the container.
+ *
+ * \returns map of metadata.
+ */
+ virtual const metadata_t &get_metadata() = 0;
+
+public:
+ typedef boost::archive::text_iarchive iarchive_type;
+ typedef boost::archive::text_oarchive oarchive_type;
+
+protected:
+ friend class boost::serialization::access;
+
+ virtual void serialize(iarchive_type & ar, const unsigned int) = 0;
+ virtual void serialize(oarchive_type & ar, const unsigned int) = 0;
+};
+
+} // namespace cal
+} // namespace uhd
+
+#endif /* INCLUDED_UHD_CAL_CONTAINER_HPP */
diff --git a/host/include/uhd/cal/power_container.hpp b/host/include/uhd/cal/power_container.hpp
new file mode 100644
index 000000000..37f7bd8df
--- /dev/null
+++ b/host/include/uhd/cal/power_container.hpp
@@ -0,0 +1,79 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#ifndef INCLUDED_UHD_CAL_POWER_CONTAINER_HPP
+#define INCLUDED_UHD_CAL_POWER_CONTAINER_HPP
+
+#include <uhd/config.hpp>
+#include <uhd/cal/container.hpp>
+#include <boost/shared_ptr.hpp>
+
+namespace uhd {
+namespace cal {
+
+class UHD_API power_container : public cal_container<std::vector<double>, double> {
+public:
+ typedef boost::shared_ptr<power_container> sptr;
+
+ /*!
+ * Create a container for data related to power calibration.
+ *
+ * \returns shared pointer to the container
+ */
+ static sptr make();
+
+ /*!
+ * Get the mapping from an input to an output
+ * from the calibration container.
+ *
+ * \param args input values
+ * \returns the output of the mapping (a scalar value)
+ * \throws uhd::assertion_error if the number of input values is incorrect
+ * for the container type
+ */
+ virtual double get(const std::vector<double> &args) = 0;
+
+ /*!
+ * Add a data point to the container.
+ * This function records a mapping R^n -> R between an input vector
+ * and output scalar. For example, a mapping might be
+ * (gain, frequency, temperature) -> power level.
+ *
+ * \param output the output of the data point mapping
+ * \param args input values
+ */
+ virtual void add(const double output, const std::vector<double> &args) = 0;
+
+ /*!
+ * Associate some metadata with the container.
+ *
+ * \param data a map of metadata (string -> string).
+ */
+ virtual void add_metadata(const metadata_t &data) = 0;
+
+ /*!
+ * Retrieve metadata from the container.
+ *
+ * \returns map of metadata.
+ */
+ virtual const metadata_t &get_metadata() = 0;
+};
+
+} // namespace cal
+} // namespace uhd
+
+#endif /* INCLUDED_UHD_CAL_POWER_CONTAINER_HPP */
diff --git a/host/include/uhd/device.hpp b/host/include/uhd/device.hpp
index 894b48c47..28b81a0f6 100644
--- a/host/include/uhd/device.hpp
+++ b/host/include/uhd/device.hpp
@@ -105,14 +105,22 @@ public:
*/
virtual tx_streamer::sptr get_tx_stream(const stream_args_t &args) = 0;
+ /*!
+ * Receive an asynchronous message from the device.
+ * \param async_metadata the metadata to be filled in
+ * \param timeout the timeout in seconds to wait for a message
+ * \return true when the async_metadata is valid, false for timeout
+ */
+ virtual bool recv_async_msg(
+ async_metadata_t &async_metadata, double timeout = 0.1
+ ) = 0;
+
//! Get access to the underlying property structure
uhd::property_tree::sptr get_tree(void) const;
//! Get device type
device_filter_t get_device_type() const;
- #include <uhd/device_deprecated.ipp>
-
protected:
uhd::property_tree::sptr _tree;
device_filter_t _type;
diff --git a/host/include/uhd/device3.hpp b/host/include/uhd/device3.hpp
index da23bb263..a2e530321 100644
--- a/host/include/uhd/device3.hpp
+++ b/host/include/uhd/device3.hpp
@@ -22,6 +22,7 @@
#include <uhd/rfnoc/graph.hpp>
#include <uhd/rfnoc/block_ctrl_base.hpp>
#include <boost/units/detail/utility.hpp>
+#include <boost/thread/mutex.hpp>
#include <vector>
namespace uhd {
@@ -53,12 +54,14 @@ class UHD_API device3 : public uhd::device {
*
* \param block_id Canonical block name (e.g. "0/FFT_1").
* \return true if a block with the specified id exists
+ * \note this access is not thread-safe if performed during block enumeration
*/
bool has_block(const rfnoc::block_id_t &block_id) const;
/*! Same as has_block(), but with a type check.
*
* \return true if a block of type T with the specified id exists
+ * \note this access is not thread-safe if performed during block enumeration
*/
template <typename T>
bool has_block(const rfnoc::block_id_t &block_id) const
@@ -76,6 +79,7 @@ class UHD_API device3 : public uhd::device {
* on this device), it will throw a uhd::lookup_error.
*
* \param block_id Canonical block name (e.g. "0/FFT_1").
+ * \note this access is not thread-safe if performed during block enumeration
*/
rfnoc::block_ctrl_base::sptr get_block_ctrl(const rfnoc::block_id_t &block_id) const;
@@ -91,6 +95,7 @@ class UHD_API device3 : public uhd::device {
* uhd::rfnoc::my_block_ctrl::sptr block_controller = get_block_ctrl<my_block_ctrl>("0/MyBlock_0");
* block_controller->my_own_block_method();
* \endcode
+ * \note this access is not thread-safe if performed during block enumeration
*/
template <typename T>
boost::shared_ptr<T> get_block_ctrl(const rfnoc::block_id_t &block_id) const
@@ -115,6 +120,7 @@ class UHD_API device3 : public uhd::device {
* // Assume DEV is a device3::sptr
* null_block_ctrl::sptr null_block = DEV->find_blocks<null_block_ctrl>("NullSrcSink");
* \endcode
+ * \note this access is not thread-safe if performed during block enumeration
*/
std::vector<rfnoc::block_id_t> find_blocks(const std::string &block_id_hint) const;
@@ -138,6 +144,8 @@ class UHD_API device3 : public uhd::device {
// It is the responsibility of the deriving class to make
// sure this gets correctly populated.
std::vector< rfnoc::block_ctrl_base::sptr > _rfnoc_block_ctrl;
+ //! Mutex to protect access to members
+ boost::mutex _block_ctrl_mutex;
};
} //namespace uhd
diff --git a/host/include/uhd/device_deprecated.ipp b/host/include/uhd/device_deprecated.ipp
deleted file mode 100644
index 2741df4e3..000000000
--- a/host/include/uhd/device_deprecated.ipp
+++ /dev/null
@@ -1,201 +0,0 @@
-//
-// Copyright 2010-2011 Ettus Research LLC
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-//
-
-//this file is included inside device class
-//it supports the old send/recv functions
-//this was replaced by the streamer API
-
-#define _lazymin(x, y) (((x) > (y))? (y) : (x))
-
-/*!
- * Send modes for the device send routine.
- */
-enum send_mode_t{
- //! Tells the send routine to send the entire buffer
- SEND_MODE_FULL_BUFF = 0,
- //! Tells the send routine to return after one packet
- SEND_MODE_ONE_PACKET = 1
-};
-
-/*!
- * Recv modes for the device recv routine.
- */
-enum recv_mode_t{
- //! Tells the recv routine to recv the entire buffer
- RECV_MODE_FULL_BUFF = 0,
- //! Tells the recv routine to return after one packet
- RECV_MODE_ONE_PACKET = 1
-};
-
-//! Typedef for a pointer to a single, or a collection of send buffers
-typedef ref_vector<const void *> send_buffs_type;
-
-//! Typedef for a pointer to a single, or a collection of recv buffers
-typedef ref_vector<void *> recv_buffs_type;
-
-/*!
- * Send buffers containing IF data described by the metadata.
- *
- * Send handles fragmentation as follows:
- * If the buffer has more samples than the maximum per packet,
- * the send method will fragment the samples across several packets.
- * Send will respect the burst flags when fragmenting to ensure
- * that start of burst can only be set on the first fragment and
- * that end of burst can only be set on the final fragment.
- * Fragmentation only applies in the full buffer send mode.
- *
- * This is a blocking call and will not return until the number
- * of samples returned have been read out of each buffer.
- * Under a timeout condition, the number of samples returned
- * may be less than the number of samples specified.
- *
- * \param buffs a vector of read-only memory containing IF data
- * \param nsamps_per_buff the number of samples to send, per buffer
- * \param metadata data describing the buffer's contents
- * \param io_type the type of data loaded in the buffer
- * \param send_mode tells send how to unload the buffer
- * \param timeout the timeout in seconds to wait on a packet
- * \return the number of samples sent
- */
-size_t send(
- const send_buffs_type &buffs,
- size_t nsamps_per_buff,
- const tx_metadata_t &metadata,
- const io_type_t &io_type,
- send_mode_t send_mode,
- double timeout = 0.1
-){
- if (_tx_streamer.get() == NULL or _tx_streamer->get_num_channels() != buffs.size() or _send_tid != io_type.tid){
- _send_tid = io_type.tid;
- _tx_streamer.reset(); //cleanup possible old one
- stream_args_t args;
- args.cpu_format = (_send_tid == io_type_t::COMPLEX_FLOAT32)? "fc32" : "sc16";
- args.otw_format = "sc16";
- args.args["noclear"] = "1";
- for (size_t ch = 0; ch < buffs.size(); ch++)
- args.channels.push_back(ch); //linear mapping
- _tx_streamer = get_tx_stream(args);
- }
- const size_t nsamps = (send_mode == SEND_MODE_ONE_PACKET)?
- _lazymin(nsamps_per_buff, get_max_send_samps_per_packet()) :
- nsamps_per_buff;
- return _tx_streamer->send(buffs, nsamps, metadata, timeout);
-}
-
-/*!
- * Receive buffers containing IF data described by the metadata.
- *
- * Receive handles fragmentation as follows:
- * If the buffer has insufficient space to hold all samples
- * that were received in a single packet over-the-wire,
- * then the buffer will be completely filled and the implementation
- * will hold a pointer into the remaining portion of the packet.
- * Subsequent calls will load from the remainder of the packet,
- * and will flag the metadata to show that this is a fragment.
- * The next call to receive, after the remainder becomes exhausted,
- * will perform an over-the-wire receive as usual.
- * See the rx metadata fragment flags and offset fields for details.
- *
- * This is a blocking call and will not return until the number
- * of samples returned have been written into each buffer.
- * Under a timeout condition, the number of samples returned
- * may be less than the number of samples specified.
- *
- * When using the full buffer recv mode, the metadata only applies
- * to the first packet received and written into the recv buffers.
- * Use the one packet recv mode to get per packet metadata.
- *
- * \param buffs a vector of writable memory to fill with IF data
- * \param nsamps_per_buff the size of each buffer in number of samples
- * \param metadata data to fill describing the buffer
- * \param io_type the type of data to fill into the buffer
- * \param recv_mode tells recv how to load the buffer
- * \param timeout the timeout in seconds to wait for a packet
- * \return the number of samples received or 0 on error
- */
-size_t recv(
- const recv_buffs_type &buffs,
- size_t nsamps_per_buff,
- rx_metadata_t &metadata,
- const io_type_t &io_type,
- recv_mode_t recv_mode,
- double timeout = 0.1
-){
- if (_rx_streamer.get() == NULL or _rx_streamer->get_num_channels() != buffs.size() or _recv_tid != io_type.tid){
- _recv_tid = io_type.tid;
- _rx_streamer.reset(); //cleanup possible old one
- stream_args_t args;
- args.cpu_format = (_recv_tid == io_type_t::COMPLEX_FLOAT32)? "fc32" : "sc16";
- args.otw_format = "sc16";
- args.args["noclear"] = "1";
- for (size_t ch = 0; ch < buffs.size(); ch++)
- args.channels.push_back(ch); //linear mapping
- _rx_streamer = get_rx_stream(args);
- }
- const size_t nsamps = (recv_mode == RECV_MODE_ONE_PACKET)?
- _lazymin(nsamps_per_buff, get_max_recv_samps_per_packet()) :
- nsamps_per_buff;
- return _rx_streamer->recv(buffs, nsamps, metadata, timeout);
-}
-
-/*!
- * Get the maximum number of samples per packet on send.
- * \return the number of samples
- */
-size_t get_max_send_samps_per_packet(void){
- if (_tx_streamer.get() == NULL){
- stream_args_t args;
- args.cpu_format = "fc32";
- args.otw_format = "sc16";
- args.args["noclear"] = "1";
- _tx_streamer = get_tx_stream(args);
- _send_tid = io_type_t::COMPLEX_FLOAT32;
- }
- return _tx_streamer->get_max_num_samps();
-}
-
-/*!
- * Get the maximum number of samples per packet on recv.
- * \return the number of samples
- */
-size_t get_max_recv_samps_per_packet(void){
- if (_rx_streamer.get() == NULL){
- stream_args_t args;
- args.cpu_format = "fc32";
- args.otw_format = "sc16";
- args.args["noclear"] = "1";
- _rx_streamer = get_rx_stream(args);
- _recv_tid = io_type_t::COMPLEX_FLOAT32;
- }
- return _rx_streamer->get_max_num_samps();
-}
-
-/*!
- * Receive and asynchronous message from the device.
- * \param async_metadata the metadata to be filled in
- * \param timeout the timeout in seconds to wait for a message
- * \return true when the async_metadata is valid, false for timeout
- */
-virtual bool recv_async_msg(
- async_metadata_t &async_metadata, double timeout = 0.1
-) = 0;
-
-private:
- rx_streamer::sptr _rx_streamer;
- io_type_t::tid_t _recv_tid;
- tx_streamer::sptr _tx_streamer;
- io_type_t::tid_t _send_tid;
diff --git a/host/include/uhd/image_loader.hpp b/host/include/uhd/image_loader.hpp
index fd4a96781..4ebac288e 100644
--- a/host/include/uhd/image_loader.hpp
+++ b/host/include/uhd/image_loader.hpp
@@ -21,6 +21,7 @@
#include <string>
#include <boost/function.hpp>
+#include <boost/noncopyable.hpp>
#include <uhd/config.hpp>
#include <uhd/types/device_addr.hpp>
diff --git a/host/include/uhd/rfnoc/block_ctrl_base.hpp b/host/include/uhd/rfnoc/block_ctrl_base.hpp
index e9dd9c233..87d9b3a71 100644
--- a/host/include/uhd/rfnoc/block_ctrl_base.hpp
+++ b/host/include/uhd/rfnoc/block_ctrl_base.hpp
@@ -46,7 +46,6 @@ struct make_args_t
{
make_args_t(const std::string &key="") :
device_index(0),
- is_big_endian(true),
block_name(""),
block_key(key)
{}
@@ -61,7 +60,6 @@ struct make_args_t
// property tree is /mboards/0, pass a subtree starting at /mboards/0
// to the constructor.
uhd::property_tree::sptr tree;
- bool is_big_endian;
//! The name of the block as it will be addressed
std::string block_name;
//! The key of the block, i.e. how it was registered
@@ -394,9 +392,6 @@ protected:
//! Root node of this block's properties
uhd::fs_path _root_path;
- //! Endianness of underlying transport (for data transport)
- bool _transport_is_big_endian;
-
//! Block definition (stores info about the block such as ports)
blockdef::sptr _block_def;
diff --git a/host/include/uhd/rfnoc/node_ctrl_base.hpp b/host/include/uhd/rfnoc/node_ctrl_base.hpp
index 6fd6fd288..0505556ec 100644
--- a/host/include/uhd/rfnoc/node_ctrl_base.hpp
+++ b/host/include/uhd/rfnoc/node_ctrl_base.hpp
@@ -32,7 +32,7 @@
namespace uhd {
namespace rfnoc {
-#define UHD_RFNOC_BLOCK_TRACE() UHD_LOGV(never) << "[" << unique_id() << "] "
+#define UHD_RFNOC_BLOCK_TRACE() UHD_LOGGER_TRACE("RFNOC")
/*! \brief Abstract base class for streaming nodes.
*
diff --git a/host/include/uhd/rfnoc/node_ctrl_base.ipp b/host/include/uhd/rfnoc/node_ctrl_base.ipp
index d300f72a7..df92e2e8b 100644
--- a/host/include/uhd/rfnoc/node_ctrl_base.ipp
+++ b/host/include/uhd/rfnoc/node_ctrl_base.ipp
@@ -21,7 +21,7 @@
#define INCLUDED_LIBUHD_NODE_CTRL_BASE_IPP
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <boost/shared_ptr.hpp>
#include <vector>
diff --git a/host/include/uhd/rfnoc/radio_ctrl.hpp b/host/include/uhd/rfnoc/radio_ctrl.hpp
index 1d7842051..be22c6e5e 100644
--- a/host/include/uhd/rfnoc/radio_ctrl.hpp
+++ b/host/include/uhd/rfnoc/radio_ctrl.hpp
@@ -18,6 +18,7 @@
#ifndef INCLUDED_LIBUHD_RFNOC_RADIO_CTRL_HPP
#define INCLUDED_LIBUHD_RFNOC_RADIO_CTRL_HPP
+#include <uhd/types/ranges.hpp>
#include <uhd/types/direction.hpp>
#include <uhd/rfnoc/source_block_ctrl_base.hpp>
#include <uhd/rfnoc/sink_block_ctrl_base.hpp>
@@ -43,6 +44,16 @@ public:
virtual ~radio_ctrl(){}
+
+ //! A wildcard channel index
+ static const size_t ALL_CHANS = size_t(~0);
+
+ //! A wildcard gain element name
+ static const std::string ALL_GAINS;
+
+ //! A wildcard local oscillator element name
+ static const std::string ALL_LOS;
+
/************************************************************************
* API calls
***********************************************************************/
@@ -109,11 +120,13 @@ public:
*/
virtual double get_rx_frequency(const size_t chan) /* const */ = 0;
- /*! Tune the RX LO for channel \p.
+ /*! Tune the RX LO for channel \p chan.
*
* This function will attempt to tune as close as possible, and return a
* coerced value of the actual tuning result.
*
+ * \param freq Requested LO frequency
+ * \param chan Channel number.
* \return The actual LO frequency.
*/
virtual double set_rx_frequency(const double freq, const size_t chan) = 0;
@@ -148,6 +161,20 @@ public:
*/
virtual double set_rx_gain(const double gain, const size_t chan) = 0;
+ /*! Return the analog filter bandwidth for channel \p chan
+ *
+ * \return The actual bandwidth value
+ */
+ virtual double get_rx_bandwidth(const size_t chan) = 0;
+
+ /*! Set the analog filter bandwidth for channel \p chan
+ *
+ * This function will attempt to set the analog bandwidth.
+ *
+ * \return The actual bandwidth value
+ */
+ virtual double set_rx_bandwidth(const double bandwidth, const size_t chan) = 0;
+
/*! Sets the time in the radio's timekeeper to the given value.
*
* Note that there is a non-deterministic delay between calling this
@@ -186,6 +213,182 @@ public:
*/
virtual time_spec_t get_time_last_pps() = 0;
+ /*! Returns the list of GPIO banks that are associated with this radio.
+ *
+ * \returns list of GPIO bank names
+ */
+ virtual std::vector<std::string> get_gpio_banks() const = 0;
+
+ /*!
+ * Set a GPIO attribute on a particular GPIO bank.
+ * Possible attribute names:
+ * - CTRL - 1 for ATR mode 0 for GPIO mode
+ * - DDR - 1 for output 0 for input
+ * - OUT - GPIO output level (not ATR mode)
+ * - ATR_0X - ATR idle state
+ * - ATR_RX - ATR receive only state
+ * - ATR_TX - ATR transmit only state
+ * - ATR_XX - ATR full duplex state
+ * \param bank the name of a GPIO bank (e.g., FP0)
+ * \param attr the name of a GPIO attribute (e.g., CTRL)
+ * \param value the new value for this GPIO bank
+ * \param mask the bit mask selecting which pins are changed
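+ *
+ * A brief sketch (here, `radio` is a radio_ctrl::sptr; the bank name and the
+ * values are examples only):
+ * \code
+ * // Put the lower eight pins of bank "FP0" into plain GPIO mode,
+ * // make them outputs, and drive a test pattern:
+ * radio->set_gpio_attr("FP0", "CTRL", 0x00, 0xFF);
+ * radio->set_gpio_attr("FP0", "DDR",  0xFF, 0xFF);
+ * radio->set_gpio_attr("FP0", "OUT",  0xA5, 0xFF);
+ * \endcode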
+ */
+ virtual void set_gpio_attr(
+ const std::string &bank,
+ const std::string &attr,
+ const uint32_t value,
+ const uint32_t mask
+ ) = 0;
+
+ /*!
+ * Get a GPIO attribute on a particular GPIO bank.
+ * Possible attribute names:
+ * - CTRL - 1 for ATR mode 0 for GPIO mode
+ * - DDR - 1 for output 0 for input
+ * - OUT - GPIO output level (not ATR mode)
+ * - ATR_0X - ATR idle state
+ * - ATR_RX - ATR receive only state
+ * - ATR_TX - ATR transmit only state
+ * - ATR_XX - ATR full duplex state
+ * - READBACK - readback input GPIOs
+ * \param bank the name of a GPIO bank
+ * \param attr the name of a GPIO attribute
+ * \return the value set for this attribute
+ */
+ virtual uint32_t get_gpio_attr(const std::string &bank, const std::string &attr) = 0;
+
+ /*!
+ * Get a list of possible LO stage names
+ * \param chan the channel index 0 to N-1
+ * \return a vector of strings for possible LO names
+ */
+ virtual std::vector<std::string> get_rx_lo_names(const size_t chan) = 0;
+
+ /*!
+ * Get a list of possible LO sources.
+ * Channels which do not have controllable LO sources
+ * will return "internal".
+ * \param name the name of the LO stage to query
+ * \param chan the channel index 0 to N-1
+ * \return a vector of strings for possible settings
+ */
+ virtual std::vector<std::string> get_rx_lo_sources(const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Get the LO frequency range of the RX LO.
+ * If the channel does not have independently configurable LOs,
+ * the RF frequency range will be returned.
+ * \param name the name of the LO stage to query
+ * \param chan the channel index 0 to N-1
+ * \return a frequency range object
+ */
+ virtual freq_range_t get_rx_lo_freq_range(const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Set the LO source for a channel.
+ * For USRPs that support selectable LOs, this function
+ * allows switching between them.
+ * Typical options for source: internal, external.
+ * \param src a string representing the LO source
+ * \param name the name of the LO stage to update
+ * \param chan the channel index 0 to N-1
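+ *
+ * A brief sketch (here, `radio` is a radio_ctrl::sptr and "LO1" is a
+ * hypothetical stage name; valid names can be queried with
+ * get_rx_lo_names()):
+ * \code
+ * radio->set_rx_lo_source("external", "LO1", 0);
+ * \endcode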
+ */
+ virtual void set_rx_lo_source(const std::string &src, const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Get the currently set LO source.
+ * Channels without controllable LO sources will return
+ * "internal"
+ * \param name the name of the LO stage to query
+ * \param chan the channel index 0 to N-1
+ * \return the configured LO source
+ */
+ virtual const std::string get_rx_lo_source(const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Set whether the LO used by the USRP device is exported.
+ * For USRPs that support exportable LOs, this function
+ * configures whether the LO used by chan is exported.
+ * \param enabled if true then export the LO
+ * \param name the name of the LO stage to update
+ * \param chan the channel index 0 to N-1 for the source channel
+ */
+ virtual void set_rx_lo_export_enabled(bool enabled, const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Returns true if the currently selected LO is being exported.
+ * \param name the name of the LO stage to query
+ * \param chan the channel index 0 to N-1
+ */
+ virtual bool get_rx_lo_export_enabled(const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Set the RX LO frequency (Advanced).
+ * \param freq the frequency to set the LO to
+ * \param name the name of the LO stage to update
+ * \param chan the channel index 0 to N-1
+ * \return a coerced LO frequency
+ */
+ virtual double set_rx_lo_freq(double freq, const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Get the current RX LO frequency (Advanced).
+ * If the channel does not have independently configurable LOs,
+ * the current RF frequency will be returned.
+ * \param name the name of the LO stage to query
+ * \param chan the channel index 0 to N-1
+ * \return the configured LO frequency
+ */
+ virtual double get_rx_lo_freq(const std::string &name, const size_t chan) = 0;
+
+ /*!
+ * Set the time source for this radio.
+ *
+ * May affect other radio blocks.
+ *
+ * \param source A string representing the time source
+ * \throws uhd::value_error if the value can't be applied
+ */
+ virtual void set_time_source(const std::string &source) = 0;
+
+ /*!
+ * Get the currently set time source.
+ *
+ * \return the string representing the time source
+ */
+ virtual std::string get_time_source() = 0;
+
+ /*!
+ * Get a list of possible time sources.
+ *
+ * \return a vector of strings for possible settings
+ */
+ virtual std::vector<std::string> get_time_sources() = 0;
+
+ /*!
+ * Set the clock source for the USRP device (for reference clock).
+ *
+ * Typical options for source: internal, external.
+ *
+ * \param source a string representing the clock source
+ */
+ virtual void set_clock_source(const std::string &source) = 0;
+
+ /*!
+ * Get the currently set clock source.
+ *
+ * \return the string representing the clock source
+ */
+ virtual std::string get_clock_source() = 0;
+
+ /*!
+ * Get a list of possible clock sources.
+ *
+ * \return a vector of strings for possible settings
+ */
+ virtual std::vector<std::string> get_clock_sources() = 0;
+
/*! Given a frontend name, return the channel mapping.
*
* E.g.: For a TwinRX board, there's two frontends, '0' and '1', which
diff --git a/host/include/uhd/transport/nirio/nirio_fifo.ipp b/host/include/uhd/transport/nirio/nirio_fifo.ipp
index 5669f259f..bb21c2715 100644
--- a/host/include/uhd/transport/nirio/nirio_fifo.ipp
+++ b/host/include/uhd/transport/nirio/nirio_fifo.ipp
@@ -134,7 +134,6 @@ bool nirio_fifo<data_t>::_acquire_block_from_rio_buffer(
uint32_t elements_remaining_u32 = 0;
size_t elements_to_request = 0;
void* elements_buffer = NULL;
- char context_buffer[64];
if (fifo_optimization_option == MAXIMIZE_THROUGHPUT)
{
diff --git a/host/include/uhd/transport/nirio/nirio_quirks.h b/host/include/uhd/transport/nirio/nirio_quirks.h
index 45ef40394..f98432d6b 100644
--- a/host/include/uhd/transport/nirio/nirio_quirks.h
+++ b/host/include/uhd/transport/nirio/nirio_quirks.h
@@ -44,7 +44,7 @@ public:
UHD_INLINE void add_tx_fifo(uint32_t index) {
if (_tx_stream_fifo_indices.find(index) != _tx_stream_fifo_indices.end()) {
if (_tx_stream_count == 0) {
- UHD_LOG << "NI-RIO RX FIFO Transfer Check Quirk Enabled.";
+ UHD_LOGGER_DEBUG("NIRIO") << "NI-RIO RX FIFO Transfer Check Quirk Enabled.";
}
_tx_stream_count++;
}
@@ -54,7 +54,7 @@ public:
if (_tx_stream_fifo_indices.find(index) != _tx_stream_fifo_indices.end()) {
_tx_stream_count--;
if (_tx_stream_count == 0) {
- UHD_LOG << "NI-RIO RX FIFO Transfer Check Quirk Disabled.";
+ UHD_LOGGER_DEBUG("NIRIO") << "NI-RIO RX FIFO Transfer Check Quirk Disabled.";
}
}
}
diff --git a/host/include/uhd/transport/nirio/rpc/rpc_client.hpp b/host/include/uhd/transport/nirio/rpc/rpc_client.hpp
index 78761c1fc..422f57e8a 100644
--- a/host/include/uhd/transport/nirio/rpc/rpc_client.hpp
+++ b/host/include/uhd/transport/nirio/rpc/rpc_client.hpp
@@ -65,11 +65,11 @@ private:
inline void _stop_io_service() {
if (_io_service_thread.get()) {
- UHD_LOG << "rpc_client stopping..." << std::endl;
+ UHD_LOGGER_INFO("NIRIO") << "rpc_client stopping...";
_io_service.stop();
_io_service_thread->join();
_io_service_thread.reset();
- UHD_LOG << "rpc_client stopped." << std::endl;
+ UHD_LOGGER_INFO("NIRIO") << "rpc_client stopped.";
}
}
diff --git a/host/include/uhd/transport/udp_simple.hpp b/host/include/uhd/transport/udp_simple.hpp
index c159a95e3..d0f6f36b1 100644
--- a/host/include/uhd/transport/udp_simple.hpp
+++ b/host/include/uhd/transport/udp_simple.hpp
@@ -93,6 +93,11 @@ public:
* Only use this with the broadcast socket.
*/
virtual std::string get_recv_addr(void) = 0;
+
+ /*!
+ * Get the IP address for the destination
+ */
+ virtual std::string get_send_addr(void) = 0;
};
}} //namespace
diff --git a/host/include/uhd/transport/udp_zero_copy.hpp b/host/include/uhd/transport/udp_zero_copy.hpp
index dcf020460..851e004d7 100644
--- a/host/include/uhd/transport/udp_zero_copy.hpp
+++ b/host/include/uhd/transport/udp_zero_copy.hpp
@@ -67,6 +67,8 @@ public:
udp_zero_copy::buff_params& buff_params_out,
const device_addr_t &hints = device_addr_t()
);
+
+ virtual uint16_t get_local_port(void) const = 0;
};
}} //namespace
diff --git a/host/include/uhd/types/filters.hpp b/host/include/uhd/types/filters.hpp
index e3756b7a5..f91f35030 100644
--- a/host/include/uhd/types/filters.hpp
+++ b/host/include/uhd/types/filters.hpp
@@ -20,7 +20,7 @@
#include <uhd/config.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <stdint.h>
#include <boost/shared_ptr.hpp>
#include <boost/scoped_array.hpp>
@@ -263,7 +263,7 @@ namespace uhd{
{
std::size_t num_taps = taps.size();
if(num_taps < this->_max_num_taps){
- UHD_MSG(warning) << "digital_filter_fir::set_taps not enough coefficients. Appending zeros";
+ UHD_LOGGER_WARNING("FILTERS") << "digital_filter_fir::set_taps not enough coefficients. Appending zeros";
std::vector<tap_t> coeffs;
for (size_t i = 0; i < this->_max_num_taps; i++)
{
diff --git a/host/include/uhd/types/ranges.hpp b/host/include/uhd/types/ranges.hpp
index ac632df93..b26b5d753 100644
--- a/host/include/uhd/types/ranges.hpp
+++ b/host/include/uhd/types/ranges.hpp
@@ -59,6 +59,12 @@ namespace uhd{
//! Convert this range to a printable string
const std::string to_pp_string(void) const;
+ //! Equality operator
+ bool operator==(const range_t &other) const;
+
+ //! Inequality operator
+ bool operator!=(const range_t &other) const;
+
private: double _start, _stop, _step;
};
diff --git a/host/include/uhd/types/sid.hpp b/host/include/uhd/types/sid.hpp
index 5c31f2d5a..f9fa40273 100644
--- a/host/include/uhd/types/sid.hpp
+++ b/host/include/uhd/types/sid.hpp
@@ -1,5 +1,5 @@
//
-// Copyright 2014 Ettus Research LLC
+// Copyright 2014-2016 Ettus Research LLC
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -19,9 +19,8 @@
#define INCLUDED_UHD_TYPES_SID_HPP
#include <uhd/config.hpp>
-#include <stdint.h>
-#include <boost/shared_ptr.hpp>
#include <iostream>
+#include <stdint.h>
namespace uhd {
/*!
@@ -105,47 +104,47 @@ namespace uhd {
// Getters
//
//! Alias for get_sid()
- UHD_INLINE uint32_t get() const { return get_sid(); };
+ inline uint32_t get() const { return get_sid(); };
//! Returns a 32-Bit representation of the SID if set, or zero otherwise.
- UHD_INLINE uint32_t get_sid() const { return _set ? _sid : 0; };
+ inline uint32_t get_sid() const { return _set ? _sid : 0; };
//! Return the 16-bit source address of this SID
- UHD_INLINE uint32_t get_src() const {
+ inline uint32_t get_src() const {
return (_sid >> 16) & 0xFFFF;
}
//! Return the 16-bit destination address of this SID
- UHD_INLINE uint32_t get_dst() const {
+ inline uint32_t get_dst() const {
return _sid & 0xFFFF;
}
//! Return 8-bit address of the source
- UHD_INLINE uint32_t get_src_addr() const {
+ inline uint32_t get_src_addr() const {
return (get_src() >> 8) & 0xFF;
}
//! Return endpoint of the source
- UHD_INLINE uint32_t get_src_endpoint() const {
+ inline uint32_t get_src_endpoint() const {
return get_src() & 0xFF;
}
//! Return crossbar port of the source
- UHD_INLINE uint32_t get_src_xbarport() const {
+ inline uint32_t get_src_xbarport() const {
return (get_src_endpoint() >> 4) & 0xF;
}
//! Return block port of the source
- UHD_INLINE uint32_t get_src_blockport() const {
+ inline uint32_t get_src_blockport() const {
return (get_src_endpoint()) & 0xF;
}
//! Return 8-bit address of the destination
- UHD_INLINE uint32_t get_dst_addr() const {
+ inline uint32_t get_dst_addr() const {
return (get_dst() >> 8) & 0xFF;
}
//! Return endpoint of the destination
- UHD_INLINE uint32_t get_dst_endpoint() const {
+ inline uint32_t get_dst_endpoint() const {
return get_dst() & 0xFF;
}
//! Return crossbar port of the source
- UHD_INLINE uint32_t get_dst_xbarport() const {
+ inline uint32_t get_dst_xbarport() const {
return (get_dst_endpoint() >> 4) & 0xF;
}
//! Return block port of the source
- UHD_INLINE uint32_t get_dst_blockport() const {
+ inline uint32_t get_dst_blockport() const {
return (get_dst_endpoint()) & 0xF;
}
@@ -174,14 +173,14 @@ namespace uhd {
// Manipulators
//! Swaps dst and src address and returns the new SID.
- sid_t reversed();
+ sid_t reversed() const;
- //! Swaps dst and src in-place.
+ //! Swaps dst and src in-place. This modifies the current SID.
void reverse();
// Overloaded operators
- sid_t operator = (uint32_t new_sid) {
+ sid_t operator = (const uint32_t new_sid) {
set_sid(new_sid);
return *this;
}
@@ -191,6 +190,11 @@ namespace uhd {
return *this;
}
+ sid_t operator = (const sid_t &sid) {
+ set_sid(sid.get_sid());
+ return *this;
+ }
+
sid_t operator = (const std::string &sid_str) {
set_from_str(sid_str);
return *this;
@@ -228,7 +232,7 @@ namespace uhd {
};
//! Stream output operator. Honors std::ios::hex.
- UHD_INLINE std::ostream& operator<< (std::ostream& out, const sid_t &sid) {
+ inline std::ostream& operator<< (std::ostream& out, const sid_t &sid) {
std::ios_base::fmtflags ff = out.flags();
if (ff & std::ios::hex) {
out << sid.to_pp_string_hex();
diff --git a/host/include/uhd/usrp/subdev_spec.hpp b/host/include/uhd/usrp/subdev_spec.hpp
index 62c1fc177..4165a45f7 100644
--- a/host/include/uhd/usrp/subdev_spec.hpp
+++ b/host/include/uhd/usrp/subdev_spec.hpp
@@ -44,6 +44,12 @@ namespace uhd{ namespace usrp{
const std::string &db_name = "",
const std::string &sd_name = ""
);
+
+ //! overloaded equality operator
+ bool operator==(const subdev_spec_pair_t &other);
+
+ //! overloaded inequality operator
+ bool operator!=(const subdev_spec_pair_t &other);
};
//! overloaded comparison operator for subdev_spec_pair_t
diff --git a/host/include/uhd/utils/CMakeLists.txt b/host/include/uhd/utils/CMakeLists.txt
index af6d3ee47..cf5bb8aa8 100644
--- a/host/include/uhd/utils/CMakeLists.txt
+++ b/host/include/uhd/utils/CMakeLists.txt
@@ -28,8 +28,8 @@ UHD_INSTALL(FILES
fp_compare_epsilon.ipp
gain_group.hpp
log.hpp
+ log_add.hpp
math.hpp
- msg.hpp
msg_task.hpp
paths.hpp
pimpl.hpp
diff --git a/host/include/uhd/utils/assert_has.ipp b/host/include/uhd/utils/assert_has.ipp
index 7b3c88cb7..974eea5a5 100644
--- a/host/include/uhd/utils/assert_has.ipp
+++ b/host/include/uhd/utils/assert_has.ipp
@@ -21,8 +21,8 @@
#include <uhd/utils/algorithm.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/lexical_cast.hpp>
+#include <boost/foreach.hpp>
namespace uhd{
diff --git a/host/include/uhd/utils/atomic.hpp b/host/include/uhd/utils/atomic.hpp
index ec4d3e0a2..d75a60c3b 100644
--- a/host/include/uhd/utils/atomic.hpp
+++ b/host/include/uhd/utils/atomic.hpp
@@ -1,5 +1,5 @@
//
-// Copyright 2012-2013,2016 Ettus Research LLC
+// Copyright 2012-2013,2016-2017 Ettus Research LLC
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -21,122 +21,10 @@
#include <uhd/config.hpp>
#include <uhd/types/time_spec.hpp>
#include <boost/thread/thread.hpp>
-#include <boost/thread/mutex.hpp>
-#include <boost/thread/condition_variable.hpp>
-#include <boost/interprocess/detail/atomic.hpp>
-
-#include <boost/version.hpp>
-#define BOOST_IPC_DETAIL boost::interprocess::ipcdetail
+#include <atomic>
namespace uhd{
- //! A 32-bit integer that can be atomically accessed
- class UHD_API atomic_uint32_t{
- public:
-
- //! Create a new atomic 32-bit integer, initialized to zero
- UHD_INLINE atomic_uint32_t(void){
- this->write(0);
- }
-
- //! Compare with cmp, swap with newval if same, return old value
- UHD_INLINE uint32_t cas(uint32_t newval, uint32_t cmp){
- return BOOST_IPC_DETAIL::atomic_cas32(&_num, newval, cmp);
- }
-
- //! Sets the atomic integer to a new value
- UHD_INLINE void write(const uint32_t newval){
- BOOST_IPC_DETAIL::atomic_write32(&_num, newval);
- }
-
- //! Gets the current value of the atomic integer
- UHD_INLINE uint32_t read(void){
- return BOOST_IPC_DETAIL::atomic_read32(&_num);
- }
-
- //! Increment by 1 and return the old value
- UHD_INLINE uint32_t inc(void){
- return BOOST_IPC_DETAIL::atomic_inc32(&_num);
- }
-
- //! Decrement by 1 and return the old value
- UHD_INLINE uint32_t dec(void){
- return BOOST_IPC_DETAIL::atomic_dec32(&_num);
- }
-
- private: volatile uint32_t _num;
- };
-
- /*!
- * A reusable barrier to sync multiple threads.
- * All threads spin on wait() until count is reset.
- */
- class UHD_API reusable_barrier{
- public:
-
- reusable_barrier():_size (0) {}
-
- reusable_barrier(const size_t size):_size(size) {}
-
- //! Resize the barrier for N threads
- void resize(const size_t size){
- _size = size;
- }
-
- /*!
- * Force the barrier wait to throw a boost::thread_interrupted
- * The threads were not getting the interruption_point on windows.
- */
- void interrupt(void)
- {
- _done.inc();
- }
-
- //! Wait on the barrier condition
- UHD_INLINE void wait(void)
- {
- if (_size == 1) return;
-
- //entry barrier with condition variable
- _entry_counter.inc();
- _entry_counter.cas(0, _size);
- boost::mutex::scoped_lock lock(_mutex);
- while (_entry_counter.read() != 0)
- {
- this->check_interrupt();
- _cond.timed_wait(lock, boost::posix_time::milliseconds(1));
- }
- lock.unlock(); //unlock before notify
- _cond.notify_one();
-
- //exit barrier to ensure known condition of entry count
- _exit_counter.inc();
- _exit_counter.cas(0, _size);
- while (_exit_counter.read() != 0) this->check_interrupt();
- }
-
- //! Wait on the barrier condition
- UHD_INLINE void wait_others(void)
- {
- while (_entry_counter.read() != (_size-1)) this->check_interrupt();
- }
-
- private:
- size_t _size;
- atomic_uint32_t _entry_counter;
- atomic_uint32_t _exit_counter;
- atomic_uint32_t _done;
- boost::mutex _mutex;
- boost::condition_variable _cond;
-
- UHD_INLINE void check_interrupt(void)
- {
- if (_done.read() != 0) throw boost::thread_interrupted();
- boost::this_thread::interruption_point();
- boost::this_thread::yield();
- }
- };
-
/*!
* Spin-wait on a condition with a timeout.
* \param cond an atomic variable to compare
@@ -144,15 +32,18 @@ namespace uhd{
* \param timeout the timeout in seconds
* \return true for cond == value, false for timeout
*/
+ template<typename T>
UHD_INLINE bool spin_wait_with_timeout(
- atomic_uint32_t &cond,
- uint32_t value,
+ std::atomic<T> &cond,
+ const T value,
const double timeout
){
- if (cond.read() == value) return true;
+ if (cond == value) return true;
const time_spec_t exit_time = time_spec_t::get_system_time() + time_spec_t(timeout);
- while (cond.read() != value){
- if (time_spec_t::get_system_time() > exit_time) return false;
+ while (cond != value) {
+ if (time_spec_t::get_system_time() > exit_time) {
+ return false;
+ }
boost::this_thread::interruption_point();
boost::this_thread::yield();
}
@@ -170,19 +61,19 @@ namespace uhd{
}
UHD_INLINE void release(void){
- _locked.write(0);
+ _locked = false;
}
UHD_INLINE bool claim_with_wait(const double timeout){
- if (spin_wait_with_timeout(_locked, 0, timeout)){
- _locked.write(1);
+ if (spin_wait_with_timeout(_locked, false, timeout)){
+ _locked = true;
return true;
}
return false;
}
private:
- atomic_uint32_t _locked;
+ std::atomic<bool> _locked;
};
} //namespace uhd
diff --git a/host/include/uhd/utils/fp_compare_delta.ipp b/host/include/uhd/utils/fp_compare_delta.ipp
index 292ef4bf6..9a03bd95b 100644
--- a/host/include/uhd/utils/fp_compare_delta.ipp
+++ b/host/include/uhd/utils/fp_compare_delta.ipp
@@ -15,7 +15,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <uhd/utils/msg.hpp>
+
#include <cmath>
#include <typeinfo>
diff --git a/host/include/uhd/utils/fp_compare_epsilon.ipp b/host/include/uhd/utils/fp_compare_epsilon.ipp
index ff2d585db..af71046ed 100644
--- a/host/include/uhd/utils/fp_compare_epsilon.ipp
+++ b/host/include/uhd/utils/fp_compare_epsilon.ipp
@@ -15,7 +15,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <uhd/utils/msg.hpp>
+
#include <cmath>
#include <typeinfo>
diff --git a/host/include/uhd/utils/log.hpp b/host/include/uhd/utils/log.hpp
index 106c0d9d5..58b3afed4 100644
--- a/host/include/uhd/utils/log.hpp
+++ b/host/include/uhd/utils/log.hpp
@@ -19,74 +19,273 @@
#define INCLUDED_UHD_UTILS_LOG_HPP
#include <uhd/config.hpp>
-#include <uhd/utils/pimpl.hpp>
#include <boost/current_function.hpp>
-#include <boost/format.hpp>
+#include <boost/thread/thread.hpp>
#include <ostream>
#include <string>
#include <sstream>
+#include <iostream>
/*! \file log.hpp
- * The UHD logging facility.
*
- * The logger enables UHD library code to easily log events into a file.
+ * \section loghpp_logging The UHD logging facility
+ *
+ * The logger enables UHD library code to easily log events into a file and
+ * display messages above a certain level in the terminal.
* Log entries are time-stamped and stored with file, line, and function.
- * Each call to the UHD_LOG macros is synchronous and thread-safe.
+ * Each call to the UHD_LOG macros is thread-safe. Each thread will acquire the
+ * lock for the logger.
+ *
+ * Note: More information on the logging subsystem can be found on
+ * \ref page_logging.
+ *
+ * To disable console logging completely at compile time, specify
+ * `-DUHD_LOG_CONSOLE_DISABLE` during configuration with CMake.
+ *
+ * By default no file logging will occur. Set a log file path:
+ * - at compile time by specifying `-DUHD_LOG_FILE=$file_path`
+ * - and/or override at runtime by setting the environment variable
+ * `UHD_LOG_FILE`
*
- * The log file can be found in the path <temp-directory>/uhd.log,
- * where <temp-directory> is the user or system's temporary directory.
- * To override <temp-directory>, set the UHD_TEMP_PATH environment variable.
+ * \subsection loghpp_levels Log levels
+ *
+ * See also \ref logging_levels.
*
* All log messages with verbosity greater than or equal to the log level
* (in other words, as often or less often than the current log level)
- * are recorded into the log file. All other messages are sent to null.
+ * are recorded to std::clog and/or the log file.
+ * Log levels can be specified using string or numeric values of
+ * uhd::log::severity_level.
*
- * The default log level is "never", but can be overridden:
- * - at compile time by setting the pre-processor define UHD_LOG_LEVEL.
- * - at runtime by setting the environment variable UHD_LOG_LEVEL.
+ * The default log level is "info", but can be overridden:
+ * - at compile time by setting the pre-processor define `-DUHD_LOG_MIN_LEVEL`.
+ * - at runtime by setting the environment variable `UHD_LOG_LEVEL`.
+ * - for console logging by setting `(-D)UHD_LOG_CONSOLE_LEVEL` at
+ *   run- or compile time
+ * - for file logging by setting `(-D)UHD_LOG_FILE_LEVEL` at run- or compile time
*
* UHD_LOG_LEVEL can be the name of a verbosity enum or integer value:
- * - Example pre-processor define: -DUHD_LOG_LEVEL=3
- * - Example pre-processor define: -DUHD_LOG_LEVEL=regularly
- * - Example environment variable: export UHD_LOG_LEVEL=3
- * - Example environment variable: export UHD_LOG_LEVEL=regularly
+ * - Example pre-processor define: `-DUHD_LOG_MIN_LEVEL=3`
+ * - Example pre-processor define: `-DUHD_LOG_MIN_LEVEL=info`
+ * - Example environment variable: `export UHD_LOG_LEVEL=3`
+ * - Example environment variable: `export UHD_LOG_LEVEL=info`
+ *
+ * \subsection loghpp_formatting Log formatting
+ *
+ * The log format for messages going into a log file is CSV.
+ * All log messages going into a log file contain the following fields:
+ * - timestamp
+ * - thread-id
+ * - source-file + line information
+ * - severity level
+ * - component/channel information which logged the information
+ * - the actual log message
+ *
+ * Log messages displayed on the terminal are plain text with
+ * space-separated tags prepended.
+ * For example:
+ * - `[INFO] [X300] This is an informational log message`
+ *
+ * The console log format can be extended by using these preprocessor
+ * defines in CMake:
+ * - `-DUHD_LOG_CONSOLE_TIME` adds a timestamp `[2017-01-01 00:00:00.000000]`
+ * - `-DUHD_LOG_CONSOLE_THREAD` adds a thread-id `[0x001234]`
+ * - `-DUHD_LOG_CONSOLE_SRC` adds a source file and line tag `[src_file:line]`
*/
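+/* Illustrative usage (editor's sketch, not part of the original header):
+ * the logging macros declared further down in this file are used roughly as
+ * follows; "MY_APP" is an arbitrary component tag chosen for this example.
+ *
+ *   UHD_LOG_INFO("MY_APP", "Initialized device " << 0);
+ *   UHD_LOGGER_WARNING("MY_APP") << "Temperature is high: " << 85;
+ *
+ * With the default console format these appear as, e.g.,
+ *   [INFO] [MY_APP] Initialized device 0
+ */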
-/*!
- * A UHD logger macro with configurable verbosity.
- * Usage: UHD_LOGV(very_rarely) << "the log message" << std::endl;
+/*
+ * Advanced logging macros
+ * UHD_LOG_MIN_LEVEL definitions
+ * trace: 0
+ * debug: 1
+ * info: 2
+ * warning: 3
+ * error: 4
+ * fatal: 5
*/
-#define UHD_LOGV(verbosity) \
- uhd::_log::log(uhd::_log::verbosity, __FILE__, __LINE__, BOOST_CURRENT_FUNCTION)
-/*!
- * A UHD logger macro with default verbosity.
- * Usage: UHD_LOG << "the log message" << std::endl;
- */
-#define UHD_LOG \
- UHD_LOGV(regularly)
+namespace uhd {
+ namespace log {
+ /*! Logging severity levels
+ *
+ * Either a numeric value or a string can be used to define the log level in
+ * CMake and in environment variables.
+ */
+ enum severity_level {
+ trace = 0, /**< displays every available log message */
+ debug = 1, /**< displays most log messages necessary for debugging internals */
+ info = 2, /**< informational messages about setup and what is going on */
+ warning = 3, /**< something is not right but operation can continue */
+ error = 4, /**< something has gone wrong */
+ fatal = 5, /**< something has gone horribly wrong */
+ off = 6, /**< logging is turned off */
+ };
+ /*! Logging info structure
+ *
+ * Information needed to create a log entry is fully contained in the
+ * logging_info structure.
+ */
+ struct UHD_API logging_info {
+ logging_info()
+ : verbosity(uhd::log::off) {}
+ logging_info(
+ const boost::posix_time::ptime &time_,
+ const uhd::log::severity_level &verbosity_,
+ const std::string &file_,
+ const size_t &line_,
+ const std::string &component_,
+ const boost::thread::id &thread_id_
+ ) : time(time_),
+ verbosity(verbosity_),
+ file(file_),
+ line(line_),
+ component(component_),
+ thread_id(thread_id_)
+ { /* nop */ }
-namespace uhd{ namespace _log{
+ boost::posix_time::ptime time;
+ uhd::log::severity_level verbosity;
+ std::string file;
+ unsigned int line;
+ std::string component;
+ boost::thread::id thread_id;
+ std::string message;
+ };
- //! Verbosity levels for the logger
- enum verbosity_t{
- always = 1,
- often = 2,
- regularly = 3,
- rarely = 4,
- very_rarely = 5,
- never = 6
- };
+ /*! Set the global log level
+ *
+ * The global log level gets applied before the logger-specific level.
+ * For example, if the global log level is 'info', no logger can print
+ * messages at level 'debug' or below.
+ */
+ UHD_API void set_log_level(uhd::log::severity_level level);
+
+ /*! Set the log level for the console logger (if defined).
+ *
+ * Short-hand for `set_logger_level("console", level);`
+ */
+ UHD_API void set_console_level(uhd::log::severity_level level);
+
+ /*! Set the log level for the file logger (if defined)
+ *
+ * Short-hand for `set_logger_level("file", level);`
+ */
+ UHD_API void set_file_level(uhd::log::severity_level level);
+
+ /*! Set the log level for any specific logger.
+ *
+ * \param logger Name of the logger
+ * \param level New log level for this logger.
+ *
+ * \throws uhd::key_error if \p logger was not defined
+ */
+ UHD_API void set_logger_level(const std::string &logger, uhd::log::severity_level level);
+ }
+}
+
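+// Illustrative usage (editor's sketch): an application that wants verbose
+// file logs but a quiet console could call, for example:
+//
+//   uhd::log::set_log_level(uhd::log::debug);
+//   uhd::log::set_file_level(uhd::log::debug);
+//   uhd::log::set_console_level(uhd::log::warning);
+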
+//! \cond
+//! Internal logging macro to be used in other macros
+#define _UHD_LOG_INTERNAL(component, level) \
+ uhd::_log::log(level, __FILE__, __LINE__, component, boost::this_thread::get_id())
+//! \endcond
+
+// macro-style logging (compile-time determined)
+#if UHD_LOG_MIN_LEVEL < 1
+#define UHD_LOG_TRACE(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::trace) << message;
+#else
+#define UHD_LOG_TRACE(component, message)
+#endif
+
+#if UHD_LOG_MIN_LEVEL < 2
+#define UHD_LOG_DEBUG(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::debug) << message;
+#else
+#define UHD_LOG_DEBUG(component, message)
+#endif
+
+#if UHD_LOG_MIN_LEVEL < 3
+#define UHD_LOG_INFO(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::info) << message;
+#else
+#define UHD_LOG_INFO(component, message)
+#endif
+
+#if UHD_LOG_MIN_LEVEL < 4
+#define UHD_LOG_WARNING(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::warning) << message;
+#else
+#define UHD_LOG_WARNING(component, message)
+#endif
+
+#if UHD_LOG_MIN_LEVEL < 5
+#define UHD_LOG_ERROR(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::error) << message;
+#else
+#define UHD_LOG_ERROR(component, message)
+#endif
+
+#if UHD_LOG_MIN_LEVEL < 6
+#define UHD_LOG_FATAL(component, message) \
+ _UHD_LOG_INTERNAL(component, uhd::log::fatal) << message;
+#else
+#define UHD_LOG_FATAL(component, message)
+#endif
+
+#ifndef UHD_LOG_FASTPATH_DISABLE
+//! Extra-fast logging macro for when speed matters.
+// No metadata is tracked. Only the message is displayed. This does not go
+// through the regular backends. Mostly used for printing the UOSDL characters
+// during streaming.
+#define UHD_LOG_FASTPATH(message) \
+ uhd::_log::log_fastpath(message);
+#else
+#define UHD_LOG_FASTPATH(message)
+#endif
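+
+// Illustrative usage (editor's sketch): a streaming hot loop can emit a
+// single character per event without paying for the full logging pipeline,
+// e.g.
+//
+//   UHD_LOG_FASTPATH("U");  // conventional underflow indicator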
+
+// iostream-style logging
+#define UHD_LOGGER_TRACE(component) _UHD_LOG_INTERNAL(component, uhd::log::trace)
+#define UHD_LOGGER_DEBUG(component) _UHD_LOG_INTERNAL(component, uhd::log::debug)
+#define UHD_LOGGER_INFO(component) _UHD_LOG_INTERNAL(component, uhd::log::info)
+#define UHD_LOGGER_WARNING(component) _UHD_LOG_INTERNAL(component, uhd::log::warning)
+#define UHD_LOGGER_ERROR(component) _UHD_LOG_INTERNAL(component, uhd::log::error)
+#define UHD_LOGGER_FATAL(component) _UHD_LOG_INTERNAL(component, uhd::log::fatal)
+
+
+#if defined(__GNUG__)
+//! Helpful debug tool to print site info
+#define UHD_HERE() \
+ UHD_LOGGER_DEBUG("DEBUG") << __FILE__ << ":" << __LINE__ << " (" << __PRETTY_FUNCTION__ << ")";
+#else
+//! Helpful debug tool to print site info
+#define UHD_HERE() \
+ UHD_LOGGER_DEBUG("DEBUG") << __FILE__ << ":" << __LINE__;
+#endif
+
+//! Helpful debug tool to print a variable
+#define UHD_VAR(var) \
+ UHD_LOGGER_DEBUG("DEBUG") << #var << " = " << var;
+
+//! Helpful debug tool to print a variable in hex
+#define UHD_HEX(var) \
+ UHD_LOGGER_DEBUG("DEBUG") << #var << " = 0x" << std::hex << std::setfill('0') << std::setw(8) << var << std::dec;
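+
+// Illustrative usage (editor's sketch): the helpers above emit DEBUG-level
+// records under the "DEBUG" component tag (visible once the log level is
+// lowered to debug), e.g. for a local variable `spp`:
+//
+//   UHD_HERE();    // [DEBUG] [DEBUG] file.cpp:123 (...)
+//   UHD_VAR(spp);  // [DEBUG] [DEBUG] spp = 363
+//   UHD_HEX(spp);  // [DEBUG] [DEBUG] spp = 0x0000016b
+//
+// Note that UHD_HEX relies on std::setw/std::setfill, i.e. <iomanip> must be
+// available at the call site.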
+
+//! \cond
+namespace uhd{ namespace _log {
+
+ //! Fastpath logging
+ void UHD_API log_fastpath(const std::string &msg);
- //! Internal logging object (called by UHD_LOG macros)
+ //! Internal logging object (called by UHD_LOG* macros)
class UHD_API log {
public:
log(
- const verbosity_t verbosity,
+ const uhd::log::severity_level verbosity,
const std::string &file,
const unsigned int line,
- const std::string &function
+ const std::string &component,
+ const boost::thread::id thread_id
);
~log(void);
@@ -95,7 +294,9 @@ namespace uhd{ namespace _log{
// conversion of types if not logging.
#define INSERTION_OVERLOAD(x) log& operator<< (x) \
{ \
- if(_log_it) _ss << val; \
+ if(_log_it) { \
+ _ss << val ; \
+ } \
return *this; \
}
@@ -109,10 +310,13 @@ namespace uhd{ namespace _log{
INSERTION_OVERLOAD(std::ios_base& (*val)(std::ios_base&))
private:
+ uhd::log::logging_info _log_info;
std::ostringstream _ss;
- bool _log_it;
+ const bool _log_it;
};
-}} //namespace uhd::_log
+} //namespace uhd::_log
+//! \endcond
+} /* namespace uhd */
#endif /* INCLUDED_UHD_UTILS_LOG_HPP */
diff --git a/host/include/uhd/utils/log_add.hpp b/host/include/uhd/utils/log_add.hpp
new file mode 100644
index 000000000..99b2e6dfc
--- /dev/null
+++ b/host/include/uhd/utils/log_add.hpp
@@ -0,0 +1,46 @@
+//
+// Copyright 2017 Ettus Research (National Instruments Corp.)
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+// Note: Including this file requires C++11 features enabled.
+
+#ifndef INCLUDED_UHD_UTILS_LOG_ADD_HPP
+#define INCLUDED_UHD_UTILS_LOG_ADD_HPP
+
+#include <uhd/config.hpp>
+#include <uhd/utils/log.hpp>
+#include <functional>
+
+namespace uhd {
+ namespace log {
+
+ /*! Logging function type
+ *
+ * Every logging backend has to provide a function with this signature,
+ * which can then be added to the logging core.
+ */
+ typedef std::function<void(const uhd::log::logging_info&)> log_fn_t ;
+
+ /*! Add logging backend to the log system
+ *
+ * \param key Identifies the logging backend in the logging core
+ * \param logger_fn function which actually logs messages to this backend
+ */
+ UHD_API void add_logger(const std::string &key, log_fn_t logger_fn);
+ }
+} /* namespace uhd::log */
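+
+// Illustrative usage (editor's sketch; "my_sink" and the lambda are made up
+// for this example): a backend is any callable taking a logging_info record
+// and can be registered once at startup:
+//
+//   uhd::log::add_logger("my_sink",
+//       [](const uhd::log::logging_info &info) {
+//           std::cerr << info.component << ": " << info.message << std::endl;
+//       }
+//   );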
+
+#endif /* INCLUDED_UHD_UTILS_LOG_ADD_HPP */
diff --git a/host/include/uhd/utils/msg.hpp b/host/include/uhd/utils/msg.hpp
deleted file mode 100644
index 2cc5893e7..000000000
--- a/host/include/uhd/utils/msg.hpp
+++ /dev/null
@@ -1,79 +0,0 @@
-//
-// Copyright 2011-2013 Ettus Research LLC
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-//
-
-#ifndef INCLUDED_UHD_UTILS_MSG_HPP
-#define INCLUDED_UHD_UTILS_MSG_HPP
-
-#include <uhd/config.hpp>
-#include <uhd/utils/pimpl.hpp>
-#include <ostream>
-#include <iomanip>
-#include <string>
-
-/*!
- * A UHD message macro with configurable type.
- * Usage: UHD_MSG(warning) << "some warning message" << std::endl;
- */
-#define UHD_MSG(type) \
- uhd::msg::_msg(uhd::msg::type)()
-
-//! Helpful debug tool to print site info
-#define UHD_HERE() \
- UHD_MSG(status) << __FILE__ << ":" << __LINE__ << std::endl
-
-//! Helpful debug tool to print a variable
-#define UHD_VAR(var) \
- UHD_MSG(status) << #var << " = " << var << std::endl;
-
-//! Helpful debug tool to print a variable in hex
-#define UHD_HEX(var) \
- UHD_MSG(status) << #var << " = 0x" << std::hex << std::setfill('0') << std::setw(8) << var << std::dec << std::endl;
-
-namespace uhd{ namespace msg{
-
- //! Possible message types
- enum type_t{
- status = 's',
- warning = 'w',
- error = 'e',
- fastpath= 'f'
- };
-
- //! Typedef for a user-registered message handler
- typedef void (*handler_t)(type_t, const std::string &);
-
- /*!
- * Register the handler for uhd system messages.
- * Only one handler can be registered at once.
- * This replaces the default std::cout/cerr handler.
- * \param handler a new handler callback function
- */
- UHD_API void register_handler(const handler_t &handler);
-
- //! Internal message object (called by UHD_MSG macro)
- class UHD_API _msg{
- public:
- _msg(const type_t type);
- ~_msg(void);
- std::ostream &operator()(void);
- private:
- UHD_PIMPL_DECL(impl) _impl;
- };
-
-}} //namespace uhd::msg
-
-#endif /* INCLUDED_UHD_UTILS_MSG_HPP */
diff --git a/host/include/uhd/utils/safe_call.hpp b/host/include/uhd/utils/safe_call.hpp
index ab287cc66..d04d89436 100644
--- a/host/include/uhd/utils/safe_call.hpp
+++ b/host/include/uhd/utils/safe_call.hpp
@@ -23,7 +23,7 @@
#include <uhd/utils/log.hpp>
//! helper macro for safe call to produce warnings
-#define _UHD_SAFE_CALL_WARNING(code, what) UHD_LOGV(rarely) << \
+#define _UHD_SAFE_CALL_WARNING(code, what) UHD_LOGGER_ERROR("UHD") << \
UHD_THROW_SITE_INFO("Exception caught in safe-call.") + #code + " -> " + what \
;
diff --git a/host/include/uhd/version.hpp.in b/host/include/uhd/version.hpp.in
index 8cfc7b8c6..c16739a78 100644
--- a/host/include/uhd/version.hpp.in
+++ b/host/include/uhd/version.hpp.in
@@ -24,7 +24,7 @@
* The format is oldest API compatible release - ABI compat number.
* The compatibility number allows pre-release ABI to be versioned.
*/
-#define UHD_VERSION_ABI_STRING "@TRIMMED_VERSION_MAJOR@.@TRIMMED_VERSION_API@.@TRIMMED_VERSION_ABI@"
+#define UHD_VERSION_ABI_STRING "@UHD_VERSION_MAJOR@.@UHD_VERSION_API@.@UHD_VERSION_ABI@"
/*!
* A macro to check UHD version at compile-time.
diff --git a/host/lib/CMakeLists.txt b/host/lib/CMakeLists.txt
index 0cd89953c..fce1021c1 100644
--- a/host/lib/CMakeLists.txt
+++ b/host/lib/CMakeLists.txt
@@ -91,6 +91,7 @@ INCLUDE_SUBDIRECTORY(ic_reg_maps)
INCLUDE_SUBDIRECTORY(types)
INCLUDE_SUBDIRECTORY(convert)
INCLUDE_SUBDIRECTORY(rfnoc)
+INCLUDE_SUBDIRECTORY(cal)
INCLUDE_SUBDIRECTORY(usrp)
INCLUDE_SUBDIRECTORY(usrp_clock)
INCLUDE_SUBDIRECTORY(utils)
@@ -140,10 +141,10 @@ ENDIF(ENABLE_C_API)
# Add DLL resource file to Windows build
########################################################################
IF(MSVC)
- MATH(EXPR TRIMMED_VERSION_MAJOR_API "${TRIMMED_VERSION_MAJOR} * 1000 + ${TRIMMED_VERSION_API}")
- SET(RC_TRIMMED_VERSION_PATCH ${TRIMMED_VERSION_PATCH})
+ MATH(EXPR RC_VERSION_MAJOR_API "${UHD_VERSION_MAJOR} * 1000 + ${UHD_VERSION_API}")
+ SET(RC_VERSION_PATCH ${UHD_VERSION_PATCH})
IF(UHD_VERSION_DEVEL)
- SET(RC_TRIMMED_VERSION_PATCH "999")
+ SET(RC_VERSION_PATCH "999")
ENDIF(UHD_VERSION_DEVEL)
# Allow a custom .rc template file to be used
diff --git a/host/lib/cal/CMakeLists.txt b/host/lib/cal/CMakeLists.txt
new file mode 100644
index 000000000..5c616a9da
--- /dev/null
+++ b/host/lib/cal/CMakeLists.txt
@@ -0,0 +1,32 @@
+#
+# Copyright 2016 Ettus Research
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+########################################################################
+# This file included, use CMake directory variables
+########################################################################
+INCLUDE(CheckIncludeFileCXX)
+MESSAGE(STATUS "")
+
+########################################################################
+# Add calibration container sources
+########################################################################
+INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
+INCLUDE_DIRECTORIES(${CMAKE_CURRENT_BINARY_DIR})
+
+LIBUHD_APPEND_SOURCES(
+ ${CMAKE_CURRENT_SOURCE_DIR}/power_container_impl.cpp
+)
diff --git a/host/lib/cal/interpolation.hpp b/host/lib/cal/interpolation.hpp
new file mode 100644
index 000000000..34f084fbd
--- /dev/null
+++ b/host/lib/cal/interpolation.hpp
@@ -0,0 +1,71 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#ifndef INCLUDED_UHD_INTERPOLATION_HPP
+#define INCLUDED_UHD_INTERPOLATION_HPP
+
+#include <uhd/exception.hpp>
+#include <boost/format.hpp>
+#include <map>
+#include <cmath>
+
+namespace uhd {
+namespace cal {
+
+template<typename in_type, typename out_type>
+struct interp
+{
+public:
+ typedef std::vector<in_type> args_t;
+ typedef std::map<args_t, out_type> container_t;
+
+ /*!
+ * Nearest neighbor interpolation given a mapping: R^n -> R
+ *
+ * 1) search for the nearest point in R^n
+ * 2) find the nearest output scalars in R
+ *
+ * \param data input data container
+ * \param args input data point
+ * \returns interpolated output value
+ */
+ const out_type nn_interp(container_t &data, const args_t &args);
+
+ /*!
+ * Bilinear interpolation given a mapping: R^2 -> R
+ *
+ * 1) search for 4 surrounding points in R^2
+ * 2) find the output scalars in R
+ * 3) solve the system of equations given our input mappings
+ *
+ * \param data input data container
+ * \param args input data point
+ * \returns interpolated output value
+ */
+ const out_type bl_interp(container_t &data, const args_t &args);
+
+private:
+ /*!
+ * Calculate the distance between two points
+ */
+ static in_type calc_dist(const args_t &a, const args_t &b);
+};
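+
+// Illustrative usage (editor's sketch, values made up): with a container
+// mapping (freq, gain) points to measured power, a nearest-neighbor lookup
+// would be roughly:
+//
+//   interp<double, double> ip;
+//   interp<double, double>::container_t data;
+//   data[std::vector<double>{1e9, 0.0}] = -14.5;
+//   data[std::vector<double>{2e9, 0.0}] = -15.0;
+//   double val = ip.nn_interp(data, std::vector<double>{1.5e9, 0.0});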
+
+} // namespace cal
+} // namespace uhd
+
+#endif /* INCLUDED_UHD_INTERPOLATION_HPP */
diff --git a/host/lib/cal/interpolation.ipp b/host/lib/cal/interpolation.ipp
new file mode 100644
index 000000000..1544b041c
--- /dev/null
+++ b/host/lib/cal/interpolation.ipp
@@ -0,0 +1,199 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#ifndef INCLUDED_UHD_INTERPOLATION_IPP
+#define INCLUDED_UHD_INTERPOLATION_IPP
+
+#include "interpolation.hpp"
+#include <uhd/utils/log.hpp>
+#include <boost/numeric/ublas/io.hpp>
+#include <boost/numeric/ublas/matrix.hpp>
+#include <boost/numeric/ublas/lu.hpp>
+
+using namespace boost::numeric;
+
+namespace uhd {
+namespace cal {
+
+#define CAL_INTERP_METHOD(return_type, method, args, ...) \
+ template<typename in_type, typename out_type> \
+ return_type interp<in_type, out_type>::\
+ method(args, __VA_ARGS__)
+
+#define ARGS_T typename interp<in_type, out_type>::args_t
+#define CONTAINER_T typename interp<in_type, out_type>::container_t
+
+CAL_INTERP_METHOD(in_type, calc_dist, const ARGS_T &a, const ARGS_T &b)
+{
+ in_type dist = 0;
+ for (size_t i = 0; i < std::min(a.size(), b.size()); i++)
+ {
+ dist += std::abs(a[i] - b[i]);
+ }
+ return dist;
+}
+
+CAL_INTERP_METHOD(const out_type, nn_interp, CONTAINER_T &data, const ARGS_T &args)
+{
+ // Check the cache for the output
+ if (data.find(args) != data.end()) {
+ return data[args];
+ }
+
+ out_type output = 0;
+ in_type min_dist = 0;
+ typename container_t::const_iterator citer;
+ for (citer = data.begin(); citer != data.end(); citer++)
+ {
+ in_type dist = calc_dist(citer->first, args);
+ if (citer == data.begin() || dist < min_dist) {
+ min_dist = dist;
+ output = data[citer->first];
+ }
+ }
+
+ return output;
+}
+
+CAL_INTERP_METHOD(const out_type, bl_interp, CONTAINER_T &data, const ARGS_T &args)
+{
+ if (args.size() != 2) {
+ throw uhd::assertion_error(str(boost::format(
+ "Bilinear interpolation expects 2D values. Received %d.")
+ % args.size()
+ ));
+ }
+
+ if (data.size() < 4) {
+ throw uhd::assertion_error(str(boost::format(
+ "Bilinear interpolation requires at least 4 input points. Found %d.")
+ % data.size()
+ ));
+ }
+
+ // Locate the nearest 4 points
+ typedef std::pair<interp<in_type, out_type>::args_t, out_type> cal_pair_t;
+ typename std::vector<cal_pair_t> nearest;
+
+ // Initialize the resulting pair to something
+ cal_pair_t pair = *data.begin();
+
+ for (size_t i = 0; i < 4; i++) {
+ bool init = true;
+ in_type min_dist = 0;
+ typename container_t::const_iterator citer;
+ for (citer = data.begin(); citer != data.end(); citer++)
+ {
+ cal_pair_t temp = *citer;
+ if (std::find(nearest.begin(), nearest.end(), temp) == nearest.end())
+ {
+ in_type dist = calc_dist(citer->first, args);
+ if (dist < min_dist || init)
+ {
+ min_dist = dist;
+ pair = temp;
+ init = false;
+ }
+ }
+ }
+ // Push back the nearest pair
+ nearest.push_back(pair);
+ }
+
+ //
+ // Since these points are not grid aligned,
+ // we perform irregular bilinear interpolation.
+ // The interpolation function is found using Lagrange multipliers:
+ //
+ // f(x, y) = ax^2 + bxy + cy^2 + dx + ey + f
+ //
+ // The coefficients are found by solving the following system:
+ //
+ //      A        x     b
+ //  | E X' |   | s | = | 0 |
+ //  | X 0  |   | l |   | z |
+ //
+ // where s is the vector of the coefficients above and l holds the
+ // Lagrange multipliers.
+ //
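+ // (Editor's note, an illustrative reading of the code below: with E only
+ // penalizing the quadratic terms a, b, c, this is the KKT system of
+ //
+ //   minimize (a^2 + b^2 + c^2) / 2   subject to   f(x_i, y_i) = z_i, i = 1..4
+ //
+ // so solving the 10x10 block system yields the six coefficients in s plus
+ // four Lagrange multipliers in l.)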
+ typename ublas::matrix<in_type> A(10, 10, 0.0);
+
+ // E
+ A(0, 0) = 1.0; A(1, 1) = 1.0; A(2, 2) = 1.0;
+
+ in_type x1, x2, x3, x4;
+ in_type y1, y2, y3, y4;
+
+ x1 = nearest[0].first[0]; y1 = nearest[0].first[1];
+ x2 = nearest[1].first[0]; y2 = nearest[1].first[1];
+ x3 = nearest[2].first[0]; y3 = nearest[2].first[1];
+ x4 = nearest[3].first[0]; y4 = nearest[3].first[1];
+
+ // X
+ A(0, 6) = x1*x1; A(1, 6) = x1*y1; A(2, 6) = y1*y1; A(3, 6) = x1; A(4, 6) = y1; A(5, 6) = 1.0;
+ A(0, 7) = x2*x2; A(1, 7) = x2*y2; A(2, 7) = y2*y2; A(3, 7) = x2; A(4, 7) = y2; A(5, 7) = 1.0;
+ A(0, 8) = x3*x3; A(1, 8) = x3*y3; A(2, 8) = y3*y3; A(3, 8) = x3; A(4, 8) = y3; A(5, 8) = 1.0;
+ A(0, 9) = x4*x4; A(1, 9) = x4*y4; A(2, 9) = y4*y4; A(3, 9) = x4; A(4, 9) = y4; A(5, 9) = 1.0;
+
+ // X'
+ A(6, 0) = x1*x1; A(6, 1) = x1*y1; A(6, 2) = y1*y1; A(6, 3) = x1; A(6, 4) = y1; A(6, 5) = 1.0;
+ A(7, 0) = x2*x2; A(7, 1) = x2*y2; A(7, 2) = y2*y2; A(7, 3) = x2; A(7, 4) = y2; A(7, 5) = 1.0;
+ A(8, 0) = x3*x3; A(8, 1) = x3*y3; A(8, 2) = y3*y3; A(8, 3) = x3; A(8, 4) = y3; A(8, 5) = 1.0;
+ A(9, 0) = x4*x4; A(9, 1) = x4*y4; A(9, 2) = y4*y4; A(9, 3) = x4; A(9, 4) = y4; A(9, 5) = 1.0;
+
+ // z
+ typename ublas::vector<in_type> b(10, 0.0);
+ b(6) = nearest[0].second;
+ b(7) = nearest[1].second;
+ b(8) = nearest[2].second;
+ b(9) = nearest[3].second;
+
+ typename ublas::matrix<in_type> A_t = A;
+ typename ublas::vector<in_type> s = b;
+ typename ublas::permutation_matrix<in_type> P(A_t.size1());
+
+ // Use LUP factorization to solve for the coefficients
+ // We're solving the problem in the form of Ax = b
+ bool is_singular = ublas::lu_factorize(A_t, P);
+
+ out_type output = 0;
+
+ // Fall back to 1D interpolation if the matrix is singular
+ if (is_singular) {
+ // Warn the user that the A matrix is singular
+ UHD_LOGGER_WARNING("CAL") << "Bilinear interpolation: singular matrix detected. "
+ << "Performing 1D linear interpolation against the nearest measurements. "
+ << "Provide calibration data with more measurements";
+
+ output = (b[7] - b[6]) / 2.0;
+ output += b[6];
+ return output;
+ }
+ ublas::lu_substitute(A_t, P, s);
+
+ in_type x = args[0];
+ in_type y = args[1];
+
+ // Utilize the solution to calculate the interpolation function
+ output = s[0]*x*x + s[1]*x*y + s[2]*y*y + s[3]*x + s[4]*y + s[5];
+ return output;
+}
+
+} // namespace cal
+} // namespace uhd
+
+#endif /* INCLUDED_UHD_INTERPOLATION_IPP */
diff --git a/host/lib/cal/power_container_impl.cpp b/host/lib/cal/power_container_impl.cpp
new file mode 100644
index 000000000..5d07c3b7e
--- /dev/null
+++ b/host/lib/cal/power_container_impl.cpp
@@ -0,0 +1,80 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#include "power_container_impl.hpp"
+#include <uhd/exception.hpp>
+#include <boost/format.hpp>
+
+using namespace uhd;
+using namespace uhd::cal;
+
+power_container::sptr power_container::make()
+{
+ return power_container::sptr(new power_container_impl());
+}
+
+power_container_impl::power_container_impl() :
+ _nargs(0), _mode(interp_mode_t::NEAREST)
+{
+ /* NOP */
+}
+
+double power_container_impl::get(const std::vector<double> &args)
+{
+ this->verify_nargs(args);
+ switch (_mode)
+ {
+ case interp_mode_t::BILINEAR :
+ return _interpolator.bl_interp(_data, args);
+ case interp_mode_t::NEAREST :
+ return _interpolator.nn_interp(_data, args);
+ default:
+ return _interpolator.nn_interp(_data, args);
+ }
+}
+
+void power_container_impl::add(const double output, const std::vector<double> &args)
+{
+ if (_nargs == 0)
+ {
+ _nargs = args.size();
+ _mode = _nargs == 2 ? interp_mode_t::BILINEAR : interp_mode_t::NEAREST;
+ }
+ this->verify_nargs(args);
+ _data[args] = output;
+}
+
+void power_container_impl::add_metadata(const power_container::metadata_t &data)
+{
+ _metadata = data;
+}
+
+const power_container_impl::metadata_t &power_container_impl::get_metadata()
+{
+ return _metadata;
+}
+
+void power_container_impl::verify_nargs(const std::vector<double> &args)
+{
+ // Check that the number of arguments matches what this container expects
+ if (args.size() != _nargs) {
+ throw uhd::assertion_error(str(boost::format(
+ "power_container_impl: Expected %d number of arguments/values instead of %d")
+ % _nargs % args.size()
+ ));
+ }
+}
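+
+// Illustrative usage (editor's sketch, values made up): the container built
+// here maps N-dimensional measurement points to a power value, e.g.
+//
+//   uhd::cal::power_container::sptr cal = uhd::cal::power_container::make();
+//   cal->add(-14.5, std::vector<double>{1e9, 0.0});   // (freq, gain) -> dB
+//   cal->add(-15.2, std::vector<double>{2e9, 0.0});
+//   cal->add(-13.9, std::vector<double>{1e9, 10.0});
+//   cal->add(-14.8, std::vector<double>{2e9, 10.0});
+//   double power = cal->get(std::vector<double>{1.5e9, 5.0});  // bilinear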
diff --git a/host/lib/cal/power_container_impl.hpp b/host/lib/cal/power_container_impl.hpp
new file mode 100644
index 000000000..4c2bcab79
--- /dev/null
+++ b/host/lib/cal/power_container_impl.hpp
@@ -0,0 +1,77 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#ifndef INCLUDED_UHD_CAL_POWER_CONTAINER_IMPL_HPP
+#define INCLUDED_UHD_CAL_POWER_CONTAINER_IMPL_HPP
+
+#include "interpolation.ipp"
+#include <uhd/cal/power_container.hpp>
+
+namespace uhd {
+namespace cal {
+
+enum interp_mode_t
+{
+ BILINEAR, //!< bilinear interpolation
+ NEAREST //!< nearest neighbor interpolation
+};
+
+class power_container_impl : public power_container {
+public:
+ typedef std::map<std::vector<double>, double> container_t;
+
+ power_container_impl();
+
+ double get(const std::vector<double> &args);
+ const metadata_t &get_metadata();
+
+ void add(const double output, const std::vector<double> &args);
+ void add_metadata(const metadata_t &data);
+
+private:
+ // Container data to be serialized
+ size_t _nargs;
+ metadata_t _metadata;
+ interp_mode_t _mode;
+ container_t _data;
+
+ interp<double, double> _interpolator;
+
+ void verify_nargs(const std::vector<double> &args);
+
+protected:
+ friend class boost::serialization::access;
+
+ void serialize(iarchive_type & ar, const unsigned int) {
+ ar & _nargs;
+ ar & _metadata;
+ ar & _mode;
+ ar & _data;
+ }
+
+ void serialize(oarchive_type & ar, const unsigned int) {
+ ar & _nargs;
+ ar & _metadata;
+ ar & _mode;
+ ar & _data;
+ }
+};
+
+} // namespace cal
+} // namespace uhd
+
+#endif /* INCLUDED_UHD_CAL_POWER_CONTAINER_IMPL_HPP */
diff --git a/host/lib/convert/convert_fc32_item32.cpp b/host/lib/convert/convert_fc32_item32.cpp
index 4a188780a..38f71a62b 100644
--- a/host/lib/convert/convert_fc32_item32.cpp
+++ b/host/lib/convert/convert_fc32_item32.cpp
@@ -17,7 +17,7 @@
#include "convert_common.hpp"
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/math/special_functions/round.hpp>
#include <vector>
diff --git a/host/lib/convert/convert_impl.cpp b/host/lib/convert/convert_impl.cpp
index d624bf970..33f88d70b 100644
--- a/host/lib/convert/convert_impl.cpp
+++ b/host/lib/convert/convert_impl.cpp
@@ -22,7 +22,6 @@
#include <uhd/exception.hpp>
#include <stdint.h>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <complex>
using namespace uhd;
@@ -81,10 +80,7 @@ void uhd::convert::register_converter(
get_table()[id][prio] = fcn;
//----------------------------------------------------------------//
- UHD_LOGV(always) << "register_converter: " << id.to_pp_string() << std::endl
- << " prio: " << prio << std::endl
- << std::endl
- ;
+ UHD_LOG_TRACE("CONVERT", boost::format("register_converter: %s prio: %s") % id.to_string() % prio)
//----------------------------------------------------------------//
}
@@ -100,12 +96,11 @@ convert::function_type convert::get_converter(
//find a matching priority
priority_type best_prio = -1;
- BOOST_FOREACH(priority_type prio_i, get_table()[id].keys()){
+ for(priority_type prio_i: get_table()[id].keys()){
if (prio_i == prio) {
//----------------------------------------------------------------//
- UHD_LOGV(always) << "get_converter: For converter ID: " << id.to_pp_string() << std::endl
- << "Using prio: " << prio << std::endl
- << std::endl
+ UHD_LOGGER_DEBUG("CONVERT") << "get_converter: For converter ID: " << id.to_pp_string()
+ << " Using prio: " << prio;
;
//----------------------------------------------------------------//
return get_table()[id][prio];
@@ -118,10 +113,8 @@ convert::function_type convert::get_converter(
"Cannot find a conversion routine [with prio] for " + id.to_pp_string());
//----------------------------------------------------------------//
- UHD_LOGV(always) << "get_converter: For converter ID: " << id.to_pp_string() << std::endl
- << "Using prio: " << best_prio << std::endl
- << std::endl
- ;
+ UHD_LOGGER_DEBUG("CONVERT") << "get_converter: For converter ID: " << id.to_pp_string()
+ << " Using prio: " << best_prio;
//----------------------------------------------------------------//
//otherwise, return best prio
diff --git a/host/lib/convert/convert_pack_sc12.cpp b/host/lib/convert/convert_pack_sc12.cpp
index fd32fc95f..aaa6f2632 100644
--- a/host/lib/convert/convert_pack_sc12.cpp
+++ b/host/lib/convert/convert_pack_sc12.cpp
@@ -17,7 +17,7 @@
#include "convert_common.hpp"
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/math/special_functions/round.hpp>
#include <vector>
diff --git a/host/lib/convert/convert_unpack_sc12.cpp b/host/lib/convert/convert_unpack_sc12.cpp
index acc815951..f42e51c00 100644
--- a/host/lib/convert/convert_unpack_sc12.cpp
+++ b/host/lib/convert/convert_unpack_sc12.cpp
@@ -17,7 +17,7 @@
#include "convert_common.hpp"
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/math/special_functions/round.hpp>
#include <vector>
diff --git a/host/lib/device.cpp b/host/lib/device.cpp
index c75ecad77..f8b2fc498 100644
--- a/host/lib/device.cpp
+++ b/host/lib/device.cpp
@@ -19,10 +19,9 @@
#include <uhd/types/dict.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/static.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/weak_ptr.hpp>
#include <boost/functional/hash.hpp>
@@ -53,7 +52,7 @@ static size_t hash_device_addr(
boost::hash_combine(hash, dev_addr["resource"]);
}
else {
- BOOST_FOREACH(const std::string &key, uhd::sorted(dev_addr.keys())){
+ for(const std::string &key: uhd::sorted(dev_addr.keys())){
boost::hash_combine(hash, key);
boost::hash_combine(hash, dev_addr[key]);
}
@@ -74,7 +73,7 @@ void device::register_device(
const make_t &make,
const device_filter_t filter
){
- UHD_LOGV(always) << "registering device" << std::endl;
+ // UHD_LOGGER_TRACE("UHD") << "registering device";
get_dev_fcn_regs().push_back(dev_fcn_reg_t(find, make, filter));
}
@@ -90,7 +89,7 @@ device_addrs_t device::find(const device_addr_t &hint, device_filter_t filter){
device_addrs_t device_addrs;
- BOOST_FOREACH(const dev_fcn_reg_t &fcn, get_dev_fcn_regs()) {
+ for(const dev_fcn_reg_t &fcn: get_dev_fcn_regs()) {
try {
if (filter == ANY or fcn.get<2>() == filter) {
device_addrs_t discovered_addrs = fcn.get<0>()(hint);
@@ -102,7 +101,7 @@ device_addrs_t device::find(const device_addr_t &hint, device_filter_t filter){
}
}
catch (const std::exception &e) {
- UHD_MSG(error) << "Device discovery error: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("UHD") << "Device discovery error: " << e.what();
}
}
@@ -118,17 +117,17 @@ device::sptr device::make(const device_addr_t &hint, device_filter_t filter, siz
typedef boost::tuple<device_addr_t, make_t> dev_addr_make_t;
std::vector<dev_addr_make_t> dev_addr_makers;
- BOOST_FOREACH(const dev_fcn_reg_t &fcn, get_dev_fcn_regs()){
+ for(const dev_fcn_reg_t &fcn: get_dev_fcn_regs()){
try{
if(filter == ANY or fcn.get<2>() == filter){
- BOOST_FOREACH(device_addr_t dev_addr, fcn.get<0>()(hint)){
+ for(device_addr_t dev_addr: fcn.get<0>()(hint)){
//append the discovered address and its factory function
dev_addr_makers.push_back(dev_addr_make_t(dev_addr, fcn.get<1>()));
}
}
}
catch(const std::exception &e){
- UHD_MSG(error) << "Device discovery error: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("UHD") << "Device discovery error: " << e.what() ;
}
}
@@ -150,11 +149,11 @@ device::sptr device::make(const device_addr_t &hint, device_filter_t filter, siz
device_addr_t dev_addr; make_t maker;
boost::tie(dev_addr, maker) = dev_addr_makers.at(which);
size_t dev_hash = hash_device_addr(dev_addr);
- UHD_LOG << boost::format("Device hash: %u") % dev_hash << std::endl;
+ UHD_LOGGER_TRACE("UHD") << boost::format("Device hash: %u") % dev_hash ;
//copy keys that were in hint but not in dev_addr
//this way, we can pass additional transport arguments
- BOOST_FOREACH(const std::string &key, hint.keys()){
+ for(const std::string &key: hint.keys()){
if (not dev_addr.has_key(key)) dev_addr[key] = hint[key];
}
diff --git a/host/lib/device3.cpp b/host/lib/device3.cpp
index 3b316e8ea..044459aec 100644
--- a/host/lib/device3.cpp
+++ b/host/lib/device3.cpp
@@ -15,9 +15,10 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <boost/format.hpp>
#include <uhd/device3.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
+#include <boost/format.hpp>
+#include <boost/thread/lock_guard.hpp>
using namespace uhd;
using namespace uhd::rfnoc;
@@ -68,7 +69,8 @@ std::vector<rfnoc::block_id_t> device3::find_blocks(const std::string &block_id_
void device3::clear()
{
- BOOST_FOREACH(const block_ctrl_base::sptr &block, _rfnoc_block_ctrl) {
+ boost::lock_guard<boost::mutex> lock(_block_ctrl_mutex);
+ for(const block_ctrl_base::sptr &block: _rfnoc_block_ctrl) {
block->clear();
}
}
diff --git a/host/lib/experts/expert_container.cpp b/host/lib/experts/expert_container.cpp
index 853e3e4b7..f010293b7 100644
--- a/host/lib/experts/expert_container.cpp
+++ b/host/lib/experts/expert_container.cpp
@@ -17,9 +17,8 @@
#include "expert_container.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/function.hpp>
#include <boost/bind.hpp>
#include <boost/make_shared.hpp>
@@ -185,7 +184,7 @@ public:
EX_LOG(1, "cycle check ... PASSED");
} else {
EX_LOG(1, "cycle check ... ERROR!!!");
- BOOST_FOREACH(const std::string& e, back_edges) {
+ for(const std::string& e: back_edges) {
EX_LOG(2, "back edge: " + e);
}
}
@@ -193,7 +192,7 @@ public:
//Test 2: Check data node input and output edges
std::vector<std::string> data_node_issues;
- BOOST_FOREACH(const vertex_map_t::value_type& v, _datanode_map) {
+ for(const vertex_map_t::value_type& v: _datanode_map) {
size_t in_count = 0, out_count = 0;
for (std::pair<edge_iter, edge_iter> ei = boost::edges(_expert_dag);
ei.first != ei.second;
@@ -238,7 +237,7 @@ public:
EX_LOG(1, "data node check ... PASSED");
} else {
EX_LOG(1, "data node check ... WARNING!");
- BOOST_FOREACH(const std::string& i, data_node_issues) {
+ for(const std::string& i: data_node_issues) {
EX_LOG(2, i);
}
}
@@ -246,7 +245,7 @@ public:
//Test 3: Check worker node input and output edges
std::vector<std::string> worker_issues;
- BOOST_FOREACH(const vertex_map_t::value_type& v, _worker_map) {
+ for(const vertex_map_t::value_type& v: _worker_map) {
size_t in_count = 0, out_count = 0;
for (std::pair<edge_iter, edge_iter> ei = boost::edges(_expert_dag);
ei.first != ei.second;
@@ -268,7 +267,7 @@ public:
EX_LOG(1, "worker check ... PASSED");
} else {
EX_LOG(1, "worker check ... WARNING!");
- BOOST_FOREACH(const std::string& i, worker_issues) {
+ for(const std::string& i: worker_issues) {
EX_LOG(2, i);
}
}
@@ -347,7 +346,7 @@ protected:
_worker_map.insert(vertex_map_t::value_type(worker->get_name(), gr_node));
//For each input, add an edge from the input to this node
- BOOST_FOREACH(const std::string& node_name, worker->get_inputs()) {
+ for(const std::string& node_name: worker->get_inputs()) {
vertex_map_t::const_iterator node = _datanode_map.find(node_name);
if (node != _datanode_map.end()) {
boost::add_edge((*node).second, gr_node, _expert_dag);
@@ -358,7 +357,7 @@ protected:
}
//For each output, add an edge from this node to the output
- BOOST_FOREACH(const std::string& node_name, worker->get_outputs()) {
+ for(const std::string& node_name: worker->get_outputs()) {
vertex_map_t::const_iterator node = _datanode_map.find(node_name);
if (node != _datanode_map.end()) {
boost::add_edge(gr_node, (*node).second, _expert_dag);
@@ -423,7 +422,7 @@ private:
boost::depth_first_search(_expert_dag, boost::visitor(cdet_vis));
if (not back_edges.empty()) {
std::string edges;
- BOOST_FOREACH(const std::string& e, back_edges) {
+ for(const std::string& e: back_edges) {
edges += "* " + e + "";
}
throw uhd::runtime_error("Cannot resolve expert because it has at least one cycle!\n"
@@ -514,7 +513,7 @@ private:
{
std::string indents;
for (size_t i = 0; i < depth; i++) indents += "- ";
- UHD_MSG(fastpath) << "[expert::" + _name + "] " << indents << str << std::endl;
+ UHD_LOG_DEBUG("EXPERT","[expert::" + _name + "] " << indents << str)
}
private:
diff --git a/host/lib/experts/expert_nodes.hpp b/host/lib/experts/expert_nodes.hpp
index 6040cd19e..4a285cc80 100644
--- a/host/lib/experts/expert_nodes.hpp
+++ b/host/lib/experts/expert_nodes.hpp
@@ -23,7 +23,6 @@
#include <uhd/utils/dirty_tracked.hpp>
#include <uhd/types/time_spec.hpp>
#include <boost/function.hpp>
-#include <boost/foreach.hpp>
#include <boost/thread/recursive_mutex.hpp>
#include <boost/thread.hpp>
#include <boost/units/detail/utility.hpp>
@@ -409,7 +408,7 @@ namespace uhd { namespace experts {
// Worker node specific
std::list<std::string> get_inputs() const {
std::list<std::string> retval;
- BOOST_FOREACH(data_accessor_t* acc, _inputs) {
+ for(data_accessor_t* acc: _inputs) {
retval.push_back(acc->node().get_name());
}
return retval;
@@ -417,7 +416,7 @@ namespace uhd { namespace experts {
std::list<std::string> get_outputs() const {
std::list<std::string> retval;
- BOOST_FOREACH(data_accessor_t* acc, _outputs) {
+ for(data_accessor_t* acc: _outputs) {
retval.push_back(acc->node().get_name());
}
return retval;
@@ -442,14 +441,14 @@ namespace uhd { namespace experts {
// Graph resolution specific
virtual bool is_dirty() const {
bool inputs_dirty = false;
- BOOST_FOREACH(data_accessor_t* acc, _inputs) {
+ for(data_accessor_t* acc: _inputs) {
inputs_dirty |= acc->node().is_dirty();
}
return inputs_dirty;
}
virtual void mark_clean() {
- BOOST_FOREACH(data_accessor_t* acc, _inputs) {
+ for(data_accessor_t* acc: _inputs) {
acc->node().mark_clean();
}
}
diff --git a/host/lib/image_loader.cpp b/host/lib/image_loader.cpp
index 91dd325dd..8b56d607e 100644
--- a/host/lib/image_loader.cpp
+++ b/host/lib/image_loader.cpp
@@ -20,13 +20,12 @@
#include <utility>
#include <boost/filesystem.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <uhd/exception.hpp>
#include <uhd/image_loader.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/static.hpp>
namespace fs = boost::filesystem;
@@ -47,8 +46,8 @@ UHD_SINGLETON_FCN(string_map_t, get_recovery_strings);
void uhd::image_loader::register_image_loader(const std::string &device_type,
const loader_fcn_t &loader_fcn,
const std::string &recovery_instructions){
- UHD_LOGV(always) << "Registering image loader and recovery instructions for "
- << device_type << std::endl;
+ // UHD_LOGGER_TRACE("UHD") << "Registering image loader and recovery instructions for "
+ // << device_type;
get_image_loaders().insert(loader_fcn_pair_t(device_type, loader_fcn));
get_recovery_strings().insert(string_pair_t(device_type, recovery_instructions));
@@ -69,7 +68,7 @@ bool uhd::image_loader::load(const uhd::image_loader::image_loader_args_t &image
else return get_image_loaders().at(type)(image_loader_args);
}
else{
- BOOST_FOREACH(const loader_fcn_pair_t &loader_fcn_pair, get_image_loaders()){
+ for(const loader_fcn_pair_t &loader_fcn_pair: get_image_loaders()){
if(loader_fcn_pair.second(image_loader_args)) return true;
}
return false;
diff --git a/host/lib/property_tree.cpp b/host/lib/property_tree.cpp
index 76d7bccba..d62f61878 100644
--- a/host/lib/property_tree.cpp
+++ b/host/lib/property_tree.cpp
@@ -17,7 +17,6 @@
#include <uhd/property_tree.hpp>
#include <uhd/types/dict.hpp>
-#include <boost/foreach.hpp>
#include <boost/thread/mutex.hpp>
#include <boost/make_shared.hpp>
#include <iostream>
@@ -98,7 +97,7 @@ public:
node_type *parent = NULL;
node_type *node = &_guts->root;
- BOOST_FOREACH(const std::string &name, path_tokenizer(path)){
+ for(const std::string &name: path_tokenizer(path)){
if (not node->has_key(name)) throw_path_not_found(path);
parent = node;
node = &(*node)[name];
@@ -112,7 +111,7 @@ public:
boost::mutex::scoped_lock lock(_guts->mutex);
node_type *node = &_guts->root;
- BOOST_FOREACH(const std::string &name, path_tokenizer(path)){
+ for(const std::string &name: path_tokenizer(path)){
if (not node->has_key(name)) return false;
node = &(*node)[name];
}
@@ -124,7 +123,7 @@ public:
boost::mutex::scoped_lock lock(_guts->mutex);
node_type *node = &_guts->root;
- BOOST_FOREACH(const std::string &name, path_tokenizer(path)){
+ for(const std::string &name: path_tokenizer(path)){
if (not node->has_key(name)) throw_path_not_found(path);
node = &(*node)[name];
}
@@ -137,7 +136,7 @@ public:
boost::mutex::scoped_lock lock(_guts->mutex);
node_type *node = &_guts->root;
- BOOST_FOREACH(const std::string &name, path_tokenizer(path)){
+ for(const std::string &name: path_tokenizer(path)){
if (not node->has_key(name)) (*node)[name] = node_type();
node = &(*node)[name];
}
@@ -150,7 +149,7 @@ public:
boost::mutex::scoped_lock lock(_guts->mutex);
node_type *node = &_guts->root;
- BOOST_FOREACH(const std::string &name, path_tokenizer(path)){
+ for(const std::string &name: path_tokenizer(path)){
if (not node->has_key(name)) throw_path_not_found(path);
node = &(*node)[name];
}
diff --git a/host/lib/rfnoc/block_ctrl_base.cpp b/host/lib/rfnoc/block_ctrl_base.cpp
index a6eecc20b..e9d6c0030 100644
--- a/host/lib/rfnoc/block_ctrl_base.cpp
+++ b/host/lib/rfnoc/block_ctrl_base.cpp
@@ -20,16 +20,15 @@
#include "ctrl_iface.hpp"
#include "nocscript/block_iface.hpp"
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/convert.hpp>
#include <uhd/rfnoc/block_ctrl_base.hpp>
#include <uhd/rfnoc/constants.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
-#define UHD_BLOCK_LOG() UHD_LOGV(never)
+#define UHD_BLOCK_LOG() UHD_LOGGER_TRACE("RFNOC")
using namespace uhd;
using namespace uhd::rfnoc;
@@ -48,17 +47,16 @@ inline uint32_t _sr_to_addr64(uint32_t reg) { return reg * 8; }; // for peek64
block_ctrl_base::block_ctrl_base(
const make_args_t &make_args
) : _tree(make_args.tree),
- _transport_is_big_endian(make_args.is_big_endian),
_ctrl_ifaces(make_args.ctrl_ifaces),
_base_address(make_args.base_address & 0xFFF0)
{
- UHD_BLOCK_LOG() << "block_ctrl_base()" << std::endl;
+ UHD_BLOCK_LOG() << "block_ctrl_base()" ;
/*** Identify this block (NoC-ID, block-ID, and block definition) *******/
// Read NoC-ID (name is passed in through make_args):
uint64_t noc_id = sr_read64(SR_READBACK_REG_ID);
_block_def = blockdef::make_from_noc_id(noc_id);
- if (_block_def) UHD_BLOCK_LOG() << "Found valid blockdef" << std::endl;
+ if (_block_def) UHD_BLOCK_LOG() << "Found valid blockdef" ;
if (not _block_def)
_block_def = blockdef::make_from_noc_id(DEFAULT_NOC_ID);
UHD_ASSERT_THROW(_block_def);
@@ -70,7 +68,7 @@ block_ctrl_base::block_ctrl_base(
}
UHD_BLOCK_LOG()
<< "NOC ID: " << str(boost::format("0x%016X ") % noc_id)
- << "Block ID: " << _block_id << std::endl;
+ << "Block ID: " << _block_id ;
/*** Initialize property tree *******************************************/
_root_path = "xbar/" + _block_id.get_local();
@@ -81,7 +79,7 @@ block_ctrl_base::block_ctrl_base(
/*** Configure ports ****************************************************/
size_t n_valid_input_buffers = 0;
- BOOST_FOREACH(const size_t ctrl_port, get_ctrl_ports()) {
+ for(const size_t ctrl_port: get_ctrl_ports()) {
// Set source addresses:
sr_write(SR_BLOCK_SID, get_address(ctrl_port), ctrl_port);
// Set sink buffer sizes:
@@ -95,7 +93,7 @@ block_ctrl_base::block_ctrl_base(
/*** Register names *****************************************************/
blockdef::registers_t sregs = _block_def->get_settings_registers();
- BOOST_FOREACH(const std::string &reg_name, sregs.keys()) {
+ for(const std::string &reg_name: sregs.keys()) {
if (DEFAULT_NAMED_SR.has_key(reg_name)) {
throw uhd::runtime_error(str(
boost::format("Register name %s is already defined!")
@@ -106,7 +104,7 @@ block_ctrl_base::block_ctrl_base(
.set(sregs.get(reg_name));
}
blockdef::registers_t rbacks = _block_def->get_readback_registers();
- BOOST_FOREACH(const std::string &reg_name, rbacks.keys()) {
+ for(const std::string &reg_name: rbacks.keys()) {
_tree->create<size_t>(_root_path / "registers"/ "rb" / reg_name)
.set(rbacks.get(reg_name));
}
@@ -116,10 +114,10 @@ block_ctrl_base::block_ctrl_base(
_init_port_defs("out", _block_def->get_output_ports());
// FIXME this warning always fails until the input buffer code above is fixed
if (_tree->list(_root_path / "ports/in").size() != n_valid_input_buffers) {
- UHD_MSG(warning) <<
+ UHD_LOGGER_WARNING("RFNOC") <<
boost::format("[%s] defines %d input buffer sizes, but %d input ports")
% get_block_id().get() % n_valid_input_buffers % _tree->list(_root_path / "ports/in").size()
- << std::endl;
+ ;
}
/*** Init default block args ********************************************/
@@ -138,14 +136,14 @@ void block_ctrl_base::_init_port_defs(
const size_t first_port_index
) {
size_t port_index = first_port_index;
- BOOST_FOREACH(const blockdef::port_t &port_def, ports) {
+ for(const blockdef::port_t &port_def: ports) {
fs_path port_path = _root_path / "ports" / direction / port_index;
if (not _tree->exists(port_path)) {
_tree->create<blockdef::port_t>(port_path);
}
UHD_RFNOC_BLOCK_TRACE() << "Adding port definition at " << port_path
<< boost::format(": type = '%s' pkt_size = '%s' vlen = '%s'") % port_def["type"] % port_def["pkt_size"] % port_def["vlen"]
- << std::endl;
+ ;
_tree->access<blockdef::port_t>(port_path).set(port_def);
port_index++;
}
@@ -155,12 +153,12 @@ void block_ctrl_base::_init_block_args()
{
blockdef::args_t args = _block_def->get_args();
fs_path arg_path = _root_path / "args";
- BOOST_FOREACH(const size_t port, get_ctrl_ports()) {
+ for(const size_t port: get_ctrl_ports()) {
_tree->create<std::string>(arg_path / port);
}
// First, create all nodes.
- BOOST_FOREACH(const blockdef::arg_t &arg, args) {
+ for(const blockdef::arg_t &arg: args) {
fs_path arg_type_path = arg_path / arg["port"] / arg["name"] / "type";
_tree->create<std::string>(arg_type_path).set(arg["type"]);
fs_path arg_val_path = arg_path / arg["port"] / arg["name"] / "value";
@@ -174,7 +172,7 @@ void block_ctrl_base::_init_block_args()
// TODO: Add coercer
#define _SUBSCRIBE_CHECK_AND_RUN(type, arg_tag, error_message) \
_tree->access<type>(arg_val_path).add_coerced_subscriber(boost::bind((&nocscript::block_iface::run_and_check), _nocscript_iface, arg[#arg_tag], error_message))
- BOOST_FOREACH(const blockdef::arg_t &arg, args) {
+ for(const blockdef::arg_t &arg: args) {
fs_path arg_val_path = arg_path / arg["port"] / arg["name"] / "value";
if (not arg["check"].empty()) {
if (arg["type"] == "string") { _SUBSCRIBE_CHECK_AND_RUN(string, check, arg["check_message"]); }
@@ -193,7 +191,7 @@ void block_ctrl_base::_init_block_args()
}
// Finally: Set the values. This will call subscribers, if we have any.
- BOOST_FOREACH(const blockdef::arg_t &arg, args) {
+ for(const blockdef::arg_t &arg: args) {
fs_path arg_val_path = arg_path / arg["port"] / arg["name"] / "value";
if (not arg["value"].empty()) {
if (arg["type"] == "int_vector") { throw uhd::runtime_error("not yet implemented: int_vector"); }
@@ -218,7 +216,7 @@ std::vector<size_t> block_ctrl_base::get_ctrl_ports() const
std::vector<size_t> ctrl_ports;
ctrl_ports.reserve(_ctrl_ifaces.size());
std::pair<size_t, wb_iface::sptr> it;
- BOOST_FOREACH(it, _ctrl_ifaces) {
+ for(auto it: _ctrl_ifaces) {
ctrl_ports.push_back(it.first);
}
return ctrl_ports;
@@ -227,7 +225,7 @@ std::vector<size_t> block_ctrl_base::get_ctrl_ports() const
void block_ctrl_base::sr_write(const uint32_t reg, const uint32_t data, const size_t port)
{
//UHD_BLOCK_LOG() << " ";
- //UHD_RFNOC_BLOCK_TRACE() << boost::format("sr_write(%d, %08X, %d)") % reg % data % port << std::endl;
+ //UHD_RFNOC_BLOCK_TRACE() << boost::format("sr_write(%d, %08X, %d)") % reg % data % port ;
if (not _ctrl_ifaces.count(port)) {
throw uhd::key_error(str(boost::format("[%s] sr_write(): No such port: %d") % get_block_id().get() % port));
}
@@ -254,7 +252,7 @@ void block_ctrl_base::sr_write(const std::string &reg, const uint32_t data, cons
reg_addr = uint32_t(_tree->access<size_t>(_root_path / "registers" / "sr" / reg).get());
}
UHD_BLOCK_LOG() << " ";
- UHD_RFNOC_BLOCK_TRACE() << boost::format("sr_write(%s, %08X) ==> ") % reg % data << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << boost::format("sr_write(%s, %08X) ==> ") % reg % data ;
return sr_write(reg_addr, data, port);
}
@@ -341,7 +339,7 @@ void block_ctrl_base::set_command_time(
const size_t port
) {
if (port == ANY_PORT) {
- BOOST_FOREACH(const size_t specific_port, get_ctrl_ports()) {
+ for(const size_t specific_port: get_ctrl_ports()) {
set_command_time(time_spec, specific_port);
}
return;
@@ -379,7 +377,7 @@ void block_ctrl_base::set_command_tick_rate(
const size_t port
) {
if (port == ANY_PORT) {
- BOOST_FOREACH(const size_t specific_port, get_ctrl_ports()) {
+ for(const size_t specific_port: get_ctrl_ports()) {
set_command_tick_rate(tick_rate, specific_port);
}
return;
@@ -412,11 +410,11 @@ void block_ctrl_base::clear_command_time(const size_t port)
void block_ctrl_base::clear()
{
- UHD_RFNOC_BLOCK_TRACE() << "block_ctrl_base::clear() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "block_ctrl_base::clear() " ;
// Call parent...
node_ctrl_base::clear();
// ...then child
- BOOST_FOREACH(const size_t port_index, get_ctrl_ports()) {
+ for(const size_t port_index: get_ctrl_ports()) {
_clear(port_index);
}
}
@@ -431,7 +429,7 @@ uint32_t block_ctrl_base::get_address(size_t block_port) {
**********************************************************************/
void block_ctrl_base::set_args(const uhd::device_addr_t &args, const size_t port)
{
- BOOST_FOREACH(const std::string &key, args.keys()) {
+ for(const std::string &key: args.keys()) {
if (_tree->exists(get_arg_path(key, port))) {
set_arg(key, args.get(key), port);
}
@@ -474,7 +472,7 @@ void block_ctrl_base::set_arg(const std::string &key, const std::string &val, co
device_addr_t block_ctrl_base::get_args(const size_t port) const
{
device_addr_t args;
- BOOST_FOREACH(const std::string &key, _tree->list(_root_path / "args" / port)) {
+ for(const std::string &key: _tree->list(_root_path / "args" / port)) {
args[key] = get_arg(key);
}
return args;
@@ -535,7 +533,7 @@ stream_sig_t block_ctrl_base::_resolve_port_def(const blockdef::port_t &port_def
} else {
stream_sig.item_type = port_def["type"];
}
- //UHD_RFNOC_BLOCK_TRACE() << " item type: " << stream_sig.item_type << std::endl;
+ //UHD_RFNOC_BLOCK_TRACE() << " item type: " << stream_sig.item_type ;
// Vector length
if (port_def.is_variable("vlen")) {
@@ -546,7 +544,7 @@ stream_sig_t block_ctrl_base::_resolve_port_def(const blockdef::port_t &port_def
} else {
stream_sig.vlen = boost::lexical_cast<size_t>(port_def["vlen"]);
}
- //UHD_RFNOC_BLOCK_TRACE() << " vector length: " << stream_sig.vlen << std::endl;
+ //UHD_RFNOC_BLOCK_TRACE() << " vector length: " << stream_sig.vlen ;
// Packet size
if (port_def.is_variable("pkt_size")) {
@@ -568,7 +566,7 @@ stream_sig_t block_ctrl_base::_resolve_port_def(const blockdef::port_t &port_def
} else {
stream_sig.packet_size = boost::lexical_cast<size_t>(port_def["pkt_size"]);
}
- //UHD_RFNOC_BLOCK_TRACE() << " packet size: " << stream_sig.vlen << std::endl;
+ //UHD_RFNOC_BLOCK_TRACE() << " packet size: " << stream_sig.vlen ;
return stream_sig;
}
@@ -579,7 +577,7 @@ stream_sig_t block_ctrl_base::_resolve_port_def(const blockdef::port_t &port_def
**********************************************************************/
void block_ctrl_base::_clear(const size_t port)
{
- UHD_RFNOC_BLOCK_TRACE() << "block_ctrl_base::_clear() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "block_ctrl_base::_clear() " ;
sr_write(SR_CLEAR_TX_FC, 0x00C1EA12, port); // 'CLEAR', but we can write anything, really
sr_write(SR_CLEAR_RX_FC, 0x00C1EA12, port); // 'CLEAR', but we can write anything, really
}
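
Note on the loop changes above: this commit mechanically replaces BOOST_FOREACH with C++11 range-based for loops, with no change in behaviour. A minimal standalone sketch of the idiom, using a plain std::vector as a stand-in for the get_ctrl_ports() container (an assumption for illustration only):

    // Sketch only: std::vector stands in for the container returned by
    // get_ctrl_ports() in the hunks above.
    #include <cstddef>
    #include <iostream>
    #include <vector>

    int main()
    {
        const std::vector<size_t> ctrl_ports{0, 1, 2};

        // Old form (needs <boost/foreach.hpp>):
        //     BOOST_FOREACH(const size_t port, ctrl_ports) { ... }

        // New form, used throughout this commit:
        for (const size_t port : ctrl_ports) {
            std::cout << "clearing port " << port << std::endl;
        }
        return 0;
    }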
diff --git a/host/lib/rfnoc/block_ctrl_base_factory.cpp b/host/lib/rfnoc/block_ctrl_base_factory.cpp
index 0e2d5ae03..9e32b81d7 100644
--- a/host/lib/rfnoc/block_ctrl_base_factory.cpp
+++ b/host/lib/rfnoc/block_ctrl_base_factory.cpp
@@ -16,12 +16,12 @@
//
#include <boost/format.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/blockdef.hpp>
#include <uhd/rfnoc/block_ctrl_base.hpp>
-#define UHD_FACTORY_LOG() UHD_LOGV(never)
+#define UHD_FACTORY_LOG() UHD_LOGGER_TRACE("RFNOC")
using namespace uhd;
using namespace uhd::rfnoc;
@@ -59,7 +59,7 @@ static void lookup_block_key(uint64_t noc_id, make_args_t &make_args)
make_args.block_name = bd->get_name();
return;
} catch (std::exception &e) {
- UHD_MSG(warning) << str(boost::format("Error while looking up name for NoC-ID %016X.\n%s") % noc_id % e.what()) << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << str(boost::format("Error while looking up name for NoC-ID %016X.\n%s") % noc_id % e.what()) ;
}
make_args.block_key = DEFAULT_BLOCK_NAME;
@@ -71,7 +71,7 @@ block_ctrl_base::sptr block_ctrl_base::make(
const make_args_t &make_args_,
uint64_t noc_id
) {
- UHD_FACTORY_LOG() << "[RFNoC Factory] block_ctrl_base::make() " << std::endl;
+ UHD_FACTORY_LOG() << "[RFNoC Factory] block_ctrl_base::make() " ;
make_args_t make_args = make_args_;
// Check if a block key was specified, in this case, we *must* either
@@ -90,7 +90,7 @@ block_ctrl_base::sptr block_ctrl_base::make(
make_args.block_name = make_args.block_key;
}
- UHD_FACTORY_LOG() << "[RFNoC Factory] Using controller key '" << make_args.block_key << "' and block name '" << make_args.block_name << "'" << std::endl;
+ UHD_FACTORY_LOG() << "[RFNoC Factory] Using controller key '" << make_args.block_key << "' and block name '" << make_args.block_name << "'" ;
return get_block_fcn_regs()[make_args.block_key](make_args);
}
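
The factory above also shows the second pattern of this commit: the old UHD_MSG()/UHD_LOGV() macros give way to the component-scoped UHD_LOGGER_* streams from uhd/utils/log.hpp. A hedged usage sketch; it assumes a UHD release new enough to ship these macros and a program linked against libuhd, and the "RFNOC" component string simply follows the convention used in these files:

    // Sketch only: requires a UHD version providing UHD_LOGGER_* and linking
    // against libuhd; the component string is free-form.
    #include <uhd/utils/log.hpp>

    int main()
    {
        // One statement is one log record; the record is emitted when the
        // statement ends, which is why the commit drops the "<< std::endl".
        UHD_LOGGER_TRACE("RFNOC") << "block_ctrl_base::make()";
        UHD_LOGGER_WARNING("RFNOC") << "Error while looking up name for NoC-ID";
        // Whether TRACE output appears depends on the configured log level.
        return 0;
    }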
diff --git a/host/lib/rfnoc/blockdef_xml_impl.cpp b/host/lib/rfnoc/blockdef_xml_impl.cpp
index 78d1995d1..5f8af232d 100644
--- a/host/lib/rfnoc/blockdef_xml_impl.cpp
+++ b/host/lib/rfnoc/blockdef_xml_impl.cpp
@@ -18,10 +18,9 @@
#include <uhd/exception.hpp>
#include <uhd/rfnoc/constants.hpp>
#include <uhd/rfnoc/blockdef.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/algorithm/string.hpp>
@@ -57,7 +56,7 @@ blockdef::port_t::port_t()
{
// This guarantees that we can access these keys
// even if they were never initialized:
- BOOST_FOREACH(const std::string &key, PORT_ARGS.keys()) {
+ for(const std::string &key: PORT_ARGS.keys()) {
set(key, PORT_ARGS[key]);
}
}
@@ -77,7 +76,7 @@ bool blockdef::port_t::is_keyword(const std::string &key) const
bool blockdef::port_t::is_valid() const
{
// Check we have all the keys:
- BOOST_FOREACH(const std::string &key, PORT_ARGS.keys()) {
+ for(const std::string &key: PORT_ARGS.keys()) {
if (not has_key(key)) {
return false;
}
@@ -90,7 +89,7 @@ bool blockdef::port_t::is_valid() const
std::string blockdef::port_t::to_string() const
{
std::string result;
- BOOST_FOREACH(const std::string &key, PORT_ARGS.keys()) {
+ for(const std::string &key: PORT_ARGS.keys()) {
if (has_key(key)) {
result += str(boost::format("%s=%s,") % key % get(key));
}
@@ -125,7 +124,7 @@ blockdef::arg_t::arg_t()
{
// This guarantees that we can access these keys
// even if they were never initialized:
- BOOST_FOREACH(const std::string &key, ARG_ARGS.keys()) {
+ for(const std::string &key: ARG_ARGS.keys()) {
set(key, ARG_ARGS[key]);
}
}
@@ -133,7 +132,7 @@ blockdef::arg_t::arg_t()
bool blockdef::arg_t::is_valid() const
{
// 1. Check we have all the keys:
- BOOST_FOREACH(const std::string &key, ARG_ARGS.keys()) {
+ for(const std::string &key: ARG_ARGS.keys()) {
if (not has_key(key)) {
return false;
}
@@ -151,7 +150,7 @@ bool blockdef::arg_t::is_valid() const
std::string blockdef::arg_t::to_string() const
{
std::string result;
- BOOST_FOREACH(const std::string &key, ARG_ARGS.keys()) {
+ for(const std::string &key: ARG_ARGS.keys()) {
if (has_key(key)) {
result += str(boost::format("%s=%s,") % key % get(key));
}
@@ -218,13 +217,13 @@ public:
pt::ptree propt;
try {
read_xml(filename.string(), propt);
- BOOST_FOREACH(pt::ptree::value_type &v, propt.get_child("nocblock.ids")) {
+ for(pt::ptree::value_type &v: propt.get_child("nocblock.ids")) {
if (v.first == "id" and match_noc_id(v.second.data(), noc_id)) {
return true;
}
}
} catch (std::exception &e) {
- UHD_MSG(warning) << "has_noc_id(): caught exception " << e.what() << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << "has_noc_id(): caught exception " << e.what() ;
return false;
}
return false;
@@ -234,7 +233,7 @@ public:
_type(type),
_noc_id(noc_id)
{
- //UHD_MSG(status) << "Reading XML file: " << filename.string().c_str() << std::endl;
+ //UHD_LOGGER_INFO("RFNOC") << "Reading XML file: " << filename.string().c_str() ;
read_xml(filename.string(), _pt);
try {
// Check key is valid
@@ -302,11 +301,11 @@ public:
std::set<size_t> port_numbers;
size_t n_ports = 0;
ports_t ports;
- BOOST_FOREACH(pt::ptree::value_type &v, _pt.get_child("nocblock.ports")) {
+ for(pt::ptree::value_type &v: _pt.get_child("nocblock.ports")) {
if (v.first != port_type) continue;
// Now we have the correct sink or source node:
port_t port;
- BOOST_FOREACH(const std::string &key, port_t::PORT_ARGS.keys()) {
+ for(const std::string &key: port_t::PORT_ARGS.keys()) {
port[key] = v.second.get(key, port_t::PORT_ARGS[key]);
}
// We have to be extra-careful with the port numbers:
@@ -338,10 +337,10 @@ public:
std::vector<size_t> get_all_port_numbers()
{
std::set<size_t> set_ports;
- BOOST_FOREACH(const port_t &port, get_input_ports()) {
+ for(const port_t &port: get_input_ports()) {
set_ports.insert(boost::lexical_cast<size_t>(port["port"]));
}
- BOOST_FOREACH(const port_t &port, get_output_ports()) {
+ for(const port_t &port: get_output_ports()) {
set_ports.insert(boost::lexical_cast<size_t>(port["port"]));
}
return std::vector<size_t>(set_ports.begin(), set_ports.end());
@@ -353,17 +352,17 @@ public:
args_t args;
bool is_valid = true;
pt::ptree def;
- BOOST_FOREACH(pt::ptree::value_type &v, _pt.get_child("nocblock.args", def)) {
+ for(pt::ptree::value_type &v: _pt.get_child("nocblock.args", def)) {
arg_t arg;
if (v.first != "arg") continue;
- BOOST_FOREACH(const std::string &key, arg_t::ARG_ARGS.keys()) {
+ for(const std::string &key: arg_t::ARG_ARGS.keys()) {
arg[key] = v.second.get(key, arg_t::ARG_ARGS[key]);
}
if (arg["type"].empty()) {
arg["type"] = "string";
}
if (not arg.is_valid()) {
- UHD_MSG(warning) << boost::format("Found invalid argument: %s") % arg.to_string() << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << boost::format("Found invalid argument: %s") % arg.to_string() ;
is_valid = false;
}
args.push_back(arg);
@@ -391,7 +390,7 @@ public:
{
registers_t registers;
pt::ptree def;
- BOOST_FOREACH(pt::ptree::value_type &v, _pt.get_child("nocblock.registers", def)) {
+ for(pt::ptree::value_type &v: _pt.get_child("nocblock.registers", def)) {
if (v.first != reg_type) continue;
registers[v.second.get<std::string>("name")] =
boost::lexical_cast<size_t>(v.second.get<size_t>("address"));
@@ -419,7 +418,7 @@ blockdef::sptr blockdef::make_from_noc_id(uint64_t noc_id)
std::vector<fs::path> valid;
// Check if any of the paths exist
- BOOST_FOREACH(const fs::path &base_path, paths) {
+ for(const fs::path &base_path: paths) {
fs::path this_path = base_path / XML_BLOCKS_SUBDIR;
if (fs::exists(this_path) and fs::is_directory(this_path)) {
valid.push_back(this_path);
@@ -436,7 +435,7 @@ blockdef::sptr blockdef::make_from_noc_id(uint64_t noc_id)
}
// Iterate over all paths
- BOOST_FOREACH(const fs::path &path, valid) {
+ for(const fs::path &path: valid) {
// Iterate over all .xml files
fs::directory_iterator end_itr;
for (fs::directory_iterator i(path); i != end_itr; ++i) {
diff --git a/host/lib/rfnoc/ctrl_iface.cpp b/host/lib/rfnoc/ctrl_iface.cpp
index b2ac1778e..3a16f7ec1 100644
--- a/host/lib/rfnoc/ctrl_iface.cpp
+++ b/host/lib/rfnoc/ctrl_iface.cpp
@@ -16,9 +16,8 @@
//
#include "ctrl_iface.hpp"
-#include "async_packet_handler.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/transport/bounded_buffer.hpp>
@@ -37,7 +36,6 @@ using namespace uhd::transport;
static const double ACK_TIMEOUT = 2.0; //supposed to be worst case practical timeout
static const double MASSIVE_TIMEOUT = 10.0; //for when we wait on a timed command
-static const size_t SR_READBACK = 32;
ctrl_iface::~ctrl_iface(void){
/* NOP */
@@ -60,13 +58,12 @@ public:
_name(name),
_seq_out(0),
_timeout(ACK_TIMEOUT),
- _resp_queue(128/*max response msgs*/),
- _resp_queue_size(_resp_xport ? _resp_xport->get_num_recv_frames() : 3),
+ _resp_queue_size(_resp_xport->get_num_recv_frames()),
_rb_address(uhd::rfnoc::SR_READBACK)
{
- if (resp_xport) {
- while (resp_xport->get_recv_buff(0.0)) {} //flush
- }
+ UHD_ASSERT_THROW(_ctrl_xport);
+ UHD_ASSERT_THROW(_resp_xport);
+ while (resp_xport->get_recv_buff(0.0)) {} //flush
this->set_time(uhd::time_spec_t(0.0));
this->set_tick_rate(1.0); //something possible but bogus
}
@@ -76,7 +73,6 @@ public:
_timeout = ACK_TIMEOUT; //reset timeout to something small
UHD_SAFE_CALL(
this->peek32(0);//dummy peek with the purpose of ack'ing all packets
- _async_task.reset();//now its ok to release the task
)
}
@@ -172,7 +168,7 @@ private:
//load payload
pkt[packet_info.num_header_words32+0] = (_bige)? uhd::htonx(addr) : uhd::htowx(addr);
pkt[packet_info.num_header_words32+1] = (_bige)? uhd::htonx(data) : uhd::htowx(data);
- //UHD_MSG(status) << boost::format("0x%08x, 0x%08x\n") % addr % data;
+ //UHD_LOGGER_INFO("RFNOC") << boost::format("0x%08x, 0x%08x\n") % addr % data;
//send the buffer over the interface
_outstanding_seqs.push(_seq_out);
buff->commit(sizeof(uint32_t)*(packet_info.num_packet_words32));
@@ -196,50 +192,16 @@ private:
uint32_t const *pkt = NULL;
managed_recv_buffer::sptr buff;
- //get buffer from response endpoint - or die in timeout
- if (_resp_xport)
- {
- buff = _resp_xport->get_recv_buff(_timeout);
- try
- {
- UHD_ASSERT_THROW(bool(buff));
- UHD_ASSERT_THROW(buff->size() > 0);
- }
- catch(const std::exception &ex)
- {
- throw uhd::io_error(str(boost::format("Block ctrl (%s) no response packet - %s") % _name % ex.what()));
- }
- pkt = buff->cast<const uint32_t *>();
- packet_info.num_packet_words32 = buff->size()/sizeof(uint32_t);
+ buff = _resp_xport->get_recv_buff(_timeout);
+ try {
+ UHD_ASSERT_THROW(bool(buff));
+ UHD_ASSERT_THROW(buff->size() > 0);
}
-
- //get buffer from response endpoint - or die in timeout
- else
- {
- /*
- * Couldn't get message with haste.
- * Now check both possible queues for messages.
- * Messages should come in on _resp_queue,
- * but could end up in dump_queue.
- * If we don't get a message --> Die in timeout.
- */
- double accum_timeout = 0.0;
- const double short_timeout = 0.005; // == 5ms
- while(not ((_resp_queue.pop_with_haste(resp_buff))
- || (check_dump_queue(resp_buff))
- || (_resp_queue.pop_with_timed_wait(resp_buff, short_timeout))
- )){
- /*
- * If a message couldn't be received within a given timeout
- * --> throw AssertionError!
- */
- accum_timeout += short_timeout;
- UHD_ASSERT_THROW(accum_timeout < _timeout);
- }
-
- pkt = resp_buff.data;
- packet_info.num_packet_words32 = sizeof(resp_buff)/sizeof(uint32_t);
+ catch(const std::exception &ex) {
+ throw uhd::io_error(str(boost::format("Block ctrl (%s) no response packet - %s") % _name % ex.what()));
}
+ pkt = buff->cast<const uint32_t *>();
+ packet_info.num_packet_words32 = buff->size()/sizeof(uint32_t);
//parse the buffer
try
@@ -250,15 +212,15 @@ private:
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "[" << _name << "] Block ctrl bad VITA packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("RFNOC") << "[" << _name << "] Block ctrl bad VITA packet: " << ex.what() ;
if (buff){
- UHD_MSG(status) << boost::format("%08X") % pkt[0] << std::endl;
- UHD_MSG(status) << boost::format("%08X") % pkt[1] << std::endl;
- UHD_MSG(status) << boost::format("%08X") % pkt[2] << std::endl;
- UHD_MSG(status) << boost::format("%08X") % pkt[3] << std::endl;
+ UHD_LOGGER_INFO("RFNOC") << boost::format("%08X") % pkt[0] ;
+ UHD_LOGGER_INFO("RFNOC") << boost::format("%08X") % pkt[1] ;
+ UHD_LOGGER_INFO("RFNOC") << boost::format("%08X") % pkt[2] ;
+ UHD_LOGGER_INFO("RFNOC") << boost::format("%08X") % pkt[3] ;
}
else{
- UHD_MSG(status) << "buff is NULL" << std::endl;
+ UHD_LOGGER_INFO("RFNOC") << "buff is NULL" ;
}
}
@@ -306,48 +268,11 @@ private:
return 0;
}
- /*
- * If ctrl_core waits for a message that didn't arrive it can search for it in the dump queue.
- * This actually happens during shutdown.
- * handle_async_task can't access queue anymore thus it returns the corresponding message.
- * msg_task class implements a dump_queue to store such messages.
- * With check_dump_queue we can check if a message we are waiting for got stranded there.
- * If a message got stuck we get it here and push it onto our own message_queue.
- */
- bool check_dump_queue(resp_buff_type& b) {
- const size_t min_buff_size = 8; // Same value as in b200_io_impl->handle_async_task
- uint32_t recv_sid = (((_sid)<<16)|((_sid)>>16));
- uhd::msg_task::msg_payload_t msg;
- do{
- msg = _async_task->get_msg_from_dump_queue(recv_sid);
- }
- while(msg.size() < min_buff_size && msg.size() != 0);
-
- if(msg.size() >= min_buff_size) {
- memcpy(b.data, &msg.front(), std::min(msg.size(), sizeof(b.data)));
- return true;
- }
- return false;
- }
-
- void push_response(const uint32_t *buff)
- {
- resp_buff_type resp_buff;
- std::memcpy(resp_buff.data, buff, sizeof(resp_buff));
- _resp_queue.push_with_haste(resp_buff);
- }
-
- void hold_task(uhd::msg_task::sptr task)
- {
- _async_task = task;
- }
-
const vrt::if_packet_info_t::link_type_t _link_type;
const vrt::if_packet_info_t::packet_type_t _packet_type;
const bool _bige;
const uhd::transport::zero_copy_if::sptr _ctrl_xport;
const uhd::transport::zero_copy_if::sptr _resp_xport;
- uhd::msg_task::sptr _async_task;
const uint32_t _sid;
const std::string _name;
boost::mutex _mutex;
@@ -357,7 +282,6 @@ private:
double _tick_rate;
double _timeout;
std::queue<size_t> _outstanding_seqs;
- bounded_buffer<resp_buff_type> _resp_queue;
const size_t _resp_queue_size;
const size_t _rb_address;
diff --git a/host/lib/rfnoc/ctrl_iface.hpp b/host/lib/rfnoc/ctrl_iface.hpp
index 3690874f9..5e88c3dd6 100644
--- a/host/lib/rfnoc/ctrl_iface.hpp
+++ b/host/lib/rfnoc/ctrl_iface.hpp
@@ -47,18 +47,6 @@ public:
const std::string &name = "0"
);
- //! Hold a ref to a task thats feeding push response
- virtual void hold_task(uhd::msg_task::sptr task) = 0;
-
- //! Push a response externall (resp_xport is NULL)
- virtual void push_response(const uint32_t *buff) = 0;
-
- //! Set the command time that will activate
- virtual void set_time(const uhd::time_spec_t &time) = 0;
-
- //! Get the command time that will activate
- virtual uhd::time_spec_t get_time(void) = 0;
-
//! Set the tick rate (converting time into ticks)
virtual void set_tick_rate(const double rate) = 0;
};
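
The ctrl_iface rework above makes the response transport mandatory: the constructor asserts both transports and flushes stale buffers, and the NULL-transport fallback (dump queue, push_response, hold_task) is deleted from both the implementation and the interface. A standalone sketch of that constructor contract, with hypothetical xport_if/recv_buff types standing in for uhd::transport::zero_copy_if and its managed buffers:

    // Sketch only: xport_if and recv_buff are hypothetical stand-ins for
    // uhd::transport::zero_copy_if and its managed receive buffers.
    #include <cstddef>
    #include <memory>
    #include <stdexcept>

    struct recv_buff {
        size_t size() const { return 0; }
    };

    struct xport_if {
        // Returns an empty pointer once no stale buffers are pending.
        virtual std::shared_ptr<recv_buff> get_recv_buff(double /*timeout*/) {
            return nullptr;
        }
        virtual ~xport_if() = default;
    };

    class ctrl_iface_sketch {
    public:
        ctrl_iface_sketch(std::shared_ptr<xport_if> ctrl_xport,
                          std::shared_ptr<xport_if> resp_xport)
            : _ctrl_xport(ctrl_xport), _resp_xport(resp_xport)
        {
            // Both transports are now required; the old NULL-resp_xport path
            // with its dump queue no longer exists.
            if (!_ctrl_xport || !_resp_xport) {
                throw std::invalid_argument("ctrl and resp transports required");
            }
            // Flush anything left over from a previous session.
            while (_resp_xport->get_recv_buff(0.0)) {}
        }

    private:
        std::shared_ptr<xport_if> _ctrl_xport, _resp_xport;
    };

    int main()
    {
        auto ctrl = std::make_shared<xport_if>();
        auto resp = std::make_shared<xport_if>();
        ctrl_iface_sketch iface(ctrl, resp); // throws if either pointer is empty
        (void)iface;
        return 0;
    }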
diff --git a/host/lib/rfnoc/ddc_block_ctrl_impl.cpp b/host/lib/rfnoc/ddc_block_ctrl_impl.cpp
index 8c8b07afd..b0b510cde 100644
--- a/host/lib/rfnoc/ddc_block_ctrl_impl.cpp
+++ b/host/lib/rfnoc/ddc_block_ctrl_impl.cpp
@@ -17,7 +17,7 @@
#include "dsp_core_utils.hpp"
#include <uhd/rfnoc/ddc_block_ctrl.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/convert.hpp>
#include <uhd/types/ranges.hpp>
#include <boost/math/special_functions/round.hpp>
@@ -150,10 +150,10 @@ public:
const uhd::stream_cmd_t &stream_cmd_,
const size_t chan
) {
- UHD_RFNOC_BLOCK_TRACE() << "ddc_block_ctrl_base::issue_stream_cmd()" << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "ddc_block_ctrl_base::issue_stream_cmd()" ;
if (list_upstream_nodes().count(chan) == 0) {
- UHD_MSG(status) << "No upstream blocks." << std::endl;
+ UHD_LOGGER_INFO("RFNOC") << "No upstream blocks." ;
return;
}
@@ -241,7 +241,7 @@ private:
sr_write("M", 1, chan);
if (decim > 1 and hb_enable == 0) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("RFNOC") << boost::format(
"The requested decimation is odd; the user should expect passband CIC rolloff.\n"
"Select an even decimation to ensure that a halfband filter is enabled.\n"
"Decimations factorable by 4 will enable 2 halfbands, those factorable by 8 will enable 3 halfbands.\n"
diff --git a/host/lib/rfnoc/dma_fifo_block_ctrl_impl.cpp b/host/lib/rfnoc/dma_fifo_block_ctrl_impl.cpp
index aea7c591c..93f599314 100644
--- a/host/lib/rfnoc/dma_fifo_block_ctrl_impl.cpp
+++ b/host/lib/rfnoc/dma_fifo_block_ctrl_impl.cpp
@@ -19,7 +19,7 @@
#include "dma_fifo_core_3000.hpp"
#include "wb_iface_adapter.hpp"
#include <uhd/convert.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/wb_iface.hpp>
#include <boost/make_shared.hpp>
#include <boost/thread/mutex.hpp>
@@ -64,18 +64,18 @@ public:
_perifs[i].depth = DEFAULT_SIZE;
_perifs[i].core = dma_fifo_core_3000::make(_perifs[i].ctrl, USER_SR_BASE, USER_RB_BASE);
_perifs[i].core->resize(_perifs[i].base_addr, _perifs[i].depth);
- UHD_MSG(status) << boost::format("[DMA FIFO] Running BIST for FIFO %d... ") % i;
+ UHD_LOGGER_INFO("RFNOC") << boost::format("[DMA FIFO] Running BIST for FIFO %d... ") % i;
if (_perifs[i].core->ext_bist_supported()) {
uint32_t bisterr = _perifs[i].core->run_bist();
if (bisterr != 0) {
throw uhd::runtime_error(str(boost::format("BIST failed! (code: %d)\n") % bisterr));
} else {
double throughput = _perifs[i].core->get_bist_throughput(BUS_CLK_RATE);
- UHD_MSG(status) << (boost::format("pass (Throughput: %.1fMB/s)") % (throughput/1e6)) << std::endl;
+ UHD_LOGGER_INFO("RFNOC") << (boost::format("pass (Throughput: %.1fMB/s)") % (throughput/1e6)) ;
}
} else {
if (_perifs[i].core->run_bist() == 0) {
- UHD_MSG(status) << "pass\n";
+ UHD_LOGGER_INFO("RFNOC") << "pass\n";
} else {
throw uhd::runtime_error("BIST failed!\n");
}
diff --git a/host/lib/rfnoc/duc_block_ctrl_impl.cpp b/host/lib/rfnoc/duc_block_ctrl_impl.cpp
index d755bf519..d0742a6d0 100644
--- a/host/lib/rfnoc/duc_block_ctrl_impl.cpp
+++ b/host/lib/rfnoc/duc_block_ctrl_impl.cpp
@@ -17,7 +17,7 @@
#include "dsp_core_utils.hpp"
#include <uhd/rfnoc/duc_block_ctrl.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/convert.hpp>
#include <uhd/types/ranges.hpp>
#include <boost/math/special_functions/round.hpp>
@@ -141,7 +141,7 @@ public:
const uhd::stream_cmd_t &stream_cmd_,
const size_t chan
) {
- UHD_RFNOC_BLOCK_TRACE() << "duc_block_ctrl_base::issue_stream_cmd()" << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "duc_block_ctrl_base::issue_stream_cmd()" ;
uhd::stream_cmd_t stream_cmd = stream_cmd_;
if (stream_cmd.stream_mode == uhd::stream_cmd_t::STREAM_MODE_NUM_SAMPS_AND_DONE or
@@ -150,7 +150,7 @@ public:
stream_cmd.num_samps *= interpolation;
}
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t upstream_node, list_upstream_nodes()) {
+ for(const node_ctrl_base::node_map_pair_t upstream_node: list_upstream_nodes()) {
source_node_ctrl::sptr this_upstream_block_ctrl =
boost::dynamic_pointer_cast<source_node_ctrl>(upstream_node.second.lock());
this_upstream_block_ctrl->issue_stream_cmd(stream_cmd, chan);
@@ -226,7 +226,7 @@ private:
sr_write("M", std::pow(2.0, double(hb_enable)) * (interp & 0xff), chan);
if (interp > 1 and hb_enable == 0) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("RFNOC") << boost::format(
"The requested interpolation is odd; the user should expect passband CIC rolloff.\n"
"Select an even interpolation to ensure that a halfband filter is enabled.\n"
"interpolation = dsp_rate/samp_rate -> %d = (%f MHz)/(%f MHz)\n"
diff --git a/host/lib/rfnoc/graph_impl.cpp b/host/lib/rfnoc/graph_impl.cpp
index 64c6f6abe..005e23815 100644
--- a/host/lib/rfnoc/graph_impl.cpp
+++ b/host/lib/rfnoc/graph_impl.cpp
@@ -18,7 +18,7 @@
#include "graph_impl.hpp"
#include <uhd/rfnoc/source_block_ctrl_base.hpp>
#include <uhd/rfnoc/sink_block_ctrl_base.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
using namespace uhd::rfnoc;
@@ -85,10 +85,10 @@ void graph_impl::connect(
dst->set_upstream_port(actual_dst_block_port, actual_src_block_port);
// At this point, ports are locked and no one else can simply connect
// into them.
- //UHD_MSG(status)
+ //UHD_LOGGER_INFO("RFNOC")
//<< "[" << _name << "] Connecting "
//<< src_block << ":" << actual_src_block_port << " --> "
- //<< dst_block << ":" << actual_dst_block_port << std::endl;
+ //<< dst_block << ":" << actual_dst_block_port ;
/********************************************************************
* 2. Check IO signatures match
@@ -120,7 +120,7 @@ void graph_impl::connect(
********************************************************************/
size_t pkt_size = (pkt_size_ != 0) ? pkt_size_ : src->get_output_signature(src_block_port).packet_size;
if (pkt_size == 0) { // Unspecified packet rate. Assume max packet size.
- UHD_MSG(status) << "Assuming max packet size for " << src->get_block_id() << std::endl;
+ UHD_LOGGER_INFO("RFNOC") << "Assuming max packet size for " << src->get_block_id() ;
pkt_size = uhd::rfnoc::MAX_PACKET_SIZE;
}
// FC window (in packets) depends on FIFO size... ...and packet size.
diff --git a/host/lib/rfnoc/legacy_compat.cpp b/host/lib/rfnoc/legacy_compat.cpp
index bf653a89a..70fe067f8 100644
--- a/host/lib/rfnoc/legacy_compat.cpp
+++ b/host/lib/rfnoc/legacy_compat.cpp
@@ -26,14 +26,14 @@
#include <uhd/types/stream_cmd.hpp>
#include <uhd/types/direction.hpp>
#include <uhd/types/ranges.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/transport/chdr.hpp>
#include <uhd/usrp/multi_usrp.hpp>
#include <boost/make_shared.hpp>
#include <boost/assign.hpp>
-#define UHD_LEGACY_LOG() UHD_LOGV(never)
+#define UHD_LEGACY_LOG() UHD_LOGGER_TRACE("RFNOC")
using namespace uhd::rfnoc;
using uhd::usrp::subdev_spec_t;
@@ -97,16 +97,6 @@ size_t calc_num_tx_chans_per_radio(
);
}
-double lambda_const_double(const double d)
-{
- return d;
-}
-
-uhd::meta_range_t lambda_const_meta_range(const double start, const double stop, const double step)
-{
- return uhd::meta_range_t(start, stop, step);
-}
-
/*! Recreate passed property without bound subscribers. Maintains current property value.
*/
template <typename T>
@@ -115,6 +105,7 @@ static void recreate_property(const uhd::fs_path &path, uhd::property_tree::sptr
tree->remove(path);
tree->create<T>(path).set(temp);
}
+
/************************************************************************
* Class Definition
***********************************************************************/
@@ -154,16 +145,16 @@ public:
}
connect_blocks();
if (args.has_key("skip_ddc")) {
- UHD_LEGACY_LOG() << "[legacy_compat] Skipping DDCs by user request." << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] Skipping DDCs by user request." ;
} else if (not _has_ddcs) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("RFNOC")
<< "[legacy_compat] No DDCs detected. You will only be able to receive at the radio frontend rate."
- << std::endl;
+ ;
}
if (args.has_key("skip_duc")) {
- UHD_LEGACY_LOG() << "[legacy_compat] Skipping DUCs by user request." << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] Skipping DUCs by user request." ;
} else if (not _has_ducs) {
- UHD_MSG(warning) << "[legacy_compat] No DUCs detected. You will only be able to transmit at the radio frontend rate." << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << "[legacy_compat] No DUCs detected. You will only be able to transmit at the radio frontend rate." ;
}
if (args.has_key("skip_dram")) {
UHD_LEGACY_LOG() << "[legacy_compat] Skipping DRAM by user request." << std::endl;
@@ -172,7 +163,7 @@ public:
UHD_LEGACY_LOG() << "[legacy_compat] Skipping SRAM by user request." << std::endl;
}
if (not _has_dmafifo and not _has_sramfifo) {
- UHD_MSG(warning) << "[legacy_compat] No FIFO detected. Higher transmit rates may encounter errors." << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << "[legacy_compat] No FIFO detected. Higher transmit rates may encounter errors.";
}
for (size_t mboard = 0; mboard < _num_mboards; mboard++) {
@@ -256,7 +247,7 @@ public:
void issue_stream_cmd(const stream_cmd_t &stream_cmd, size_t mboard, size_t chan)
{
- UHD_LEGACY_LOG() << "[legacy_compat] issue_stream_cmd() " << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] issue_stream_cmd() " ;
const size_t &radio_index = _rx_channel_map[mboard][chan].radio_index;
const size_t &port_index = _rx_channel_map[mboard][chan].port_index;
if (_has_ddcs) {
@@ -274,9 +265,9 @@ public:
args.otw_format = "sc16";
}
_update_stream_args_for_streaming<uhd::RX_DIRECTION>(args, _rx_channel_map);
- UHD_LEGACY_LOG() << "[legacy_compat] rx stream args: " << args.args.to_string() << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] rx stream args: " << args.args.to_string() ;
uhd::rx_streamer::sptr streamer = _device->get_rx_stream(args);
- BOOST_FOREACH(const size_t chan, args.channels) {
+ for(const size_t chan: args.channels) {
_rx_stream_cache[chan] = streamer;
}
return streamer;
@@ -291,9 +282,9 @@ public:
args.otw_format = "sc16";
}
_update_stream_args_for_streaming<uhd::TX_DIRECTION>(args, _tx_channel_map);
- UHD_LEGACY_LOG() << "[legacy_compat] tx stream args: " << args.args.to_string() << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] tx stream args: " << args.args.to_string() ;
uhd::tx_streamer::sptr streamer = _device->get_tx_stream(args);
- BOOST_FOREACH(const size_t chan, args.channels) {
+ for(const size_t chan: args.channels) {
_tx_stream_cache[chan] = streamer;
}
return streamer;
@@ -347,14 +338,14 @@ public:
if (_rx_stream_cache.count(chan)) {
uhd::rx_streamer::sptr str_ptr = _rx_stream_cache[chan].lock();
if (str_ptr) {
- BOOST_FOREACH(const rx_stream_map_type::value_type &chan_streamer_pair, _rx_stream_cache) {
+ for(const rx_stream_map_type::value_type &chan_streamer_pair: _rx_stream_cache) {
if (chan_streamer_pair.second.lock() == str_ptr) {
chans_to_change.insert(chan_streamer_pair.first);
}
}
}
}
- BOOST_FOREACH(const size_t this_chan, chans_to_change) {
+ for(const size_t this_chan: chans_to_change) {
size_t mboard, mb_chan;
chan_to_mcp<uhd::RX_DIRECTION>(this_chan, _rx_channel_map, mboard, mb_chan);
const size_t dsp_index = _rx_channel_map[mboard][mb_chan].radio_index;
@@ -390,14 +381,14 @@ public:
if (_tx_stream_cache.count(chan)) {
uhd::tx_streamer::sptr str_ptr = _tx_stream_cache[chan].lock();
if (str_ptr) {
- BOOST_FOREACH(const tx_stream_map_type::value_type &chan_streamer_pair, _tx_stream_cache) {
+ for(const tx_stream_map_type::value_type &chan_streamer_pair: _tx_stream_cache) {
if (chan_streamer_pair.second.lock() == str_ptr) {
chans_to_change.insert(chan_streamer_pair.first);
}
}
}
}
- BOOST_FOREACH(const size_t this_chan, chans_to_change) {
+ for(const size_t this_chan: chans_to_change) {
size_t mboard, mb_chan;
chan_to_mcp<uhd::TX_DIRECTION>(this_chan, _tx_channel_map, mboard, mb_chan);
const size_t dsp_index = _tx_channel_map[mboard][mb_chan].radio_index;
@@ -590,7 +581,7 @@ private: // methods
const size_t this_spp = get_block_ctrl<radio_ctrl>(i, RADIO_BLOCK_NAME, k)->get_arg<int>("spp");
if (this_spp != _rx_spp) {
- UHD_LOG << str(
+ UHD_LOGGER_WARNING("RFNOC") << str(
boost::format("[legacy compat] Radios have differing spp values: %s has %d, others have %d. UHD will use smaller spp value for all connections. Performance might be not optimal.")
% radio_block_id.to_string() % this_spp % _rx_spp
);
@@ -653,10 +644,10 @@ private: // methods
)
;
_tree->create<double>(rx_dsp_base_path / "freq/value")
- .set_publisher(boost::bind(&lambda_const_double, 0.0))
+ .set_publisher([](){ return 0.0; })
;
_tree->create<uhd::meta_range_t>(rx_dsp_base_path / "freq/range")
- .set_publisher(boost::bind(&lambda_const_meta_range, 0.0, 0.0, 0.0))
+ .set_publisher([](){ return uhd::meta_range_t(0.0, 0.0, 0.0); })
;
}
}
@@ -686,10 +677,10 @@ private: // methods
)
;
_tree->create<double>(tx_dsp_base_path / "freq/value")
- .set_publisher(boost::bind(&lambda_const_double, 0.0))
+ .set_publisher([](){ return 0.0; })
;
_tree->create<uhd::meta_range_t>(tx_dsp_base_path / "freq/range")
- .set_publisher(boost::bind(&lambda_const_meta_range, 0.0, 0.0, 0.0))
+ .set_publisher([](){ return uhd::meta_range_t(0.0, 0.0, 0.0); })
;
}
}
@@ -898,7 +889,7 @@ legacy_compat::sptr legacy_compat::make(
if (legacy_cache.count(device.get()) and not legacy_cache.at(device.get()).expired()) {
legacy_compat::sptr legacy_compat_copy = legacy_cache.at(device.get()).lock();
UHD_ASSERT_THROW(bool(legacy_compat_copy));
- UHD_LEGACY_LOG() << "[legacy_compat] Using existing legacy compat object for this device." << std::endl;
+ UHD_LEGACY_LOG() << "[legacy_compat] Using existing legacy compat object for this device." ;
return legacy_compat_copy;
}
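
legacy_compat.cpp drops the lambda_const_double()/lambda_const_meta_range() helpers: property publishers are now plain C++11 lambdas rather than boost::bind expressions. A standalone sketch of the same change, with std::function standing in for the set_publisher() slot of the property tree (an assumption for illustration):

    // Sketch only: std::function stands in for the set_publisher() slot of
    // uhd::property_tree; const_double mirrors the removed lambda_const_double().
    #include <functional>
    #include <iostream>

    static double const_double(const double d) { return d; }

    int main()
    {
        // Old style, via a named helper and bind:
        std::function<double()> old_publisher = std::bind(&const_double, 0.0);
        // New style, as in this commit:
        std::function<double()> new_publisher = []() { return 0.0; };

        std::cout << old_publisher() << " == " << new_publisher() << std::endl;
        return 0;
    }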
diff --git a/host/lib/rfnoc/nocscript/block_iface.cpp b/host/lib/rfnoc/nocscript/block_iface.cpp
index 0d301e5bc..544593def 100644
--- a/host/lib/rfnoc/nocscript/block_iface.cpp
+++ b/host/lib/rfnoc/nocscript/block_iface.cpp
@@ -18,12 +18,12 @@
#include "block_iface.hpp"
#include "function_table.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/assign.hpp>
#include <boost/bind.hpp>
#include <boost/format.hpp>
-#define UHD_NOCSCRIPT_LOG() UHD_LOGV(never)
+#define UHD_NOCSCRIPT_LOG() UHD_LOGGER_TRACE("RFNOC")
using namespace uhd::rfnoc;
using namespace uhd::rfnoc::nocscript;
@@ -116,7 +116,7 @@ void block_iface::run_and_check(const std::string &code, const std::string &erro
{
boost::mutex::scoped_lock local_interpreter_lock(_lil_mutex);
- UHD_NOCSCRIPT_LOG() << "[NocScript] Executing and asserting code: " << code << std::endl;
+ UHD_NOCSCRIPT_LOG() << "[NocScript] Executing and asserting code: " << code ;
expression::sptr e = _parser->create_expr_tree(code);
expression_literal result = e->eval();
if (not result.to_bool()) {
@@ -143,12 +143,12 @@ expression_literal block_iface::_nocscript__sr_write(expression_container::expr_
const uint32_t reg_val = uint32_t(args[1]->eval().get_int());
bool result = true;
try {
- UHD_NOCSCRIPT_LOG() << "[NocScript] Executing SR_WRITE() " << std::endl;
+ UHD_NOCSCRIPT_LOG() << "[NocScript] Executing SR_WRITE() " ;
_block_ptr->sr_write(reg_name, reg_val);
} catch (const uhd::exception &e) {
- UHD_MSG(error) << boost::format("[NocScript] Error while executing SR_WRITE(%s, 0x%X):\n%s")
+ UHD_LOGGER_ERROR("RFNOC") << boost::format("[NocScript] Error while executing SR_WRITE(%s, 0x%X):\n%s")
% reg_name % reg_val % e.what()
- << std::endl;
+ ;
result = false;
}
@@ -195,7 +195,7 @@ expression_literal block_iface::_nocscript__arg_set_int(const expression_contain
if (args.size() == 3) {
port = size_t(args[2]->eval().get_int());
}
- UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name << std::endl;
+ UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name ;
_block_ptr->set_arg<int>(var_name, val, port);
return expression_literal(true);
}
@@ -208,7 +208,7 @@ expression_literal block_iface::_nocscript__arg_set_string(const expression_cont
if (args.size() == 3) {
port = size_t(args[2]->eval().get_int());
}
- UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name << std::endl;
+ UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name ;
_block_ptr->set_arg<std::string>(var_name, val, port);
return expression_literal(true);
}
@@ -221,7 +221,7 @@ expression_literal block_iface::_nocscript__arg_set_double(const expression_cont
if (args.size() == 3) {
port = size_t(args[2]->eval().get_int());
}
- UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name << std::endl;
+ UHD_NOCSCRIPT_LOG() << "[NocScript] Setting $" << var_name ;
_block_ptr->set_arg<double>(var_name, val, port);
return expression_literal(true);
}
@@ -239,8 +239,8 @@ block_iface::sptr block_iface::make(uhd::rfnoc::block_ctrl_base* block_ptr)
expression_literal block_iface::_nocscript__var_get(const expression_container::expr_list_type &args)
{
expression_literal expr = _vars[args[0]->eval().get_string()];
- //std::cout << "[NocScript] Getting var " << args[0]->eval().get_string() << " == " << expr << std::endl;
- //std::cout << "[NocScript] Type " << expr.infer_type() << std::endl;
+ //std::cout << "[NocScript] Getting var " << args[0]->eval().get_string() << " == " << expr ;
+ //std::cout << "[NocScript] Type " << expr.infer_type() ;
//return _vars[args[0]->eval().get_string()];
return expr;
}
@@ -248,8 +248,8 @@ expression_literal block_iface::_nocscript__var_get(const expression_container::
expression_literal block_iface::_nocscript__var_set(const expression_container::expr_list_type &args)
{
_vars[args[0]->eval().get_string()] = args[1]->eval();
- //std::cout << "[NocScript] Set var " << args[0]->eval().get_string() << " to " << _vars[args[0]->eval().get_string()] << std::endl;
- //std::cout << "[NocScript] Type " << _vars[args[0]->eval().get_string()].infer_type() << std::endl;
+ //std::cout << "[NocScript] Set var " << args[0]->eval().get_string() << " to " << _vars[args[0]->eval().get_string()] ;
+ //std::cout << "[NocScript] Type " << _vars[args[0]->eval().get_string()].infer_type() ;
return expression_literal(true);
}
diff --git a/host/lib/rfnoc/nocscript/expression.cpp b/host/lib/rfnoc/nocscript/expression.cpp
index 38d6e2128..257e0b447 100644
--- a/host/lib/rfnoc/nocscript/expression.cpp
+++ b/host/lib/rfnoc/nocscript/expression.cpp
@@ -18,7 +18,6 @@
#include "expression.hpp"
#include "function_table.hpp"
#include <uhd/utils/cast.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/assign.hpp>
#include <boost/algorithm/string.hpp>
@@ -77,7 +76,7 @@ expression_literal::expression_literal(
std::string str_vec = _val.substr(1, _val.size()-2);
std::vector<std::string> subtoken_list;
boost::split(subtoken_list, str_vec, boost::is_any_of(", "), boost::token_compress_on);
- BOOST_FOREACH(const std::string &t, subtoken_list) {
+ for(const std::string &t: subtoken_list) {
_int_vector_val.push_back(boost::lexical_cast<int>(t));
}
break;
@@ -299,7 +298,7 @@ expression_literal expression_container::eval()
}
expression_literal ret_val;
- BOOST_FOREACH(const expression::sptr &sub_expr, _sub_exprs) {
+ for(const expression::sptr &sub_expr: _sub_exprs) {
ret_val = sub_expr->eval();
if (_combiner == COMBINE_AND and ret_val.to_bool() == false) {
return ret_val;
@@ -319,7 +318,7 @@ std::string expression_function::to_string(const std::string &name, const argtyp
{
std::string s = name;
int arg_count = 0;
- BOOST_FOREACH(const expression::type_t type, types) {
+ for(const expression::type_t type: types) {
if (arg_count == 0) {
s += "(";
} else {
diff --git a/host/lib/rfnoc/nocscript/function_table.cpp b/host/lib/rfnoc/nocscript/function_table.cpp
index a4e36c1a1..aabaef635 100644
--- a/host/lib/rfnoc/nocscript/function_table.cpp
+++ b/host/lib/rfnoc/nocscript/function_table.cpp
@@ -18,7 +18,6 @@
#include "function_table.hpp"
#include "basic_functions.hpp"
#include <boost/bind.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <map>
diff --git a/host/lib/rfnoc/node_ctrl_base.cpp b/host/lib/rfnoc/node_ctrl_base.cpp
index b4d0a30ff..e9424d319 100644
--- a/host/lib/rfnoc/node_ctrl_base.cpp
+++ b/host/lib/rfnoc/node_ctrl_base.cpp
@@ -16,7 +16,7 @@
//
#include <uhd/rfnoc/node_ctrl_base.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/range/adaptor/map.hpp>
using namespace uhd::rfnoc;
@@ -30,7 +30,7 @@ std::string node_ctrl_base::unique_id() const
void node_ctrl_base::clear()
{
- UHD_RFNOC_BLOCK_TRACE() << "node_ctrl_base::clear() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "node_ctrl_base::clear() " ;
// Reset connections:
_upstream_nodes.clear();
_downstream_nodes.clear();
diff --git a/host/lib/rfnoc/radio_ctrl_impl.cpp b/host/lib/rfnoc/radio_ctrl_impl.cpp
index c43d39f71..69c9fb68c 100644
--- a/host/lib/rfnoc/radio_ctrl_impl.cpp
+++ b/host/lib/rfnoc/radio_ctrl_impl.cpp
@@ -16,11 +16,10 @@
//
#include "wb_iface_adapter.hpp"
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/bind.hpp>
#include <uhd/convert.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/ranges.hpp>
#include <uhd/types/direction.hpp>
#include "radio_ctrl_impl.hpp"
@@ -30,6 +29,7 @@ using namespace uhd;
using namespace uhd::rfnoc;
static const size_t BYTES_PER_SAMPLE = 4;
+const std::string radio_ctrl::ALL_LOS = "all";
/****************************************************************************
* Structors and init
@@ -137,7 +137,6 @@ radio_ctrl_impl::radio_ctrl_impl() :
void radio_ctrl_impl::_register_loopback_self_test(size_t chan)
{
- UHD_MSG(status) << "[RFNoC Radio] Performing register loopback test... " << std::flush;
size_t hash = size_t(time(NULL));
for (size_t i = 0; i < 100; i++)
{
@@ -145,12 +144,12 @@ void radio_ctrl_impl::_register_loopback_self_test(size_t chan)
sr_write(regs::TEST, uint32_t(hash), chan);
uint32_t result = user_reg_read32(regs::RB_TEST, chan);
if (result != uint32_t(hash)) {
- UHD_MSG(status) << "fail" << std::endl;
- UHD_MSG(status) << boost::format("expected: %x result: %x") % uint32_t(hash) % result << std::endl;
+ UHD_LOGGER_ERROR("RFNOC RADIO") << "Register loopback test failed";
+ UHD_LOGGER_ERROR("RFNOC RADIO") << boost::format("expected: %x result: %x") % uint32_t(hash) % result ;
return; // exit on any failure
}
}
- UHD_MSG(status) << "pass" << std::endl;
+ UHD_LOGGER_INFO("RFNOC RADIO") << "Register loopback test passed";
}
/****************************************************************************
@@ -196,6 +195,11 @@ double radio_ctrl_impl::set_rx_gain(const double gain, const size_t chan)
return _rx_gain[chan] = gain;
}
+double radio_ctrl_impl::set_rx_bandwidth(const double bandwidth, const size_t chan)
+{
+ return _rx_bandwidth[chan] = bandwidth;
+}
+
void radio_ctrl_impl::set_time_sync(const uhd::time_spec_t &time)
{
_time64->set_time_sync(time);
@@ -236,6 +240,56 @@ double radio_ctrl_impl::get_rx_gain(const size_t chan) /* const */
return _rx_gain[chan];
}
+double radio_ctrl_impl::get_rx_bandwidth(const size_t chan) /* const */
+{
+ return _rx_bandwidth[chan];
+}
+
+std::vector<std::string> radio_ctrl_impl::get_rx_lo_names(const size_t /* chan */)
+{
+ return std::vector<std::string>();
+}
+
+std::vector<std::string> radio_ctrl_impl::get_rx_lo_sources(const std::string & /* name */, const size_t /* chan */)
+{
+ return std::vector<std::string>();
+}
+
+freq_range_t radio_ctrl_impl::get_rx_lo_freq_range(const std::string & /* name */, const size_t /* chan */)
+{
+ return freq_range_t();
+}
+
+void radio_ctrl_impl::set_rx_lo_source(const std::string & /* src */, const std::string & /* name */, const size_t /* chan */)
+{
+ throw uhd::not_implemented_error("set_rx_lo_source is not supported on this radio");
+}
+
+const std::string radio_ctrl_impl::get_rx_lo_source(const std::string & /* name */, const size_t /* chan */)
+{
+ return "internal";
+}
+
+void radio_ctrl_impl::set_rx_lo_export_enabled(bool /* enabled */, const std::string & /* name */, const size_t /* chan */)
+{
+ throw uhd::not_implemented_error("set_rx_lo_export_enabled is not supported on this radio");
+}
+
+bool radio_ctrl_impl::get_rx_lo_export_enabled(const std::string & /* name */, const size_t /* chan */)
+{
+    return false; // Not exporting non-existent LOs
+}
+
+double radio_ctrl_impl::set_rx_lo_freq(double /* freq */, const std::string & /* name */, const size_t /* chan */)
+{
+ throw uhd::not_implemented_error("set_rx_lo_freq is not supported on this radio");
+}
+
+double radio_ctrl_impl::get_rx_lo_freq(const std::string & /* name */, const size_t /* chan */)
+{
+ return 0;
+}
+
/***********************************************************************
* RX Streamer-related methods (from source_block_ctrl_base)
**********************************************************************/
@@ -243,9 +297,9 @@ double radio_ctrl_impl::get_rx_gain(const size_t chan) /* const */
void radio_ctrl_impl::issue_stream_cmd(const uhd::stream_cmd_t &stream_cmd, const size_t chan)
{
boost::mutex::scoped_lock lock(_mutex);
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::issue_stream_cmd() " << chan << " " << char(stream_cmd.stream_mode) << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::issue_stream_cmd() " << chan << " " << char(stream_cmd.stream_mode) ;
if (not _is_streamer_active(uhd::RX_DIRECTION, chan)) {
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::issue_stream_cmd() called on inactive channel. Skipping." << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::issue_stream_cmd() called on inactive channel. Skipping." ;
return;
}
UHD_ASSERT_THROW(stream_cmd.num_samps <= 0x0fffffff);
@@ -284,7 +338,7 @@ std::vector<size_t> radio_ctrl_impl::get_active_rx_ports()
{
std::vector<size_t> active_rx_ports;
typedef std::map<size_t, bool> map_t;
- BOOST_FOREACH(map_t::value_type &m, _rx_streamer_active) {
+ for(map_t::value_type &m: _rx_streamer_active) {
if (m.second) {
active_rx_ports.push_back(m.first);
}
@@ -297,7 +351,7 @@ std::vector<size_t> radio_ctrl_impl::get_active_rx_ports()
**********************************************************************/
void radio_ctrl_impl::set_rx_streamer(bool active, const size_t port)
{
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::set_rx_streamer() " << port << " -> " << active << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::set_rx_streamer() " << port << " -> " << active ;
if (port > _num_rx_channels) {
throw uhd::value_error(str(
boost::format("[%s] Can't (un)register RX streamer on port %d (invalid port)")
@@ -315,7 +369,7 @@ void radio_ctrl_impl::set_rx_streamer(bool active, const size_t port)
void radio_ctrl_impl::set_tx_streamer(bool active, const size_t port)
{
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::set_tx_streamer() " << port << " -> " << active << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::set_tx_streamer() " << port << " -> " << active ;
if (port > _num_tx_channels) {
throw uhd::value_error(str(
boost::format("[%s] Can't (un)register TX streamer on port %d (invalid port)")
@@ -336,11 +390,11 @@ void radio_ctrl_impl::set_tx_streamer(bool active, const size_t port)
void radio_ctrl_impl::_update_spp(int spp)
{
boost::mutex::scoped_lock lock(_mutex);
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::_update_spp(): Requested spp: " << spp << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::_update_spp(): Requested spp: " << spp ;
if (spp == 0) {
spp = DEFAULT_PACKET_SIZE / BYTES_PER_SAMPLE;
}
- UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::_update_spp(): Setting spp to: " << spp << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "radio_ctrl_impl::_update_spp(): Setting spp to: " << spp ;
for (size_t i = 0; i < _num_rx_channels; i++) {
sr_write(regs::RX_CTRL_MAXLEN, uint32_t(spp), i);
}
@@ -356,7 +410,6 @@ void radio_ctrl_impl::set_time_next_pps(const time_spec_t &time_spec)
_time64->set_time_next_pps(time_spec);
}
-
time_spec_t radio_ctrl_impl::get_time_now()
{
return _time64->get_time_now();
@@ -367,3 +420,52 @@ time_spec_t radio_ctrl_impl::get_time_last_pps()
return _time64->get_time_last_pps();
}
+void radio_ctrl_impl::set_time_source(const std::string &source)
+{
+ _tree->access<std::string>("time_source/value").set(source);
+}
+
+std::string radio_ctrl_impl::get_time_source()
+{
+ return _tree->access<std::string>("time_source/value").get();
+}
+
+std::vector<std::string> radio_ctrl_impl::get_time_sources()
+{
+ return _tree->access<std::vector<std::string>>("time_source/options").get();
+}
+
+void radio_ctrl_impl::set_clock_source(const std::string &source)
+{
+ _tree->access<std::string>("clock_source/value").set(source);
+}
+
+std::string radio_ctrl_impl::get_clock_source()
+{
+ return _tree->access<std::string>("clock_source/value").get();
+}
+
+std::vector<std::string> radio_ctrl_impl::get_clock_sources()
+{
+ return _tree->access<std::vector<std::string>>("clock_source/options").get();
+}
+
+
+std::vector<std::string> radio_ctrl_impl::get_gpio_banks() const
+{
+ return std::vector<std::string>();
+}
+
+void radio_ctrl_impl::set_gpio_attr(
+ const std::string &bank,
+ const std::string &attr,
+ const uint32_t value,
+ const uint32_t mask
+) {
+ throw uhd::not_implemented_error("set_gpio_attr was not defined for this radio");
+}
+
+uint32_t radio_ctrl_impl::get_gpio_attr(const std::string &bank, const std::string &attr)
+{
+ throw uhd::not_implemented_error("get_gpio_attr was not defined for this radio");
+}
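
radio_ctrl_impl now ships default implementations for the new RX bandwidth and LO calls: the getters return neutral values (empty name list, "internal" source, 0 Hz) and the setters throw uhd::not_implemented_error, so only radios with controllable LOs need overrides. A caller-side sketch of that contract, with a hypothetical radio_base class and std::runtime_error standing in for radio_ctrl_impl and uhd::not_implemented_error:

    // Sketch only: radio_base is a hypothetical stand-in for the defaults added
    // to radio_ctrl_impl; std::runtime_error stands in for uhd::not_implemented_error.
    #include <cstddef>
    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <vector>

    struct radio_base {
        virtual std::vector<std::string> get_rx_lo_names(size_t /*chan*/) {
            return {}; // default: no controllable LOs
        }
        virtual void set_rx_lo_source(
            const std::string & /*src*/, const std::string & /*name*/, size_t /*chan*/) {
            throw std::runtime_error("set_rx_lo_source is not supported on this radio");
        }
        virtual ~radio_base() = default;
    };

    int main()
    {
        radio_base radio;
        // Callers should probe for LO support before trying to configure it.
        if (radio.get_rx_lo_names(0).empty()) {
            std::cout << "No controllable LOs; skipping LO configuration.\n";
        } else {
            radio.set_rx_lo_source("external", "LO1", 0);
        }
        return 0;
    }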
diff --git a/host/lib/rfnoc/radio_ctrl_impl.hpp b/host/lib/rfnoc/radio_ctrl_impl.hpp
index d6b402120..6f0c82504 100644
--- a/host/lib/rfnoc/radio_ctrl_impl.hpp
+++ b/host/lib/rfnoc/radio_ctrl_impl.hpp
@@ -60,6 +60,7 @@ public:
virtual double set_rx_frequency(const double freq, const size_t chan);
virtual double set_tx_gain(const double gain, const size_t chan);
virtual double set_rx_gain(const double gain, const size_t chan);
+ virtual double set_rx_bandwidth(const double bandwidth, const size_t chan);
virtual double get_rate() const;
virtual std::string get_tx_antenna(const size_t chan) /* const */;
@@ -68,12 +69,41 @@ public:
virtual double get_rx_frequency(const size_t) /* const */;
virtual double get_tx_gain(const size_t) /* const */;
virtual double get_rx_gain(const size_t) /* const */;
+ virtual double get_rx_bandwidth(const size_t) /* const */;
+
+ virtual std::vector<std::string> get_rx_lo_names(const size_t chan);
+ virtual std::vector<std::string> get_rx_lo_sources(const std::string &name, const size_t chan);
+ virtual freq_range_t get_rx_lo_freq_range(const std::string &name, const size_t chan);
+
+ virtual void set_rx_lo_source(const std::string &src, const std::string &name, const size_t chan);
+ virtual const std::string get_rx_lo_source(const std::string &name, const size_t chan);
+
+ virtual void set_rx_lo_export_enabled(bool enabled, const std::string &name, const size_t chan);
+ virtual bool get_rx_lo_export_enabled(const std::string &name, const size_t chan);
+
+ virtual double set_rx_lo_freq(double freq, const std::string &name, const size_t chan);
+ virtual double get_rx_lo_freq(const std::string &name, const size_t chan);
void set_time_now(const time_spec_t &time_spec);
void set_time_next_pps(const time_spec_t &time_spec);
void set_time_sync(const uhd::time_spec_t &time);
time_spec_t get_time_now();
time_spec_t get_time_last_pps();
+ virtual void set_time_source(const std::string &source);
+ virtual std::string get_time_source();
+ virtual std::vector<std::string> get_time_sources();
+ virtual void set_clock_source(const std::string &source);
+ virtual std::string get_clock_source();
+ virtual std::vector<std::string> get_clock_sources();
+
+ virtual std::vector<std::string> get_gpio_banks() const;
+ virtual void set_gpio_attr(
+ const std::string &bank,
+ const std::string &attr,
+ const uint32_t value,
+ const uint32_t mask
+ );
+ virtual uint32_t get_gpio_attr(const std::string &bank, const std::string &attr);
/***********************************************************************
* Block control API calls
@@ -199,6 +229,7 @@ private:
std::map<size_t, double> _rx_freq;
std::map<size_t, double> _tx_gain;
std::map<size_t, double> _rx_gain;
+ std::map<size_t, double> _rx_bandwidth;
std::vector<bool> _continuous_streaming;
}; /* class radio_ctrl_impl */
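
The header also gains virtual set_gpio_attr()/get_gpio_attr() hooks, which the base class leaves throwing. The (value, mask) pair is conventionally a read-modify-write in which only the masked bits change; that interpretation is an assumption here, since the diff itself does not spell it out. A small sketch:

    // Sketch only: illustrates the usual value/mask read-modify-write that a
    // concrete set_gpio_attr() override would perform; register storage is faked.
    #include <cstdint>
    #include <iostream>

    static uint32_t apply_masked(uint32_t reg, uint32_t value, uint32_t mask)
    {
        // Clear the masked bits, then set them from value.
        return (reg & ~mask) | (value & mask);
    }

    int main()
    {
        uint32_t gpio_out = 0xF0F0F0F0;
        // Drive only the low byte high, leaving the other bits untouched.
        gpio_out = apply_masked(gpio_out, /*value=*/0xFF, /*mask=*/0x000000FF);
        std::cout << std::hex << gpio_out << std::endl; // prints f0f0f0ff
        return 0;
    }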
diff --git a/host/lib/rfnoc/rx_stream_terminator.cpp b/host/lib/rfnoc/rx_stream_terminator.cpp
index 43b3664fc..e05d9cd49 100644
--- a/host/lib/rfnoc/rx_stream_terminator.cpp
+++ b/host/lib/rfnoc/rx_stream_terminator.cpp
@@ -18,10 +18,9 @@
#include "rx_stream_terminator.hpp"
#include "radio_ctrl_impl.hpp"
#include "../transport/super_recv_packet_handler.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/source_node_ctrl.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
using namespace uhd::rfnoc;
@@ -48,8 +47,8 @@ void rx_stream_terminator::set_tx_streamer(bool, const size_t)
void rx_stream_terminator::set_rx_streamer(bool active, const size_t)
{
// TODO this is identical to source_node_ctrl::set_rx_streamer() -> factor out
- UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::set_rx_streamer() " << active << std::endl;
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t upstream_node, _upstream_nodes) {
+ UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::set_rx_streamer() " << active;
+ for(const node_ctrl_base::node_map_pair_t upstream_node: _upstream_nodes) {
source_node_ctrl::sptr curr_upstream_block_ctrl =
boost::dynamic_pointer_cast<source_node_ctrl>(upstream_node.second.lock());
if (curr_upstream_block_ctrl) {
@@ -71,16 +70,16 @@ void rx_stream_terminator::handle_overrun(boost::weak_ptr<uhd::rx_streamer> stre
return;
}
- UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::handle_overrun()" << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::handle_overrun()" ;
boost::shared_ptr<uhd::transport::sph::recv_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<uhd::transport::sph::recv_packet_streamer>(streamer.lock());
if (not my_streamer) return; //If the rx_streamer has expired then overflow handling makes no sense.
bool in_continuous_streaming_mode = true;
int num_channels = 0;
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node, upstream_radio_nodes) {
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node: upstream_radio_nodes) {
num_channels += node->get_active_rx_ports().size();
- BOOST_FOREACH(const size_t port, node->get_active_rx_ports()) {
+ for(const size_t port: node->get_active_rx_ports()) {
in_continuous_streaming_mode = in_continuous_streaming_mode && node->in_continuous_streaming_mode(port);
}
}
@@ -101,8 +100,8 @@ void rx_stream_terminator::handle_overrun(boost::weak_ptr<uhd::rx_streamer> stre
/////////////////////////////////////////////////////////////
// MIMO overflow recovery time
/////////////////////////////////////////////////////////////
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node, upstream_radio_nodes) {
- BOOST_FOREACH(const size_t port, node->get_active_rx_ports()) {
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node: upstream_radio_nodes) {
+ for(const size_t port: node->get_active_rx_ports()) {
// check all the ports on all the radios
node->rx_ctrl_clear_cmds(port);
node->issue_stream_cmd(stream_cmd_t::STREAM_MODE_STOP_CONTINUOUS, port);
@@ -116,8 +115,8 @@ void rx_stream_terminator::handle_overrun(boost::weak_ptr<uhd::rx_streamer> stre
stream_cmd.stream_now = false;
stream_cmd.time_spec = upstream_radio_nodes[0]->get_time_now() + time_spec_t(0.05);
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node, upstream_radio_nodes) {
- BOOST_FOREACH(const size_t port, node->get_active_rx_ports()) {
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl_impl> &node: upstream_radio_nodes) {
+ for(const size_t port: node->get_active_rx_ports()) {
node->issue_stream_cmd(stream_cmd, port);
}
}
@@ -126,7 +125,7 @@ void rx_stream_terminator::handle_overrun(boost::weak_ptr<uhd::rx_streamer> stre
rx_stream_terminator::~rx_stream_terminator()
{
- UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::~rx_stream_terminator() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "rx_stream_terminator::~rx_stream_terminator() " ;
set_rx_streamer(false, 0);
}
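
For reference, the overrun handler touched above follows a stop-then-timed-restart sequence for continuous streaming: clear queued commands and stop every active RX port on every upstream radio, then re-issue a single timed stream command a little in the future (now plus 0.05 s in this code) so all ports restart aligned. A standalone sketch of that sequence, with a hypothetical radio_sketch type standing in for the radio nodes:

    // Sketch only: radio_sketch is a hypothetical stand-in for the upstream
    // radio nodes handled by rx_stream_terminator::handle_overrun().
    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct radio_sketch {
        std::vector<size_t> active_ports{0, 1};
        void clear_cmds(size_t port) { std::cout << "clear cmds, port " << port << "\n"; }
        void stop(size_t port)       { std::cout << "stop, port " << port << "\n"; }
        void start_at(size_t port, double t) {
            std::cout << "restart port " << port << " at t=" << t << "\n";
        }
        double time_now() const { return 100.0; }
    };

    int main()
    {
        std::vector<radio_sketch> radios(2);

        // 1) Clear queued commands and stop every active port on every radio.
        for (auto &radio : radios) {
            for (size_t port : radio.active_ports) {
                radio.clear_cmds(port);
                radio.stop(port);
            }
        }

        // 2) Restart everything with one timed command slightly in the future
        //    (the code above uses now + 0.05 s) so the ports come back aligned.
        const double restart_time = radios.front().time_now() + 0.05;
        for (auto &radio : radios) {
            for (size_t port : radio.active_ports) {
                radio.start_at(port, restart_time);
            }
        }
        return 0;
    }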
diff --git a/host/lib/rfnoc/sink_block_ctrl_base.cpp b/host/lib/rfnoc/sink_block_ctrl_base.cpp
index 55d502ede..48291e02f 100644
--- a/host/lib/rfnoc/sink_block_ctrl_base.cpp
+++ b/host/lib/rfnoc/sink_block_ctrl_base.cpp
@@ -18,7 +18,7 @@
#include "utils.hpp"
#include <uhd/rfnoc/sink_block_ctrl_base.hpp>
#include <uhd/rfnoc/constants.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
using namespace uhd;
using namespace uhd::rfnoc;
@@ -45,7 +45,7 @@ std::vector<size_t> sink_block_ctrl_base::get_input_ports() const
{
std::vector<size_t> input_ports;
input_ports.reserve(_tree->list(_root_path / "ports" / "in").size());
- BOOST_FOREACH(const std::string port, _tree->list(_root_path / "ports" / "in")) {
+ for(const std::string port: _tree->list(_root_path / "ports" / "in")) {
input_ports.push_back(boost::lexical_cast<size_t>(port));
}
return input_ports;
@@ -66,7 +66,7 @@ void sink_block_ctrl_base::configure_flow_control_in(
size_t packets,
size_t block_port
) {
- UHD_RFNOC_BLOCK_TRACE() << boost::format("sink_block_ctrl_base::configure_flow_control_in(cycles=%d, packets=%d)") % cycles % packets << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << boost::format("sink_block_ctrl_base::configure_flow_control_in(cycles=%d, packets=%d)") % cycles % packets ;
uint32_t cycles_word = 0;
if (cycles) {
cycles_word = (1<<31) | cycles;
diff --git a/host/lib/rfnoc/sink_node_ctrl.cpp b/host/lib/rfnoc/sink_node_ctrl.cpp
index 8398641fd..c570b8eca 100644
--- a/host/lib/rfnoc/sink_node_ctrl.cpp
+++ b/host/lib/rfnoc/sink_node_ctrl.cpp
@@ -16,7 +16,7 @@
//
#include "utils.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/sink_node_ctrl.hpp>
#include <uhd/rfnoc/source_node_ctrl.hpp>
@@ -35,10 +35,10 @@ size_t sink_node_ctrl::connect_upstream(
void sink_node_ctrl::set_tx_streamer(bool active, const size_t port)
{
- UHD_RFNOC_BLOCK_TRACE() << "sink_node_ctrl::set_tx_streamer() " << active << " " << port << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "sink_node_ctrl::set_tx_streamer() " << active << " " << port ;
/* Enable all downstream connections:
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t downstream_node, list_downstream_nodes()) {
+ for(const node_ctrl_base::node_map_pair_t downstream_node: list_downstream_nodes()) {
sptr curr_downstream_block_ctrl =
boost::dynamic_pointer_cast<sink_node_ctrl>(downstream_node.second.lock());
if (curr_downstream_block_ctrl) {
diff --git a/host/lib/rfnoc/source_block_ctrl_base.cpp b/host/lib/rfnoc/source_block_ctrl_base.cpp
index 94e271541..4cc889545 100644
--- a/host/lib/rfnoc/source_block_ctrl_base.cpp
+++ b/host/lib/rfnoc/source_block_ctrl_base.cpp
@@ -17,7 +17,7 @@
#include "utils.hpp"
#include <uhd/rfnoc/source_block_ctrl_base.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/constants.hpp>
using namespace uhd;
@@ -30,13 +30,13 @@ void source_block_ctrl_base::issue_stream_cmd(
const uhd::stream_cmd_t &stream_cmd,
const size_t chan
) {
- UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::issue_stream_cmd()" << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::issue_stream_cmd()" ;
if (_upstream_nodes.empty()) {
- UHD_MSG(warning) << "issue_stream_cmd() not implemented for " << get_block_id() << std::endl;
+ UHD_LOGGER_WARNING("RFNOC") << "issue_stream_cmd() not implemented for " << get_block_id() ;
return;
}
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t upstream_node, _upstream_nodes) {
+ for(const node_ctrl_base::node_map_pair_t upstream_node: _upstream_nodes) {
source_node_ctrl::sptr this_upstream_block_ctrl =
boost::dynamic_pointer_cast<source_node_ctrl>(upstream_node.second.lock());
this_upstream_block_ctrl->issue_stream_cmd(stream_cmd, chan);
@@ -64,7 +64,7 @@ std::vector<size_t> source_block_ctrl_base::get_output_ports() const
{
std::vector<size_t> output_ports;
output_ports.reserve(_tree->list(_root_path / "ports" / "out").size());
- BOOST_FOREACH(const std::string port, _tree->list(_root_path / "ports" / "out")) {
+ for(const std::string port: _tree->list(_root_path / "ports" / "out")) {
output_ports.push_back(boost::lexical_cast<size_t>(port));
}
return output_ports;
@@ -77,10 +77,10 @@ void source_block_ctrl_base::set_destination(
uint32_t next_address,
size_t output_block_port
) {
- UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::set_destination() " << uhd::sid_t(next_address) << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::set_destination() " << uhd::sid_t(next_address) ;
sid_t new_sid(next_address);
new_sid.set_src(get_address(output_block_port));
- UHD_RFNOC_BLOCK_TRACE() << " Setting SID: " << new_sid << std::endl << " ";
+ UHD_RFNOC_BLOCK_TRACE() << " Setting SID: " << new_sid << " ";
sr_write(SR_NEXT_DST_SID, (1<<16) | next_address, output_block_port);
}
@@ -89,7 +89,7 @@ void source_block_ctrl_base::configure_flow_control_out(
size_t block_port,
UHD_UNUSED(const uhd::sid_t &sid)
) {
- UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::configure_flow_control_out() buf_size_pkts==" << buf_size_pkts << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "source_block_ctrl_base::configure_flow_control_out() buf_size_pkts==" << buf_size_pkts ;
if (buf_size_pkts < 2) {
throw uhd::runtime_error(str(
boost::format("Invalid window size %d for block %s. Window size must at least be 2.")
diff --git a/host/lib/rfnoc/source_node_ctrl.cpp b/host/lib/rfnoc/source_node_ctrl.cpp
index c97c72354..1b9d427b9 100644
--- a/host/lib/rfnoc/source_node_ctrl.cpp
+++ b/host/lib/rfnoc/source_node_ctrl.cpp
@@ -16,7 +16,7 @@
//
#include "utils.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/source_node_ctrl.hpp>
#include <uhd/rfnoc/sink_node_ctrl.hpp>
@@ -35,10 +35,10 @@ size_t source_node_ctrl::connect_downstream(
void source_node_ctrl::set_rx_streamer(bool active, const size_t port)
{
- UHD_RFNOC_BLOCK_TRACE() << "source_node_ctrl::set_rx_streamer() " << port << " -> " << active << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "source_node_ctrl::set_rx_streamer() " << port << " -> " << active ;
/* This will enable all upstream blocks:
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t upstream_node, list_upstream_nodes()) {
+ for(const node_ctrl_base::node_map_pair_t upstream_node: list_upstream_nodes()) {
sptr curr_upstream_block_ctrl =
boost::dynamic_pointer_cast<source_node_ctrl>(upstream_node.second.lock());
if (curr_upstream_block_ctrl) {
diff --git a/host/lib/rfnoc/tick_node_ctrl.cpp b/host/lib/rfnoc/tick_node_ctrl.cpp
index 5548194ae..625771945 100644
--- a/host/lib/rfnoc/tick_node_ctrl.cpp
+++ b/host/lib/rfnoc/tick_node_ctrl.cpp
@@ -47,7 +47,7 @@ double tick_node_ctrl::get_tick_rate(
);
} // neighbouring_tick_nodes is now initialized
double ret_val = RATE_UNDEFINED;
- BOOST_FOREACH(const sptr &node, neighbouring_tick_nodes) {
+ for(const sptr &node: neighbouring_tick_nodes) {
if (_explored_nodes.count(node)) {
continue;
}
diff --git a/host/lib/rfnoc/tx_stream_terminator.cpp b/host/lib/rfnoc/tx_stream_terminator.cpp
index ee856843d..1d2653d47 100644
--- a/host/lib/rfnoc/tx_stream_terminator.cpp
+++ b/host/lib/rfnoc/tx_stream_terminator.cpp
@@ -44,8 +44,8 @@ void tx_stream_terminator::set_rx_streamer(bool, const size_t)
void tx_stream_terminator::set_tx_streamer(bool active, const size_t /* port */)
{
// TODO this is identical to sink_node_ctrl::set_tx_streamer() -> factor out
- UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::set_tx_streamer() " << active << std::endl;
- BOOST_FOREACH(const node_ctrl_base::node_map_pair_t downstream_node, _downstream_nodes) {
+ UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::set_tx_streamer() " << active;
+ for(const node_ctrl_base::node_map_pair_t downstream_node: _downstream_nodes) {
sink_node_ctrl::sptr curr_downstream_block_ctrl =
boost::dynamic_pointer_cast<sink_node_ctrl>(downstream_node.second.lock());
if (curr_downstream_block_ctrl) {
@@ -61,7 +61,7 @@ void tx_stream_terminator::set_tx_streamer(bool active, const size_t /* port */)
tx_stream_terminator::~tx_stream_terminator()
{
- UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::~tx_stream_terminator() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::~tx_stream_terminator() " ;
set_tx_streamer(false, 0);
}
diff --git a/host/lib/rfnoc/tx_stream_terminator.hpp b/host/lib/rfnoc/tx_stream_terminator.hpp
index 169d7cd6a..dd2b991f6 100644
--- a/host/lib/rfnoc/tx_stream_terminator.hpp
+++ b/host/lib/rfnoc/tx_stream_terminator.hpp
@@ -24,7 +24,7 @@
#include <uhd/rfnoc/scalar_node_ctrl.hpp>
#include <uhd/rfnoc/terminator_node_ctrl.hpp>
#include <uhd/rfnoc/block_ctrl_base.hpp> // For the block macros
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
namespace uhd {
namespace rfnoc {
@@ -51,7 +51,7 @@ public:
void issue_stream_cmd(const uhd::stream_cmd_t &, const size_t)
{
- UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::issue_stream_cmd()" << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "tx_stream_terminator::issue_stream_cmd()" ;
}
// If this is called, then by a send terminator at the other end
diff --git a/host/lib/rfnoc/utils.hpp b/host/lib/rfnoc/utils.hpp
index ecd3d7cfb..1ee10397c 100644
--- a/host/lib/rfnoc/utils.hpp
+++ b/host/lib/rfnoc/utils.hpp
@@ -47,7 +47,7 @@ namespace uhd { namespace rfnoc { namespace utils {
port++;
}
} else {
- BOOST_FOREACH(const size_t allowed_port, allowed_ports) {
+ for(const size_t allowed_port: allowed_ports) {
if (not nodes.count(port)) {
return allowed_port;
}
@@ -65,7 +65,7 @@ namespace uhd { namespace rfnoc { namespace utils {
template <typename T>
static std::set<T> str_list_to_set(const std::vector<std::string> &list) {
std::set<T> return_set;
- BOOST_FOREACH(const std::string &S, list) {
+ for(const std::string &S: list) {
return_set.insert(boost::lexical_cast<T>(S));
}
return return_set;
diff --git a/host/lib/rfnoc/xports.hpp b/host/lib/rfnoc/xports.hpp
index 98cf71b6d..7bd4f2fc8 100644
--- a/host/lib/rfnoc/xports.hpp
+++ b/host/lib/rfnoc/xports.hpp
@@ -16,6 +16,7 @@
//
#include <uhd/types/sid.hpp>
+#include <uhd/types/endianness.hpp>
#include <uhd/transport/zero_copy.hpp>
namespace uhd {
@@ -31,6 +32,7 @@ namespace uhd {
size_t send_buff_size;
uhd::sid_t send_sid;
uhd::sid_t recv_sid;
+ uhd::endianness_t endianness;
};
};
diff --git a/host/lib/transport/if_addrs.cpp b/host/lib/transport/if_addrs.cpp
index 3b8be5dd5..37b41f303 100644
--- a/host/lib/transport/if_addrs.cpp
+++ b/host/lib/transport/if_addrs.cpp
@@ -36,9 +36,9 @@ std::vector<uhd::transport::if_addrs_t> uhd::transport::get_if_addrs(void){
std::vector<if_addrs_t> if_addrs;
struct ifaddrs *ifap;
if (getifaddrs(&ifap) == 0){
- for (struct ifaddrs *iter = ifap; iter != NULL; iter = iter->ifa_next){
+ for (struct ifaddrs *iter = ifap; iter != nullptr; iter = iter->ifa_next){
//ensure that the entries are valid
- if (iter->ifa_addr == NULL) continue;
+ if (iter->ifa_addr == nullptr) continue;
if (iter->ifa_addr->sa_family != AF_INET) continue;
if (iter->ifa_netmask->sa_family != AF_INET) continue;
if (iter->ifa_broadaddr->sa_family != AF_INET) continue;
diff --git a/host/lib/transport/libusb1_base.cpp b/host/lib/transport/libusb1_base.cpp
index d04b679f8..24c9e3471 100644
--- a/host/lib/transport/libusb1_base.cpp
+++ b/host/lib/transport/libusb1_base.cpp
@@ -17,14 +17,13 @@
#include "libusb1_base.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/types/dict.hpp>
#include <uhd/types/serial.hpp>
#include <boost/weak_ptr.hpp>
#include <boost/thread/mutex.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <cstdlib>
#include <iostream>
@@ -77,7 +76,7 @@ private:
case LIBUSB_ERROR_NO_DEVICE:
throw uhd::io_error(libusb_strerror(LIBUSB_ERROR_NO_DEVICE));
default:
- UHD_MSG(error) << __FUNCTION__ << ": " << libusb_strerror((libusb_error)ret) << std::endl;
+ UHD_LOGGER_ERROR("USB") << __FUNCTION__ << ": " << libusb_strerror((libusb_error)ret) ;
break;
}
}
@@ -227,7 +226,7 @@ public:
std::string string_descriptor((char *)buff, size_t(ret));
byte_vector_t string_vec(string_descriptor.begin(), string_descriptor.end());
std::string out;
- BOOST_FOREACH(uint8_t byte, string_vec){
+ for(uint8_t byte: string_vec){
if (byte < 32 or byte > 127) return out;
out += byte;
}
@@ -276,15 +275,15 @@ public:
{
int ret;
ret = libusb_clear_halt(this->get(), recv_endpoint | 0x80);
- UHD_LOG << "usb device handle: recv endpoint clear: " << libusb_error_name(ret) << std::endl;
+ UHD_LOGGER_TRACE("USB") << "usb device handle: recv endpoint clear: " << libusb_error_name(ret) ;
ret = libusb_clear_halt(this->get(), send_endpoint | 0x00);
- UHD_LOG << "usb device handle: send endpoint clear: " << libusb_error_name(ret) << std::endl;
+ UHD_LOGGER_TRACE("USB") << "usb device handle: send endpoint clear: " << libusb_error_name(ret) ;
}
void reset_device(void)
{
int ret = libusb_reset_device(this->get());
- UHD_LOG << "usb device handle: dev Reset: " << libusb_error_name(ret) << std::endl;
+ UHD_LOGGER_TRACE("USB") << "usb device handle: dev Reset: " << libusb_error_name(ret) ;
}
private:
@@ -321,12 +320,12 @@ libusb::device_handle::sptr libusb::device_handle::get_cached_handle(device::spt
}
catch(const uhd::exception &){
#ifdef UHD_PLATFORM_LINUX
- UHD_MSG(error) <<
+ UHD_LOGGER_ERROR("USB") <<
"USB open failed: insufficient permissions.\n"
"See the application notes for your device.\n"
- << std::endl;
+ ;
#else
- UHD_LOG << "USB open failed: device already claimed." << std::endl;
+ UHD_LOGGER_DEBUG("USB") << "USB open failed: device already claimed." ;
#endif
throw;
}
diff --git a/host/lib/transport/libusb1_zero_copy.cpp b/host/lib/transport/libusb1_zero_copy.cpp
index c32b96b63..39666af94 100644
--- a/host/lib/transport/libusb1_zero_copy.cpp
+++ b/host/lib/transport/libusb1_zero_copy.cpp
@@ -19,9 +19,8 @@
#include <uhd/transport/usb_zero_copy.hpp>
#include <uhd/transport/buffer_pool.hpp>
#include <uhd/transport/bounded_buffer.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/function.hpp>
#include <boost/bind.hpp>
@@ -274,19 +273,19 @@ public:
~libusb_zero_copy_single(void)
{
//cancel all transfers
- BOOST_FOREACH(libusb_transfer *lut, _all_luts)
+ for(libusb_transfer *lut: _all_luts)
{
libusb_cancel_transfer(lut);
}
//process all transfers until timeout occurs
- BOOST_FOREACH(libusb_zero_copy_mb *mb, _enqueued)
+ for(libusb_zero_copy_mb *mb: _enqueued)
{
mb->wait_for_completion(0.01);
}
//free all transfers
- BOOST_FOREACH(libusb_transfer *lut, _all_luts)
+ for(libusb_transfer *lut: _all_luts)
{
libusb_free_transfer(lut);
}
diff --git a/host/lib/transport/nirio/rpc/rpc_client.cpp b/host/lib/transport/nirio/rpc/rpc_client.cpp
index 7beaf9974..96f98ec20 100644
--- a/host/lib/transport/nirio/rpc/rpc_client.cpp
+++ b/host/lib/transport/nirio/rpc/rpc_client.cpp
@@ -25,7 +25,7 @@
if (status) { \
status = (static_cast<size_t>((func)) == exp); \
} else { \
- UHD_LOG << "rpc_client operation skipped: " #func "\n"; \
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client operation skipped: " #func "\n"; \
} \
namespace uhd { namespace usrprio_rpc {
@@ -57,7 +57,7 @@ rpc_client::rpc_client (
tcp::resolver::iterator iterator = resolver.resolve(query);
boost::asio::connect(_socket, iterator);
- UHD_LOG << "rpc_client connected to server." << std::endl;
+ UHD_LOGGER_TRACE("NIRIO") << "rpc_client connected to server." ;
try {
//Perform handshake
@@ -75,24 +75,24 @@ rpc_client::rpc_client (
_hshake_args_client.version >= _hshake_args_server.oldest_comp_version &&
status)
{
- UHD_LOG << "rpc_client bound to server." << std::endl;
+ UHD_LOGGER_TRACE("NIRIO") << "rpc_client bound to server." ;
_wait_for_next_response_header();
//Spawn a thread for the io_service callback handler. This thread will run until rpc_client is destroyed.
_io_service_thread.reset(new boost::thread(boost::bind(&boost::asio::io_service::run, &_io_service)));
} else {
- UHD_LOG << "rpc_client handshake failed." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client handshake failed." ;
_exec_err.assign(boost::asio::error::connection_refused, boost::asio::error::get_system_category());
}
- UHD_LOG << boost::format("rpc_client archive = %d, rpc_server archive = %d\n.") %
+ UHD_LOGGER_TRACE("NIRIO") << boost::format("rpc_client archive = %d, rpc_server archive = %d\n.") %
_hshake_args_client.boost_archive_version %
_hshake_args_server.boost_archive_version;
} catch (boost::exception&) {
- UHD_LOG << "rpc_client handshake aborted." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client handshake aborted." ;
_exec_err.assign(boost::asio::error::connection_refused, boost::asio::error::get_system_category());
}
} catch (boost::exception&) {
- UHD_LOG << "rpc_client connection request cancelled/aborted." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client connection request cancelled/aborted." ;
_exec_err.assign(boost::asio::error::connection_aborted, boost::asio::error::get_system_category());
}
}
@@ -136,18 +136,18 @@ const boost::system::error_code& rpc_client::call(
//Wait for response using condition variable
if (status) {
if (!_exec_gate.timed_wait(lock, timeout)) {
- UHD_LOG << "rpc_client function timed out." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client function timed out." ;
_exec_err.assign(boost::asio::error::timed_out, boost::asio::error::get_system_category());
}
} else {
- UHD_LOG << "rpc_client connection dropped." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client connection dropped." ;
_exec_err.assign(boost::asio::error::connection_aborted, boost::asio::error::get_system_category());
_stop_io_service();
}
//Verify that we are talking to the correct endpoint
if ((_request.header.client_id != _response.header.client_id) && !_exec_err) {
- UHD_LOG << "rpc_client confused about who its talking to." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client confused about who its talking to." ;
_exec_err.assign(boost::asio::error::operation_aborted, boost::asio::error::get_system_category());
}
@@ -180,7 +180,7 @@ void rpc_client::_handle_response_hdr(const boost::system::error_code& err, size
}
} else {
//Unexpected response. Ignore it.
- UHD_LOG << "rpc_client received garbage responses." << std::endl;
+ UHD_LOGGER_DEBUG("NIRIO") << "rpc_client received garbage responses." ;
_exec_err.assign(boost::asio::error::operation_aborted, boost::asio::error::get_system_category());
_wait_for_next_response_header();
diff --git a/host/lib/transport/nirio_zero_copy.cpp b/host/lib/transport/nirio_zero_copy.cpp
index ae32727d7..0635e01cf 100644
--- a/host/lib/transport/nirio_zero_copy.cpp
+++ b/host/lib/transport/nirio_zero_copy.cpp
@@ -19,7 +19,7 @@
#include <stdio.h>
#include <uhd/transport/nirio/nirio_fifo.h>
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/atomic.hpp>
#include <boost/format.hpp>
@@ -149,11 +149,11 @@ public:
_xport_params(xport_params),
_next_recv_buff_index(0), _next_send_buff_index(0)
{
- UHD_LOG << boost::format("Creating PCIe transport for channel %d") % instance << std::endl;
- UHD_LOG << boost::format("nirio zero-copy RX transport configured with frame size = %u, #frames = %u, buffer size = %u\n")
+ UHD_LOGGER_TRACE("NIRIO") << boost::format("Creating PCIe transport for channel %d") % instance ;
+ UHD_LOGGER_TRACE("NIRIO") << boost::format("nirio zero-copy RX transport configured with frame size = %u, #frames = %u, buffer size = %u\n")
% _xport_params.recv_frame_size % _xport_params.num_recv_frames %
(_xport_params.recv_frame_size * _xport_params.num_recv_frames);
- UHD_LOG << boost::format("nirio zero-copy TX transport configured with frame size = %u, #frames = %u, buffer size = %u\n")
+ UHD_LOGGER_TRACE("NIRIO") << boost::format("nirio zero-copy TX transport configured with frame size = %u, #frames = %u, buffer size = %u\n")
% _xport_params.send_frame_size % _xport_params.num_send_frames % (_xport_params.send_frame_size * _xport_params.num_send_frames);
_recv_buffer_pool = buffer_pool::make(_xport_params.num_recv_frames, _xport_params.recv_frame_size);
diff --git a/host/lib/transport/super_recv_packet_handler.hpp b/host/lib/transport/super_recv_packet_handler.hpp
index d34f222e8..4abc73398 100644
--- a/host/lib/transport/super_recv_packet_handler.hpp
+++ b/host/lib/transport/super_recv_packet_handler.hpp
@@ -23,14 +23,13 @@
#include <uhd/exception.hpp>
#include <uhd/convert.hpp>
#include <uhd/stream.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/types/metadata.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
#include <uhd/transport/zero_copy.hpp>
#include <boost/dynamic_bitset.hpp>
-#include <boost/foreach.hpp>
#include <boost/function.hpp>
#include <boost/format.hpp>
#include <boost/bind.hpp>
@@ -333,9 +332,9 @@ private:
void reset()
{
buff.reset();
- vrt_hdr = NULL;
+ vrt_hdr = nullptr;
time = time_spec_t(0.0);
- copy_buff = NULL;
+ copy_buff = nullptr;
}
managed_recv_buffer::sptr buff;
const uint32_t *vrt_hdr;
@@ -410,7 +409,7 @@ private:
//get a single packet from the transport layer
managed_recv_buffer::sptr &buff = curr_buffer_info.buff;
buff = _props[index].get_buff(timeout);
- if (buff.get() == NULL) return PACKET_TIMEOUT_ERROR;
+ if (buff.get() == nullptr) return PACKET_TIMEOUT_ERROR;
#ifdef ERROR_INJECT_DROPPED_PACKETS
if (++recvd_packets > 1000)
@@ -418,7 +417,7 @@ private:
recvd_packets = 0;
buff.reset();
buff = _props[index].get_buff(timeout);
- if (buff.get() == NULL) return PACKET_TIMEOUT_ERROR;
+ if (buff.get() == nullptr) return PACKET_TIMEOUT_ERROR;
}
#endif
@@ -461,7 +460,7 @@ private:
const size_t expected_packet_count = _props[index].packet_count;
_props[index].packet_count = (info.ifpi.packet_count + 1) & seq_mask;
if (expected_packet_count != info.ifpi.packet_count){
- //UHD_MSG(status) << "expected: " << expected_packet_count << " got: " << info.ifpi.packet_count << std::endl;
+ //UHD_LOGGER_INFO("STREAMER") << "expected: " << expected_packet_count << " got: " << info.ifpi.packet_count;
if (_props[index].handle_flowctrl) {
// Always update flow control in this case, because we don't
// know which packet was dropped and what state the upstream
@@ -574,9 +573,9 @@ private:
//handle the case where a bad header exists
catch(const uhd::value_error &e){
- UHD_MSG(error) << boost::format(
+ UHD_LOGGER_ERROR("STREAMER") << boost::format(
"The receive packet handler caught a value exception.\n%s"
- ) % e.what() << std::endl;
+ ) % e.what();
std::swap(curr_info, next_info); //save progress from curr -> next
curr_info.metadata.error_code = rx_metadata_t::ERROR_CODE_BAD_PACKET;
return;
@@ -614,10 +613,10 @@ private:
rx_metadata_t metadata = curr_info.metadata;
_props[index].handle_overflow();
curr_info.metadata = metadata;
- UHD_MSG(fastpath) << "O";
+ UHD_LOG_FASTPATH("O")
}
curr_info[index].buff.reset();
- curr_info[index].copy_buff = NULL;
+ curr_info[index].copy_buff = nullptr;
return;
case PACKET_TIMEOUT_ERROR:
@@ -636,18 +635,18 @@ private:
prev_info[index].ifpi.num_payload_words32*sizeof(uint32_t)/_bytes_per_otw_item, _samp_rate);
curr_info.metadata.out_of_sequence = true;
curr_info.metadata.error_code = rx_metadata_t::ERROR_CODE_OVERFLOW;
- UHD_MSG(fastpath) << "D";
+ UHD_LOG_FASTPATH("D")
return;
}
//too many iterations: detect alignment failure
if (iterations++ > _alignment_failure_threshold){
- UHD_MSG(error) << boost::format(
- "The receive packet handler failed to time-align packets.\n"
- "%u received packets were processed by the handler.\n"
- "However, a timestamp match could not be determined.\n"
- ) % iterations << std::endl;
+ UHD_LOGGER_ERROR("STREAMER") << boost::format(
+ "The receive packet handler failed to time-align packets. "
+ "%u received packets were processed by the handler. "
+ "However, a timestamp match could not be determined."
+ ) % iterations;
std::swap(curr_info, next_info); //save progress from curr -> next
curr_info.metadata.error_code = rx_metadata_t::ERROR_CODE_ALIGNMENT;
_props[index].handle_overflow();
diff --git a/host/lib/transport/super_send_packet_handler.hpp b/host/lib/transport/super_send_packet_handler.hpp
index 0acc8df4b..12c053efb 100644
--- a/host/lib/transport/super_send_packet_handler.hpp
+++ b/host/lib/transport/super_send_packet_handler.hpp
@@ -23,7 +23,7 @@
#include <uhd/exception.hpp>
#include <uhd/convert.hpp>
#include <uhd/stream.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/types/metadata.hpp>
@@ -31,7 +31,6 @@
#include <uhd/transport/zero_copy.hpp>
#include <boost/thread/thread.hpp>
#include <boost/thread/thread_time.hpp>
-#include <boost/foreach.hpp>
#include <boost/function.hpp>
#include <iostream>
#include <vector>
diff --git a/host/lib/transport/tcp_zero_copy.cpp b/host/lib/transport/tcp_zero_copy.cpp
index f2ae51695..dc1848b00 100644
--- a/host/lib/transport/tcp_zero_copy.cpp
+++ b/host/lib/transport/tcp_zero_copy.cpp
@@ -18,7 +18,7 @@
#include "udp_common.hpp"
#include <uhd/transport/tcp_zero_copy.hpp>
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/atomic.hpp>
#include <boost/format.hpp>
@@ -144,7 +144,7 @@ public:
_send_buffer_pool(buffer_pool::make(_num_send_frames, _send_frame_size)),
_next_recv_buff_index(0), _next_send_buff_index(0)
{
- UHD_LOG << boost::format("Creating tcp transport for %s %s") % addr % port << std::endl;
+ UHD_LOGGER_TRACE("TCP") << boost::format("Creating tcp transport for %s %s") % addr % port ;
//resolve the address
asio::ip::tcp::resolver resolver(_io_service);
diff --git a/host/lib/transport/udp_common.hpp b/host/lib/transport/udp_common.hpp
index 3ee568432..d6e73cc5f 100644
--- a/host/lib/transport/udp_common.hpp
+++ b/host/lib/transport/udp_common.hpp
@@ -35,6 +35,7 @@ namespace uhd{ namespace transport{
* \return true when the socket is ready for receive
*/
UHD_INLINE bool wait_for_recv_ready(int sock_fd, double timeout){
+#ifdef UHD_PLATFORM_WIN32 // select is more portable than poll unfortunately
//setup timeval for timeout
timeval tv;
//If the tv_usec > 1 second on some platforms, select will
@@ -56,6 +57,17 @@ namespace uhd{ namespace transport{
//call select with timeout on receive socket
return TEMP_FAILURE_RETRY(::select(sock_fd+1, &rset, NULL, NULL, &tv)) > 0;
+#else
+ //calculate the total timeout in milliseconds (from seconds)
+ int total_timeout = int(timeout*1000);
+
+ pollfd pfd_read;
+ pfd_read.fd = sock_fd;
+ pfd_read.events = POLLIN;
+
+ //call poll with timeout on receive socket
+ return ::poll(&pfd_read, 1, total_timeout) > 0;
+#endif
}
}} //namespace uhd::transport
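
Note: the hunk above keeps the select()-based wait for Windows only and adds a poll() path for the other platforms. A self-contained POSIX sketch of the poll() pattern (illustrative, not the UHD function itself):

    #include <poll.h>
    #include <sys/socket.h>
    #include <unistd.h>
    #include <cstdio>

    // Return true when sock_fd becomes readable within `timeout` seconds.
    static bool wait_readable(int sock_fd, double timeout) {
        pollfd pfd_read;
        pfd_read.fd = sock_fd;
        pfd_read.events = POLLIN;
        pfd_read.revents = 0;

        const int timeout_ms = int(timeout * 1000); // poll() takes milliseconds
        return ::poll(&pfd_read, 1, timeout_ms) > 0;
    }

    int main() {
        const int fd = ::socket(AF_INET, SOCK_DGRAM, 0); // nothing will arrive; expect a timeout
        std::printf("readable: %s\n", wait_readable(fd, 0.1) ? "yes" : "no");
        ::close(fd);
        return 0;
    }
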
diff --git a/host/lib/transport/udp_simple.cpp b/host/lib/transport/udp_simple.cpp
index 0a93941b8..75862d1a9 100644
--- a/host/lib/transport/udp_simple.cpp
+++ b/host/lib/transport/udp_simple.cpp
@@ -31,7 +31,7 @@ public:
udp_simple_impl(
const std::string &addr, const std::string &port, bool bcast, bool connect
):_connected(connect){
- UHD_LOG << boost::format("Creating udp transport for %s %s") % addr % port << std::endl;
+ UHD_LOGGER_TRACE("UDP") << boost::format("Creating udp transport for %s %s") % addr % port ;
//resolve the address
asio::ip::udp::resolver resolver(_io_service);
@@ -64,6 +64,10 @@ public:
return _recv_endpoint.address().to_string();
}
+ std::string get_send_addr(void){
+ return _send_endpoint.address().to_string();
+ }
+
private:
bool _connected;
asio::io_service _io_service;
diff --git a/host/lib/transport/udp_wsa_zero_copy.cpp b/host/lib/transport/udp_wsa_zero_copy.cpp
index 52382f84d..66af394a9 100644
--- a/host/lib/transport/udp_wsa_zero_copy.cpp
+++ b/host/lib/transport/udp_wsa_zero_copy.cpp
@@ -19,7 +19,7 @@
#include <uhd/transport/udp_zero_copy.hpp>
#include <uhd/transport/udp_simple.hpp> //mtu
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <boost/format.hpp>
#include <vector>
@@ -47,11 +47,11 @@ static void check_registry_for_fast_send_threshold(const size_t mtu){
reg_key.Open(HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\AFD\\Parameters", KEY_READ) != ERROR_SUCCESS or
reg_key.QueryDWORDValue("FastSendDatagramThreshold", threshold) != ERROR_SUCCESS or threshold < mtu
){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("UDP") << boost::format(
"The MTU (%d) is larger than the FastSendDatagramThreshold (%d)!\n"
"This will negatively affect the transmit performance.\n"
"See the transport application notes for more detail.\n"
- ) % mtu % threshold << std::endl;
+ ) % mtu % threshold ;
warned = true;
}
reg_key.Close();
@@ -197,7 +197,7 @@ public:
check_registry_for_fast_send_threshold(this->get_send_frame_size());
#endif /*CHECK_REG_SEND_THRESH*/
- UHD_MSG(status) << boost::format("Creating WSA UDP transport for %s:%s") % addr % port << std::endl;
+ UHD_LOGGER_INFO("UDP") << boost::format("Creating WSA UDP transport for %s:%s") % addr % port ;
static uhd_wsa_control uhd_wsa; //makes wsa start happen via lazy initialization
UHD_ASSERT_THROW(_num_send_frames <= WSA_MAXIMUM_WAIT_EVENTS);
@@ -275,6 +275,16 @@ public:
size_t get_num_send_frames(void) const {return _num_send_frames;}
size_t get_send_frame_size(void) const {return _send_frame_size;}
+ uint16_t get_local_port(void) const {
+ struct sockaddr_in addr_info;
+ int addr_len = sizeof(addr_info);
+ uint16_t local_port = 0;
+ if (getsockname( _sock_fd, (SOCKADDR*) &addr_info,
+ &addr_len) == 0){
+ local_port = ntohs(addr_info.sin_port);
+ }
+ return local_port;
+ }
//! Read back the socket's buffer space reserved for receives
size_t get_recv_buff_size(void) {
@@ -327,11 +337,11 @@ void check_usr_buff_size(
size_t user_buff_size, // Set this to zero for no user-defined preference
const std::string tx_rx
){
- UHD_LOG << boost::format(
+ UHD_LOGGER_DEBUG("UDP") << boost::format(
"Target %s sock buff size: %d bytes\n"
"Actual %s sock buff size: %d bytes"
- ) % tx_rx % user_buff_size % tx_rx % actual_buff_size << std::endl;
- if ((user_buff_size != 0.0) and (actual_buff_size < user_buff_size)) UHD_MSG(warning) << boost::format(
+ ) % tx_rx % user_buff_size % tx_rx % actual_buff_size ;
+ if ((user_buff_size != 0.0) and (actual_buff_size < user_buff_size)) UHD_LOGGER_WARNING("UDP") << boost::format(
"The %s buffer could not be resized sufficiently.\n"
"Target sock buff size: %d bytes.\n"
"Actual sock buff size: %d bytes.\n"
diff --git a/host/lib/transport/udp_zero_copy.cpp b/host/lib/transport/udp_zero_copy.cpp
index 63044c1e3..814f41500 100644
--- a/host/lib/transport/udp_zero_copy.cpp
+++ b/host/lib/transport/udp_zero_copy.cpp
@@ -19,7 +19,7 @@
#include <uhd/transport/udp_zero_copy.hpp>
#include <uhd/transport/udp_simple.hpp> //mtu
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/atomic.hpp>
#include <boost/format.hpp>
@@ -50,11 +50,11 @@ static void check_registry_for_fast_send_threshold(const size_t mtu){
reg_key.Open(HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\AFD\\Parameters", KEY_READ) != ERROR_SUCCESS or
reg_key.QueryDWORDValue("FastSendDatagramThreshold", threshold) != ERROR_SUCCESS or threshold < mtu
){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("UDP") << boost::format(
"The MTU (%d) is larger than the FastSendDatagramThreshold (%d)!\n"
"This will negatively affect the transmit performance.\n"
"See the transport application notes for more detail.\n"
- ) % mtu % threshold << std::endl;
+ ) % mtu % threshold ;
warned = true;
}
reg_key.Close();
@@ -175,7 +175,7 @@ public:
_send_buffer_pool(buffer_pool::make(xport_params.num_send_frames, xport_params.send_frame_size)),
_next_recv_buff_index(0), _next_send_buff_index(0)
{
- UHD_LOG << boost::format("Creating udp transport for %s %s") % addr % port << std::endl;
+ UHD_LOGGER_TRACE("UDP") << boost::format("Creating udp transport for %s %s") % addr % port ;
#ifdef CHECK_REG_SEND_THRESH
check_registry_for_fast_send_threshold(this->get_send_frame_size());
@@ -244,6 +244,7 @@ public:
size_t get_num_send_frames(void) const {return _num_send_frames;}
size_t get_send_frame_size(void) const {return _send_frame_size;}
+ uint16_t get_local_port(void) const {return _socket->local_endpoint().port();}
private:
//memory management -> buffers and fifos
@@ -272,18 +273,18 @@ template<typename Opt> static size_t resize_buff_helper(
std::string help_message;
#if defined(UHD_PLATFORM_LINUX)
help_message = str(boost::format(
- "Please run: sudo sysctl -w net.core.%smem_max=%d\n"
+ "Please run: sudo sysctl -w net.core.%smem_max=%d"
) % ((name == "recv")?"r":"w") % target_size);
#endif /*defined(UHD_PLATFORM_LINUX)*/
//resize the buffer if size was provided
if (target_size > 0){
actual_size = udp_trans->resize_buff<Opt>(target_size);
- UHD_LOG << boost::format(
+ UHD_LOGGER_DEBUG("UDP") << boost::format(
"Target %s sock buff size: %d bytes\n"
"Actual %s sock buff size: %d bytes"
- ) % name % target_size % name % actual_size << std::endl;
- if (actual_size < target_size) UHD_MSG(warning) << boost::format(
+ ) % name % target_size % name % actual_size ;
+ if (actual_size < target_size) UHD_LOGGER_WARNING("UDP") << boost::format(
"The %s buffer could not be resized sufficiently.\n"
"Target sock buff size: %d bytes.\n"
"Actual sock buff size: %d bytes.\n"
diff --git a/host/lib/transport/xport_benchmarker.hpp b/host/lib/transport/xport_benchmarker.hpp
index 7383d34f1..ffafbea5a 100644
--- a/host/lib/transport/xport_benchmarker.hpp
+++ b/host/lib/transport/xport_benchmarker.hpp
@@ -20,7 +20,7 @@
#include <uhd/transport/zero_copy.hpp>
#include <uhd/types/device_addr.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/thread/thread.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
diff --git a/host/lib/transport/zero_copy_flow_ctrl.cpp b/host/lib/transport/zero_copy_flow_ctrl.cpp
index 06d7934e2..ef8f5c9d1 100644
--- a/host/lib/transport/zero_copy_flow_ctrl.cpp
+++ b/host/lib/transport/zero_copy_flow_ctrl.cpp
@@ -18,7 +18,6 @@
#include <uhd/transport/zero_copy_flow_ctrl.hpp>
#include <uhd/transport/bounded_buffer.hpp>
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/format.hpp>
@@ -38,7 +37,7 @@ public:
zero_copy_flow_ctrl_msb(
flow_ctrl_func flow_ctrl
) :
- _mb(NULL),
+ _mb(nullptr),
_flow_ctrl(flow_ctrl)
{
/* NOP */
@@ -76,7 +75,7 @@ public:
zero_copy_flow_ctrl_mrb(
flow_ctrl_func flow_ctrl
) :
- _mb(NULL),
+ _mb(nullptr),
_flow_ctrl(flow_ctrl)
{
/* NOP */
@@ -128,7 +127,7 @@ public:
_send_flow_ctrl(send_flow_ctrl),
_recv_flow_ctrl(recv_flow_ctrl)
{
- UHD_LOG << "Created zero_copy_flow_ctrl" << std::endl;
+ UHD_LOG_TRACE("TRANSPORT", "Created zero_copy_flow_ctrl");
for (size_t i = 0; i < transport->get_num_send_frames(); i++)
{
diff --git a/host/lib/transport/zero_copy_recv_offload.cpp b/host/lib/transport/zero_copy_recv_offload.cpp
index e8b013abc..5de2cd44d 100644
--- a/host/lib/transport/zero_copy_recv_offload.cpp
+++ b/host/lib/transport/zero_copy_recv_offload.cpp
@@ -18,7 +18,7 @@
#include <uhd/transport/zero_copy_recv_offload.hpp>
#include <uhd/transport/bounded_buffer.hpp>
#include <uhd/transport/buffer_pool.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/format.hpp>
@@ -47,7 +47,7 @@ public:
_inbox(transport->get_num_recv_frames()),
_recv_done(false)
{
- UHD_LOG << "Created threaded transport" << std::endl;
+ UHD_LOGGER_TRACE("XPORT") << "Created threaded transport" ;
// Create the receive and send threads to offload
// the system calls onto other threads
diff --git a/host/lib/types/byte_vector.cpp b/host/lib/types/byte_vector.cpp
index 15ae93858..f08b6e088 100644
--- a/host/lib/types/byte_vector.cpp
+++ b/host/lib/types/byte_vector.cpp
@@ -15,7 +15,6 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <boost/foreach.hpp>
#include <uhd/types/byte_vector.hpp>
@@ -23,7 +22,7 @@ namespace uhd{
std::string bytes_to_string(const byte_vector_t &bytes){
std::string out;
- BOOST_FOREACH(uint8_t byte, bytes){
+ for(uint8_t byte: bytes){
if (byte < 32 or byte > 127) return out;
out += byte;
}
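
Note: bytes_to_string() above copies bytes into a std::string and stops at the first byte outside the printable range 32..127. A brief usage sketch; it assumes uhd::byte_vector_t is a vector of uint8_t, and the sample bytes are made up:

    #include <uhd/types/byte_vector.hpp>
    #include <iostream>

    int main() {
        // "B210" followed by a 0x00 terminator and trailing padding bytes.
        uhd::byte_vector_t bytes{'B', '2', '1', '0', 0x00, 0xFF, 0xFF};
        // Conversion stops at the first non-printable byte.
        std::cout << uhd::bytes_to_string(bytes) << std::endl; // prints "B210"
        return 0;
    }
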
diff --git a/host/lib/types/device_addr.cpp b/host/lib/types/device_addr.cpp
index 747f61b8d..69380fa26 100644
--- a/host/lib/types/device_addr.cpp
+++ b/host/lib/types/device_addr.cpp
@@ -18,7 +18,6 @@
#include <uhd/types/device_addr.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/tokenizer.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/regex.hpp>
#include <stdexcept>
@@ -38,10 +37,10 @@ static std::string trim(const std::string &in){
(inp, boost::char_separator<char>(sep))
device_addr_t::device_addr_t(const std::string &args){
- BOOST_FOREACH(const std::string &pair, tokenizer(args, arg_delim)){
+ for(const std::string &pair: tokenizer(args, arg_delim)){
if (trim(pair) == "") continue;
std::vector<std::string> toks;
- BOOST_FOREACH(const std::string &tok, tokenizer(pair, pair_delim)){
+ for(const std::string &tok: tokenizer(pair, pair_delim)){
toks.push_back(tok);
}
if (toks.size() == 1) toks.push_back(""); //pad empty value
@@ -57,7 +56,7 @@ std::string device_addr_t::to_pp_string(void) const{
std::stringstream ss;
ss << "Device Address:" << std::endl;
- BOOST_FOREACH(std::string key, this->keys()){
+ for(std::string key: this->keys()){
ss << boost::format(" %s: %s") % key % this->get(key) << std::endl;
}
return ss.str();
@@ -66,13 +65,13 @@ std::string device_addr_t::to_pp_string(void) const{
std::string device_addr_t::to_string(void) const{
std::string args_str;
size_t count = 0;
- BOOST_FOREACH(const std::string &key, this->keys()){
+ for(const std::string &key: this->keys()){
args_str += ((count++)? arg_delim : "") + key + pair_delim + this->get(key);
}
return args_str;
}
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
device_addrs_t uhd::separate_device_addr(const device_addr_t &dev_addr){
//------------ support old deprecated way and print warning --------
@@ -84,7 +83,7 @@ device_addrs_t uhd::separate_device_addr(const device_addr_t &dev_addr){
for (size_t i = 0; i < addrs.size(); i++){
fixed_dev_addr[str(boost::format("addr%d") % i)] = addrs[i];
}
- UHD_MSG(warning) <<
+ UHD_LOGGER_WARNING("UHD") <<
"addr = <space separated list of ip addresses> is deprecated.\n"
"To address a multi-device, use multiple <key><index> = <val>.\n"
"See the USRP-NXXX application notes. Two device example:\n"
@@ -97,7 +96,7 @@ device_addrs_t uhd::separate_device_addr(const device_addr_t &dev_addr){
//------------------------------------------------------------------
device_addrs_t dev_addrs(1); //must be at least one (obviously)
std::vector<std::string> global_keys; //keys that apply to all (no numerical suffix)
- BOOST_FOREACH(const std::string &key, dev_addr.keys()){
+ for(const std::string &key: dev_addr.keys()){
boost::cmatch matches;
if (not boost::regex_match(key.c_str(), matches, boost::regex("^(\\D+)(\\d*)$"))){
throw std::runtime_error("unknown key format: " + key);
@@ -114,8 +113,8 @@ device_addrs_t uhd::separate_device_addr(const device_addr_t &dev_addr){
}
//copy the global settings across all device addresses
- BOOST_FOREACH(device_addr_t &my_dev_addr, dev_addrs){
- BOOST_FOREACH(const std::string &global_key, global_keys){
+ for(device_addr_t &my_dev_addr: dev_addrs){
+ for(const std::string &global_key: global_keys){
my_dev_addr[global_key] = dev_addr[global_key];
}
}
@@ -125,7 +124,7 @@ device_addrs_t uhd::separate_device_addr(const device_addr_t &dev_addr){
device_addr_t uhd::combine_device_addrs(const device_addrs_t &dev_addrs){
device_addr_t dev_addr;
for (size_t i = 0; i < dev_addrs.size(); i++){
- BOOST_FOREACH(const std::string &key, dev_addrs[i].keys()){
+ for(const std::string &key: dev_addrs[i].keys()){
dev_addr[str(boost::format("%s%d") % key % i)] = dev_addrs[i][key];
}
}
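
Note: separate_device_addr() and combine_device_addrs() above split and merge multi-device address sets, using the <key><index>=<value> convention from the deprecation warning in this hunk. A brief usage sketch (the specific keys and values are illustrative):

    #include <uhd/types/device_addr.hpp>
    #include <iostream>

    int main() {
        // Per-device keys carry a numeric suffix; keys without one apply to all devices.
        uhd::device_addr_t args("addr0=192.168.10.2,addr1=192.168.10.3,master_clock_rate=184.32e6");
        uhd::device_addrs_t split = uhd::separate_device_addr(args);
        for (size_t i = 0; i < split.size(); i++) {
            std::cout << "device " << i << ":\n" << split[i].to_pp_string() << std::endl;
        }
        // combine_device_addrs() re-applies the numeric suffixes.
        std::cout << uhd::combine_device_addrs(split).to_string() << std::endl;
        return 0;
    }
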
diff --git a/host/lib/types/mac_addr.cpp b/host/lib/types/mac_addr.cpp
index ab71bc0c3..2e5651558 100644
--- a/host/lib/types/mac_addr.cpp
+++ b/host/lib/types/mac_addr.cpp
@@ -18,7 +18,6 @@
#include <uhd/types/mac_addr.hpp>
#include <uhd/exception.hpp>
#include <boost/tokenizer.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <stdint.h>
#include <sstream>
@@ -45,7 +44,7 @@ mac_addr_t mac_addr_t::from_string(const std::string &mac_addr_str){
//split the mac addr hex string at the colons
boost::tokenizer<boost::char_separator<char> > hex_num_toks(
mac_addr_str, boost::char_separator<char>(":"));
- BOOST_FOREACH(const std::string &hex_str, hex_num_toks){
+ for(const std::string &hex_str: hex_num_toks){
int hex_num;
std::istringstream iss(hex_str);
iss >> std::hex >> hex_num;
@@ -68,7 +67,7 @@ byte_vector_t mac_addr_t::to_bytes(void) const{
std::string mac_addr_t::to_string(void) const{
std::string addr = "";
- BOOST_FOREACH(uint8_t byte, this->to_bytes()){
+ for(uint8_t byte: this->to_bytes()){
addr += str(boost::format("%s%02x") % ((addr == "")?"":":") % int(byte));
}
return addr;
diff --git a/host/lib/types/ranges.cpp b/host/lib/types/ranges.cpp
index 82a9a84e1..d22e2fb6a 100644
--- a/host/lib/types/ranges.cpp
+++ b/host/lib/types/ranges.cpp
@@ -18,7 +18,6 @@
#include <uhd/types/ranges.hpp>
#include <uhd/exception.hpp>
#include <boost/math/special_functions/round.hpp>
-#include <boost/foreach.hpp>
#include <algorithm>
#include <sstream>
@@ -64,6 +63,18 @@ const std::string range_t::to_pp_string(void) const{
return ss.str();
}
+bool range_t::operator==(const range_t &other) const{
+ return (other._start == _start and
+ other._step == _step and
+ other._stop == _stop);
+}
+
+bool range_t::operator!=(const range_t &other) const{
+ return (other._start != _start or
+ other._step != _step or
+ other._stop != _stop);
+}
+
/***********************************************************************
* meta_range_t implementation code
**********************************************************************/
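
Note: range_t gains operator== and operator!= in the hunk above (the matching declarations land in uhd/types/ranges.hpp in this same patch). A short usage sketch; the (start, stop, step) constructor is assumed from the existing range_t API:

    #include <uhd/types/ranges.hpp>
    #include <iostream>

    int main() {
        const uhd::range_t a(0.0, 10.0, 1.0); // start, stop, step
        const uhd::range_t b(0.0, 10.0, 1.0);
        const uhd::range_t c(0.0, 20.0, 1.0);

        std::cout << std::boolalpha;
        std::cout << (a == b) << std::endl; // true: all three fields match
        std::cout << (a != c) << std::endl; // true: stop differs
        return 0;
    }
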
@@ -93,7 +104,7 @@ meta_range_t::meta_range_t(
double meta_range_t::start(void) const{
check_meta_range_monotonic(*this);
double min_start = this->front().start();
- BOOST_FOREACH(const range_t &r, (*this)){
+ for(const range_t &r: (*this)){
min_start = std::min(min_start, r.start());
}
return min_start;
@@ -102,7 +113,7 @@ double meta_range_t::start(void) const{
double meta_range_t::stop(void) const{
check_meta_range_monotonic(*this);
double max_stop = this->front().stop();
- BOOST_FOREACH(const range_t &r, (*this)){
+ for(const range_t &r: (*this)){
max_stop = std::max(max_stop, r.stop());
}
return max_stop;
@@ -112,7 +123,7 @@ double meta_range_t::step(void) const{
check_meta_range_monotonic(*this);
std::vector<double> non_zero_steps;
range_t last = this->front();
- BOOST_FOREACH(const range_t &r, (*this)){
+ for(const range_t &r: (*this)){
//steps at each range
if (r.step() > 0) non_zero_steps.push_back(r.step());
//and steps in-between ranges
@@ -128,7 +139,7 @@ double meta_range_t::step(void) const{
double meta_range_t::clip(double value, bool clip_step) const{
check_meta_range_monotonic(*this);
double last_stop = this->front().stop();
- BOOST_FOREACH(const range_t &r, (*this)){
+ for(const range_t &r: (*this)){
//in-between ranges, clip to nearest
if (value < r.start()){
return (std::abs(value - r.start()) < std::abs(value - last_stop))?
@@ -147,7 +158,7 @@ double meta_range_t::clip(double value, bool clip_step) const{
const std::string meta_range_t::to_pp_string(void) const{
std::stringstream ss;
- BOOST_FOREACH(const range_t &r, (*this)){
+ for(const range_t &r: (*this)){
ss << r.to_pp_string() << std::endl;
}
return ss.str();
diff --git a/host/lib/types/sid.cpp b/host/lib/types/sid.cpp
index b697bd614..40f84a2aa 100644
--- a/host/lib/types/sid.cpp
+++ b/host/lib/types/sid.cpp
@@ -1,5 +1,5 @@
//
-// Copyright 2014 Ettus Research LLC
+// Copyright 2014-2016 Ettus Research LLC
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -15,12 +15,12 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
+#include <uhd/types/sid.hpp>
+#include <uhd/exception.hpp>
+#include <uhd/utils/cast.hpp>
#include <boost/format.hpp>
#include <boost/regex.hpp>
#include <boost/lexical_cast.hpp>
-#include <uhd/exception.hpp>
-#include <uhd/types/sid.hpp>
-#include <uhd/utils/cast.hpp>
using namespace uhd;
@@ -141,7 +141,7 @@ void sid_t::set_dst_blockport(uint32_t new_blockport)
set_sid((_sid & 0xFFFFFFF0) | ((new_blockport & 0xF) << 0));
}
-sid_t sid_t::reversed()
+sid_t sid_t::reversed() const
{
return sid_t((get_dst() << 16) | get_src());
}
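
Note: reversed() becomes const above; per the body shown, it returns a new sid_t with the source and destination halves swapped, i.e. (dst << 16) | src. A tiny illustration; get() returning the raw 32-bit SID is assumed from the existing sid_t accessors:

    #include <uhd/types/sid.hpp>
    #include <iostream>

    int main() {
        const uhd::sid_t sid(0x02300250);       // src = 0x0230, dst = 0x0250
        const uhd::sid_t back = sid.reversed(); // now callable on a const sid_t
        std::cout << std::hex << back.get() << std::endl; // 0x02500230
        return 0;
    }
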
diff --git a/host/lib/uhd.rc.in b/host/lib/uhd.rc.in
index 24177a00a..dee6bb8a3 100644
--- a/host/lib/uhd.rc.in
+++ b/host/lib/uhd.rc.in
@@ -1,8 +1,8 @@
#include <windows.h>
VS_VERSION_INFO VERSIONINFO
- FILEVERSION @TRIMMED_VERSION_MAJOR_API@,@TRIMMED_VERSION_ABI@,@RC_TRIMMED_VERSION_PATCH@,@UHD_GIT_COUNT@
- PRODUCTVERSION @TRIMMED_VERSION_MAJOR_API@,@TRIMMED_VERSION_ABI@,@RC_TRIMMED_VERSION_PATCH@,@UHD_GIT_COUNT@
+ FILEVERSION @RC_VERSION_MAJOR_API@,@UHD_VERSION_ABI@,@RC_VERSION_PATCH@,@UHD_GIT_COUNT@
+ PRODUCTVERSION @RC_VERSION_MAJOR_API@,@UHD_VERSION_ABI@,@RC_VERSION_PATCH@,@UHD_GIT_COUNT@
FILEFLAGSMASK 0x3fL
#ifndef NDEBUG
FILEFLAGS 0x0L
diff --git a/host/lib/usrp/b100/b100_impl.cpp b/host/lib/usrp/b100/b100_impl.cpp
index 1a38bf3b7..25f99e85e 100644
--- a/host/lib/usrp/b100/b100_impl.cpp
+++ b/host/lib/usrp/b100/b100_impl.cpp
@@ -19,7 +19,7 @@
#include "b100_impl.hpp"
#include "b100_regs.hpp"
#include <uhd/transport/usb_control.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/cast.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/static.hpp>
@@ -73,17 +73,17 @@ static device_addrs_t b100_find(const device_addr_t &hint)
//find the usrps and load firmware
size_t found = 0;
- BOOST_FOREACH(usb_device_handle::sptr handle, usb_device_handle::get_device_list(vid, pid)) {
+ for(usb_device_handle::sptr handle: usb_device_handle::get_device_list(vid, pid)) {
//extract the firmware path for the b100
std::string b100_fw_image;
try{
b100_fw_image = find_image_path(hint.get("fw", B100_FW_FILE_NAME));
}
catch(...){
- UHD_MSG(warning) << boost::format("Could not locate B100 firmware. %s\n") % print_utility_error("uhd_images_downloader.py");
+ UHD_LOGGER_WARNING("B100") << boost::format("Could not locate B100 firmware. %s\n") % print_utility_error("uhd_images_downloader.py");
return b100_addrs;
}
- UHD_LOG << "the firmware image: " << b100_fw_image << std::endl;
+ UHD_LOGGER_DEBUG("B100") << "the firmware image: " << b100_fw_image ;
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -102,7 +102,7 @@ static device_addrs_t b100_find(const device_addr_t &hint)
//search for the device until found or timeout
while (boost::get_system_time() < timeout_time and b100_addrs.empty() and found != 0)
{
- BOOST_FOREACH(usb_device_handle::sptr handle, usb_device_handle::get_device_list(vid, pid))
+ for(usb_device_handle::sptr handle: usb_device_handle::get_device_list(vid, pid))
{
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -161,7 +161,7 @@ b100_impl::b100_impl(const device_addr_t &device_addr){
//locate the matching handle in the device list
usb_device_handle::sptr handle;
- BOOST_FOREACH(usb_device_handle::sptr dev_handle, device_list) {
+ for(usb_device_handle::sptr dev_handle: device_list) {
if (dev_handle->get_serial() == device_addr["serial"]){
handle = dev_handle;
break;
@@ -202,9 +202,9 @@ b100_impl::b100_impl(const device_addr_t &device_addr){
//try reset once in the case of failure
catch(const uhd::exception &ex){
if (initialization_count > 1) throw;
- UHD_MSG(warning) <<
+ UHD_LOGGER_WARNING("B100") <<
"The control endpoint was left in a bad state.\n"
- "Attempting endpoint re-enumeration...\n" << ex.what() << std::endl;
+ "Attempting endpoint re-enumeration...\n" << ex.what() ;
_fifo_ctrl.reset();
_ctrl_transport.reset();
_fx2_ctrl->usrp_fx2_reset();
@@ -230,9 +230,9 @@ b100_impl::b100_impl(const device_addr_t &device_addr){
//try reset once in the case of failure
catch(const uhd::exception &){
if (initialization_count > 1) throw;
- UHD_MSG(warning) <<
+ UHD_LOGGER_WARNING("B100") <<
"The control endpoint was left in a bad state.\n"
- "Attempting endpoint re-enumeration...\n" << std::endl;
+ "Attempting endpoint re-enumeration...\n" ;
_fifo_ctrl.reset();
_ctrl_transport.reset();
_fx2_ctrl->usrp_fx2_reset();
@@ -478,12 +478,12 @@ b100_impl::b100_impl(const device_addr_t &device_addr){
//bind frontend corrections to the dboard freq props
const fs_path db_tx_fe_path = mb_path / "dboards" / "A" / "tx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_tx_fe_path)){
+ for(const std::string &name: _tree->list(db_tx_fe_path)){
_tree->access<double>(db_tx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&b100_impl::set_tx_fe_corrections, this, _1));
}
const fs_path db_rx_fe_path = mb_path / "dboards" / "A" / "rx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_rx_fe_path)){
+ for(const std::string &name: _tree->list(db_rx_fe_path)){
_tree->access<double>(db_rx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&b100_impl::set_rx_fe_corrections, this, _1));
}
@@ -504,10 +504,10 @@ b100_impl::b100_impl(const device_addr_t &device_addr){
.add_coerced_subscriber(boost::bind(&b100_clock_ctrl::set_fpga_clock_rate, _clock_ctrl, _1));
//reset cordic rates and their properties to zero
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "freq" / "value").set(0.0);
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "tx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "tx_dsps")){
_tree->access<double>(mb_path / "tx_dsps" / name / "freq" / "value").set(0.0);
}
diff --git a/host/lib/usrp/b100/clock_ctrl.cpp b/host/lib/usrp/b100/clock_ctrl.cpp
index 5700a321a..010a0a6f8 100644
--- a/host/lib/usrp/b100/clock_ctrl.cpp
+++ b/host/lib/usrp/b100/clock_ctrl.cpp
@@ -18,14 +18,13 @@
#include "clock_ctrl.hpp"
#include "ad9522_regs.hpp"
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/exception.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/safe_call.hpp>
#include <stdint.h>
#include "b100_regs.hpp" //spi slave constants
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/thread/thread.hpp>
#include <boost/math/common_factor_rt.hpp> //gcd
@@ -149,11 +148,11 @@ static clock_settings_type get_clock_settings(double rate){
cs.chan_divider /= cs.vco_divider;
}
- UHD_LOGV(always)
- << "gcd " << gcd << std::endl
- << "X " << X << std::endl
- << "Y " << Y << std::endl
- << cs.to_pp_string() << std::endl
+ UHD_LOGGER_DEBUG("B100")
+ << "gcd: " << gcd
+ << " X: " << X
+ << " Y: " << Y
+ << cs.to_pp_string()
;
//filter limits on the counters
@@ -167,7 +166,7 @@ static clock_settings_type get_clock_settings(double rate){
if (cs.get_vco_rate() < 1400e6 + vco_bound_pad) continue;
if (cs.get_out_rate() != rate) continue;
- UHD_MSG(status) << "USRP-B100 clock control: " << i << std::endl << cs.to_pp_string() << std::endl;
+ UHD_LOGGER_INFO("B100") << "USRP-B100 clock control: " << i << cs.to_pp_string() ;
return cs;
}
}
@@ -466,7 +465,7 @@ private:
void send_reg(uint16_t addr){
uint32_t reg = _ad9522_regs.get_write_reg(addr);
- UHD_LOGV(often) << "clock control write reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("B100") << "clock control write reg: " << std::hex << reg ;
byte_vector_t buf;
buf.push_back(uint8_t(reg >> 16));
buf.push_back(uint8_t(reg >> 8));
@@ -502,7 +501,7 @@ private:
_ad9522_regs.set_reg(addr, reg);
if (_ad9522_regs.vco_calibration_finished) goto wait_for_ld;
}
- UHD_MSG(error) << "USRP-B100 clock control: VCO calibration timeout" << std::endl;
+ UHD_LOGGER_ERROR("B100") << "USRP-B100 clock control: VCO calibration timeout";
wait_for_ld:
//wait for digital lock detect:
for (size_t ms10 = 0; ms10 < 100; ms10++){
@@ -511,7 +510,7 @@ private:
_ad9522_regs.set_reg(addr, reg);
if (_ad9522_regs.digital_lock_detect) return;
}
- UHD_MSG(error) << "USRP-B100 clock control: lock detection timeout" << std::endl;
+ UHD_LOGGER_ERROR("B100") << "USRP-B100 clock control: lock detection timeout";
}
void soft_sync(void){
@@ -533,7 +532,7 @@ private:
;
//write initial register values and latch/update
- BOOST_FOREACH(const range_t &range, ranges){
+ for(const range_t &range: ranges){
for(uint16_t addr = range.first; addr <= range.second; addr++){
this->send_reg(addr);
}
diff --git a/host/lib/usrp/b100/codec_ctrl.cpp b/host/lib/usrp/b100/codec_ctrl.cpp
index e78608beb..01fd51f52 100644
--- a/host/lib/usrp/b100/codec_ctrl.cpp
+++ b/host/lib/usrp/b100/codec_ctrl.cpp
@@ -259,7 +259,7 @@ void b100_codec_ctrl_impl::write_aux_dac(aux_dac_t which, double volts){
**********************************************************************/
void b100_codec_ctrl_impl::send_reg(uint8_t addr){
uint32_t reg = _ad9862_regs.get_write_reg(addr);
- UHD_LOGV(rarely) << "codec control write reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("B100") << "codec control write reg: " << std::hex << reg ;
_iface->transact_spi(
B100_SPI_SS_AD9862,
spi_config_t::EDGE_RISE,
@@ -269,13 +269,13 @@ void b100_codec_ctrl_impl::send_reg(uint8_t addr){
void b100_codec_ctrl_impl::recv_reg(uint8_t addr){
uint32_t reg = _ad9862_regs.get_read_reg(addr);
- UHD_LOGV(rarely) << "codec control read reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("B100") << "codec control read reg: " << std::hex << reg ;
uint32_t ret = _iface->transact_spi(
B100_SPI_SS_AD9862,
spi_config_t::EDGE_RISE,
reg, 16, true /*rb*/
);
- UHD_LOGV(rarely) << "codec control read ret: " << std::hex << uint16_t(ret & 0xFF) << std::endl;
+ UHD_LOGGER_TRACE("B100") << "codec control read ret: " << std::hex << uint16_t(ret & 0xFF) ;
_ad9862_regs.set_reg(addr, uint8_t(ret&0xff));
}
diff --git a/host/lib/usrp/b100/io_impl.cpp b/host/lib/usrp/b100/io_impl.cpp
index c1810ed8c..7cccc7752 100644
--- a/host/lib/usrp/b100/io_impl.cpp
+++ b/host/lib/usrp/b100/io_impl.cpp
@@ -23,7 +23,7 @@
#include <boost/format.hpp>
#include <boost/bind.hpp>
#include <boost/thread.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <boost/make_shared.hpp>
@@ -36,10 +36,10 @@ void b100_impl::update_rates(void){
_tree->access<double>(mb_path / "tick_rate").update();
//and now that the tick rate is set, init the host rates to something
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "rate" / "value").update();
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "tx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "tx_dsps")){
_tree->access<double>(mb_path / "tx_dsps" / name / "rate" / "value").update();
}
}
diff --git a/host/lib/usrp/b100/usb_zero_copy_wrapper.cpp b/host/lib/usrp/b100/usb_zero_copy_wrapper.cpp
index d57d57f21..ea3e9f87e 100644
--- a/host/lib/usrp/b100/usb_zero_copy_wrapper.cpp
+++ b/host/lib/usrp/b100/usb_zero_copy_wrapper.cpp
@@ -18,10 +18,9 @@
#include <uhd/transport/usb_zero_copy.hpp>
#include <uhd/transport/buffer_pool.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/atomic.hpp>
-#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
#include <boost/thread/mutex.hpp>
#include <boost/thread/condition_variable.hpp>
diff --git a/host/lib/usrp/b200/b200_iface.cpp b/host/lib/usrp/b200/b200_iface.cpp
index 6f9bdd330..3d969dfa6 100644
--- a/host/lib/usrp/b200/b200_iface.cpp
+++ b/host/lib/usrp/b200/b200_iface.cpp
@@ -19,7 +19,7 @@
#include "../../utils/ihex.hpp"
#include <uhd/config.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <boost/functional/hash.hpp>
@@ -211,9 +211,10 @@ public:
void load_firmware(const std::string filestring, UHD_UNUSED(bool force) = false)
{
- if (load_img_msg)
- UHD_MSG(status) << "Loading firmware image: "
- << filestring << "..." << std::flush;
+ if (load_img_msg) {
+ UHD_LOGGER_INFO("B200") << "Loading firmware image: "
+ << filestring << "...";
+ }
ihex_reader file_reader(filestring);
try {
@@ -227,7 +228,6 @@ public:
throw uhd::io_error(str(boost::format("Could not load firmware: \n%s") % e.what()));
}
- UHD_MSG(status) << std::endl;
//TODO
//usrp_set_firmware_hash(hash); //set hash before reset
@@ -446,8 +446,10 @@ public:
wait_count++;
} while(fx3_state != FX3_STATE_FPGA_READY);
- if (load_img_msg) UHD_MSG(status) << "Loading FPGA image: " \
- << filestring << "..." << std::flush;
+ if (load_img_msg) {
+ UHD_LOGGER_INFO("B200") << "Loading FPGA image: "
+ << filestring << "...";
+ }
bytes_to_xfer = 1;
ret = fx3_control_write(B200_VREQ_FPGA_START, 0, 0, out_buff, bytes_to_xfer, 1000);
@@ -485,15 +487,20 @@ public:
else if (nwritten != transfer_count)
throw uhd::io_error((boost::format("load_fpga: short write while transferring bitstream to FX3 (expecting: %d, returned: %d)") % transfer_count % nwritten).str());
+ const size_t LOG_GRANULARITY = 10; // %. Keep this an integer divisor of 100.
if (load_img_msg)
{
- if (bytes_sent == 0) UHD_MSG(status) << " 0%" << std::flush;
- const size_t percent_before = size_t((bytes_sent*100)/file_size);
+ if (bytes_sent == 0) UHD_LOGGER_DEBUG("B200") << " 0%" << std::flush;
+ const size_t percent_before =
+ size_t((bytes_sent*100)/file_size) -
+ (size_t((bytes_sent*100)/file_size) % LOG_GRANULARITY);
bytes_sent += transfer_count;
- const size_t percent_after = size_t((bytes_sent*100)/file_size);
+ const size_t percent_after =
+ size_t((bytes_sent*100)/file_size) -
+ (size_t((bytes_sent*100)/file_size) % LOG_GRANULARITY);
if (percent_before != percent_after)
{
- UHD_MSG(status) << "\b\b\b\b" << std::setw(3) << percent_after << "%" << std::flush;
+ UHD_LOGGER_DEBUG("B200") << std::setw(3) << percent_after << "%";
}
}
}
@@ -515,8 +522,9 @@ public:
usrp_set_fpga_hash(hash);
- if (load_img_msg)
- UHD_MSG(status) << "\b\b\b\b done" << std::endl;
+ if (load_img_msg) {
+ UHD_LOGGER_DEBUG("B200") << "FPGA image loaded!";
+ }
return 0;
}
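
Note: the FPGA-load progress output above now logs only when the percentage crosses a LOG_GRANULARITY boundary, instead of redrawing at every 1% step with backspaces. The arithmetic boils down to rounding the percentage down to a multiple of the granularity; a small sketch with made-up sizes:

    #include <cstddef>
    #include <iostream>

    // Round a transfer percentage down to the nearest multiple of `granularity`.
    static size_t quantized_percent(size_t bytes_sent, size_t file_size, size_t granularity) {
        const size_t percent = (bytes_sent * 100) / file_size;
        return percent - (percent % granularity);
    }

    int main() {
        const size_t file_size = 9876543;    // illustrative bitstream size in bytes
        const size_t block_size = 64 * 1024; // illustrative per-transfer chunk
        const size_t granularity = 10;       // %, keep this an integer divisor of 100

        size_t last_logged = size_t(-1);
        for (size_t sent = 0; sent < file_size; sent += block_size) {
            const size_t pct = quantized_percent(sent, file_size, granularity);
            if (pct != last_logged) { // log only when a granularity boundary is crossed
                std::cout << pct << "%" << std::endl;
                last_logged = pct;
            }
        }
        std::cout << "100% - done" << std::endl;
        return 0;
    }
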
diff --git a/host/lib/usrp/b200/b200_image_loader.cpp b/host/lib/usrp/b200/b200_image_loader.cpp
index b9c2c0caf..af79a59fc 100644
--- a/host/lib/usrp/b200/b200_image_loader.cpp
+++ b/host/lib/usrp/b200/b200_image_loader.cpp
@@ -16,7 +16,6 @@
//
#include <boost/assign.hpp>
-#include <boost/foreach.hpp>
#include <boost/lexical_cast.hpp>
#include <uhd/exception.hpp>
@@ -45,7 +44,7 @@ static b200_iface::sptr get_b200_iface(const image_loader::image_loader_args_t&
mboard_eeprom_t eeprom; // Internal use
if(dev_handles.size() > 0){
- BOOST_FOREACH(usb_device_handle::sptr dev_handle, dev_handles){
+ for(usb_device_handle::sptr dev_handle: dev_handles){
if(dev_handle->firmware_loaded()){
iface = b200_iface::make(usb_control::make(dev_handle,0));
eeprom = mboard_eeprom_t(*iface, "B200");
@@ -74,7 +73,7 @@ static b200_iface::sptr get_b200_iface(const image_loader::image_loader_args_t&
std::string err_msg = "Could not resolve given args to a single B2XX device.\n"
"Applicable devices:\n";
- BOOST_FOREACH(usb_device_handle::sptr dev_handle, applicable_dev_handles){
+ for(usb_device_handle::sptr dev_handle: applicable_dev_handles){
eeprom = mboard_eeprom_t(*b200_iface::make(usb_control::make(dev_handle,0)), "B200");
err_msg += str(boost::format(" * %s (serial=%s)\n")
% B2XX_STR_NAMES.get(get_b200_product(dev_handle, mb_eeprom), "B2XX")
diff --git a/host/lib/usrp/b200/b200_impl.cpp b/host/lib/usrp/b200/b200_impl.cpp
index e2a0e0b70..a513e1336 100644
--- a/host/lib/usrp/b200/b200_impl.cpp
+++ b/host/lib/usrp/b200/b200_impl.cpp
@@ -19,7 +19,7 @@
#include "b200_regs.hpp"
#include <uhd/config.hpp>
#include <uhd/transport/usb_control.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/cast.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/static.hpp>
@@ -60,7 +60,7 @@ public:
}
}
clocking_mode_t get_clocking_mode() {
- return AD9361_XTAL_N_CLK_PATH;
+ return clocking_mode_t::AD9361_XTAL_N_CLK_PATH;
}
digital_interface_mode_t get_digital_interface_mode() {
return AD9361_DDR_FDD_LVCMOS;
@@ -88,7 +88,7 @@ public:
}
}
clocking_mode_t get_clocking_mode() {
- return AD9361_XTAL_N_CLK_PATH;
+ return clocking_mode_t::AD9361_XTAL_N_CLK_PATH;
}
digital_interface_mode_t get_digital_interface_mode() {
return AD9361_DDR_FDD_LVCMOS;
@@ -172,7 +172,7 @@ static device_addrs_t b200_find(const device_addr_t &hint)
//Return an empty list of addresses when an address or resource is specified,
//since an address and resource is intended for a different, non-USB, device.
- BOOST_FOREACH(device_addr_t hint_i, separate_device_addr(hint)) {
+ for(device_addr_t hint_i: separate_device_addr(hint)) {
if (hint_i.has_key("addr") || hint_i.has_key("resource")) return b200_addrs;
}
@@ -182,7 +182,7 @@ static device_addrs_t b200_find(const device_addr_t &hint)
// so that re-enumeration after fw load can occur successfully.
// This requirement is a courtesy of libusb1.0 on windows.
size_t found = 0;
- BOOST_FOREACH(usb_device_handle::sptr handle, get_b200_device_handles(hint)) {
+ for(usb_device_handle::sptr handle: get_b200_device_handles(hint)) {
//extract the firmware path for the b200
std::string b200_fw_image;
try{
@@ -190,10 +190,10 @@ static device_addrs_t b200_find(const device_addr_t &hint)
b200_fw_image = uhd::find_image_path(b200_fw_image, STR(UHD_IMAGES_DIR)); // FIXME
}
catch(uhd::exception &e){
- UHD_MSG(warning) << e.what();
+ UHD_LOGGER_WARNING("B200") << e.what();
return b200_addrs;
}
- UHD_LOG << "the firmware image: " << b200_fw_image << std::endl;
+ UHD_LOGGER_DEBUG("B200") << "the firmware image: " << b200_fw_image ;
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -213,7 +213,7 @@ static device_addrs_t b200_find(const device_addr_t &hint)
//search for the device until found or timeout
while (boost::get_system_time() < timeout_time and b200_addrs.empty() and found != 0)
{
- BOOST_FOREACH(usb_device_handle::sptr handle, get_b200_device_handles(hint))
+ for(usb_device_handle::sptr handle: get_b200_device_handles(hint))
{
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -261,7 +261,7 @@ static device::sptr b200_make(const device_addr_t &device_addr)
return device::sptr(new b200_impl(device_addr, handle));
}
catch (const uhd::usb_error &) {
- UHD_MSG(status) << "Detected bad USB state; resetting." << std::endl;
+ UHD_LOGGER_INFO("B200") << "Detected bad USB state; resetting." ;
libusb::device_handle::sptr dev_handle(libusb::device_handle::get_cached_handle(
boost::static_pointer_cast<libusb::special_handle>(handle)->get_device()
));
@@ -344,7 +344,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
std::vector<usb_device_handle::sptr> device_list = usb_device_handle::get_device_list(vid_pid_pair_list);
//locate the matching handle in the device list
- BOOST_FOREACH(usb_device_handle::sptr dev_handle, device_list) {
+ for(usb_device_handle::sptr dev_handle: device_list) {
try {
if (dev_handle->get_serial() == device_addr["serial"]){
handle = dev_handle;
@@ -390,7 +390,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
_revision = boost::lexical_cast<size_t>(mb_eeprom["revision"]);
}
- UHD_MSG(status) << "Detected Device: " << B2XX_STR_NAMES[_product] << std::endl;
+ UHD_LOGGER_INFO("B200") << "Detected Device: " << B2XX_STR_NAMES[_product] ;
_gpsdo_capable = (not (_product == B200MINI or _product == B205MINI));
@@ -437,7 +437,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
// Create control transport
////////////////////////////////////////////////////////////////////
uint8_t usb_speed = _iface->get_usb_speed();
- UHD_MSG(status) << "Operating over USB " << (int) usb_speed << "." << std::endl;
+ UHD_LOGGER_INFO("B200") << "Operating over USB " << (int) usb_speed << "." ;
const std::string min_frame_size = (usb_speed == 3) ? "1024" : "512";
device_addr_t ctrl_xport_args;
@@ -488,19 +488,18 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
if ((_local_ctrl->peek32(RB32_CORE_STATUS) & 0xff) != B200_GPSDO_ST_NONE)
{
- UHD_MSG(status) << "Detecting internal GPSDO.... " << std::flush;
+ UHD_LOGGER_INFO("B200") << "Detecting internal GPSDO.... " << std::flush;
try
{
_gps = gps_ctrl::make(_async_task_data->gpsdo_uart);
}
catch(std::exception &e)
{
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("B200") << "An error occurred making GPSDO control: " << e.what();
}
if (_gps and _gps->gps_detected())
{
- //UHD_MSG(status) << "found" << std::endl;
- BOOST_FOREACH(const std::string &name, _gps->get_sensors())
+ for(const std::string &name: _gps->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, _gps, name));
@@ -553,7 +552,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
////////////////////////////////////////////////////////////////////
// Init codec - turns on clocks
////////////////////////////////////////////////////////////////////
- UHD_MSG(status) << "Initialize CODEC control..." << std::endl;
+ UHD_LOGGER_INFO("B200") << "Initialize CODEC control..." ;
ad9361_params::sptr client_settings;
if (_product == B200MINI or _product == B205MINI) {
client_settings = boost::make_shared<b2xxmini_ad9361_client_t>();
@@ -610,7 +609,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
////////////////////////////////////////////////////////////////////
// setup radio control
////////////////////////////////////////////////////////////////////
- UHD_MSG(status) << "Initialize Radio control..." << std::endl;
+ UHD_LOGGER_INFO("B200") << "Initialize Radio control..." ;
const size_t num_radio_chains = ((_local_ctrl->peek32(RB32_CORE_STATUS) >> 8) & 0xff);
UHD_ASSERT_THROW(num_radio_chains > 0);
UHD_ASSERT_THROW(num_radio_chains <= 2);
@@ -621,7 +620,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
this->setup_radio(i);
//now test each radio module's connection to the codec interface
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
_codec_mgr->loopback_self_test(
boost::bind(
@@ -641,7 +640,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
.add_coerced_subscriber(boost::bind(&b200_impl::sync_times, this));
_tree->create<time_spec_t>(mb_path / "time" / "pps")
.set_publisher(boost::bind(&time_core_3000::get_time_last_pps, _radio_perifs[0].time64));
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
_tree->access<time_spec_t>(mb_path / "time" / "pps")
.add_coerced_subscriber(boost::bind(&time_core_3000::set_time_next_pps, perif.time64, _1));
@@ -670,7 +669,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
// front panel gpio
////////////////////////////////////////////////////////////////////
_radio_perifs[0].fp_gpio = gpio_atr_3000::make(_radio_perifs[0].ctrl, TOREG(SR_FP_GPIO), RB32_FP_GPIO);
- BOOST_FOREACH(const gpio_attr_map_t::value_type attr, gpio_attr_map)
+ for(const gpio_attr_map_t::value_type attr: gpio_attr_map)
{
_tree->create<uint32_t>(mb_path / "gpio" / "FP0" / attr.second)
.set(0)
@@ -692,7 +691,7 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
////////////////////////////////////////////////////////////////////
// Init the clock rate and the auto mcr appropriately
if (not device_addr.has_key("master_clock_rate")) {
- UHD_MSG(status) << "Setting master clock rate selection to 'automatic'." << std::endl;
+ UHD_LOGGER_INFO("B200") << "Setting master clock rate selection to 'automatic'." ;
}
// We can automatically choose a master clock rate, but not if the user specifies one
const double default_tick_rate = device_addr.cast<double>("master_clock_rate", ad936x_manager::DEFAULT_TICK_RATE);
@@ -701,11 +700,11 @@ b200_impl::b200_impl(const uhd::device_addr_t& device_addr, usb_device_handle::s
//subdev spec contains full width of selections
subdev_spec_t rx_spec, tx_spec;
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
{
rx_spec.push_back(subdev_spec_pair_t("A", fe));
}
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
{
tx_spec.push_back(subdev_spec_pair_t("A", fe));
}
@@ -830,7 +829,7 @@ void b200_impl::setup_radio(const size_t dspno)
// create RF frontend interfacing
////////////////////////////////////////////////////////////////////
static const std::vector<direction_t> dirs = boost::assign::list_of(RX_DIRECTION)(TX_DIRECTION);
- BOOST_FOREACH(direction_t dir, dirs) {
+ for(direction_t dir: dirs) {
const std::string x = (dir == RX_DIRECTION) ? "rx" : "tx";
const std::string key = std::string(((dir == RX_DIRECTION) ? "RX" : "TX")) + std::string(((dspno == _fe1) ? "1" : "2"));
const fs_path rf_fe_path
@@ -874,7 +873,7 @@ void b200_impl::setup_radio(const size_t dspno)
void b200_impl::register_loopback_self_test(wb_iface::sptr iface)
{
bool test_fail = false;
- UHD_MSG(status) << "Performing register loopback test... " << std::flush;
+ UHD_LOGGER_INFO("B200") << "Performing register loopback test... " << std::flush;
size_t hash = size_t(time(NULL));
for (size_t i = 0; i < 100; i++)
{
@@ -883,7 +882,7 @@ void b200_impl::register_loopback_self_test(wb_iface::sptr iface)
test_fail = iface->peek32(RB32_TEST) != uint32_t(hash);
if (test_fail) break; //exit loop on any failure
}
- UHD_MSG(status) << ((test_fail)? "fail" : "pass") << std::endl;
+ UHD_LOGGER_INFO("B200") << ((test_fail)? "fail" : "pass") ;
}
/***********************************************************************
@@ -919,20 +918,20 @@ void b200_impl::enforce_tick_rate_limits(size_t chan_count, double tick_rate, co
double b200_impl::set_tick_rate(const double new_tick_rate)
{
- UHD_MSG(status) << (boost::format("Asking for clock rate %.6f MHz... ") % (new_tick_rate/1e6)) << std::flush;
+ UHD_LOGGER_INFO("B200") << (boost::format("Asking for clock rate %.6f MHz... ") % (new_tick_rate/1e6)) << std::flush;
check_tick_rate_with_current_streamers(new_tick_rate); // Defined in b200_io_impl.cpp
// Make sure the clock rate is actually changed before doing
// the full Monty of setting regs and loopback tests etc.
if (std::abs(new_tick_rate - _tick_rate) < 1.0) {
- UHD_MSG(status) << "OK" << std::endl;
+ UHD_LOGGER_INFO("B200") << "OK" ;
return _tick_rate;
}
_tick_rate = _codec_ctrl->set_clock_rate(new_tick_rate);
- UHD_MSG(status) << std::endl << (boost::format("Actually got clock rate %.6f MHz.") % (_tick_rate/1e6)) << std::endl;
+ UHD_LOGGER_INFO("B200") << (boost::format("Actually got clock rate %.6f MHz.") % (_tick_rate/1e6)) ;
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
perif.time64->set_tick_rate(_tick_rate);
perif.time64->self_test();
@@ -1088,7 +1087,7 @@ void b200_impl::update_time_source(const std::string &source)
void b200_impl::set_time(const uhd::time_spec_t& t)
{
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
perif.time64->set_time_sync(t);
_local_ctrl->poke32(TOREG(SR_CORE_SYNC), 1 << 2 | uint32_t(_time_source));
_local_ctrl->poke32(TOREG(SR_CORE_SYNC), _time_source);
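Editor's note: throughout b200_impl.cpp the old UHD_MSG(status|warning|error) streams are replaced by the component-tagged macros from <uhd/utils/log.hpp>, and the trailing std::endl is dropped because each logger statement is terminated as its own log record. A minimal sketch of the new calls, assuming a build against a UHD version that ships this logging API (the message text below is illustrative):

    #include <uhd/utils/log.hpp>

    void report_usb_speed(const int usb_speed)
    {
        // Severity is encoded in the macro name; the argument is the
        // component tag ("B200") that appears in the log prefix.
        UHD_LOGGER_INFO("B200") << "Operating over USB " << usb_speed << ".";

        if (usb_speed < 3) {
            UHD_LOGGER_WARNING("B200")
                << "USB 2 connection detected; full sample rates may not be reachable.";
        }
    }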
diff --git a/host/lib/usrp/b200/b200_io_impl.cpp b/host/lib/usrp/b200/b200_io_impl.cpp
index 0a00b9402..7366b06f5 100644
--- a/host/lib/usrp/b200/b200_io_impl.cpp
+++ b/host/lib/usrp/b200/b200_io_impl.cpp
@@ -45,7 +45,7 @@ void b200_impl::check_tick_rate_with_current_streamers(double rate)
size_t b200_impl::max_chan_count(const std::string &direction /* = "" */)
{
size_t max_count = 0;
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
if ((direction == "RX" or direction.empty()) and not perif.rx_streamer.expired()) {
boost::shared_ptr<sph::recv_packet_streamer> rx_streamer =
@@ -97,7 +97,7 @@ void b200_impl::set_auto_tick_rate(
for (int i = 0; i < 2; i++) { // Loop through rx and tx
std::string dir = (i == 0) ? "tx" : "rx";
// We assume all 'set' DSPs are being used.
- BOOST_FOREACH(const std::string &dsp_no, _tree->list(str(boost::format("/mboards/0/%s_dsps") % dir))) {
+ for(const std::string &dsp_no: _tree->list(str(boost::format("/mboards/0/%s_dsps") % dir))) {
fs_path dsp_path = str(boost::format("/mboards/0/%s_dsps/%s") % dir % dsp_no);
if (dsp_path == tree_dsp_path) {
continue;
@@ -135,9 +135,9 @@ void b200_impl::set_auto_tick_rate(
_tree->access<double>("/mboards/0/tick_rate").set(new_rate);
}
} catch (const uhd::value_error &) {
- UHD_MSG(warning)
- << "Cannot automatically determine an appropriate tick rate for these sampling rates." << std::endl
- << "Consider using different sampling rates, or manually specify a suitable master clock rate." << std::endl;
+ UHD_LOGGER_WARNING("B200")
+ << "Cannot automatically determine an appropriate tick rate for these sampling rates."
+ << "Consider using different sampling rates, or manually specify a suitable master clock rate." ;
return; // Let the others handle this
}
}
@@ -146,14 +146,14 @@ void b200_impl::update_tick_rate(const double new_tick_rate)
{
check_tick_rate_with_current_streamers(new_tick_rate);
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
boost::shared_ptr<sph::recv_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::recv_packet_streamer>(perif.rx_streamer.lock());
if (my_streamer) my_streamer->set_tick_rate(new_tick_rate);
perif.framer->set_tick_rate(new_tick_rate);
}
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
boost::shared_ptr<sph::send_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::send_packet_streamer>(perif.tx_streamer.lock());
@@ -356,7 +356,7 @@ boost::optional<uhd::msg_task::msg_type_t> b200_impl::handle_async_task(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "Error parsing ctrl packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("B200") << "Error parsing ctrl packet: " << ex.what();
break;
}
@@ -370,7 +370,7 @@ boost::optional<uhd::msg_task::msg_type_t> b200_impl::handle_async_task(
//doh!
default:
- UHD_MSG(error) << "Got a ctrl packet with unknown SID " << sid << std::endl;
+ UHD_LOGGER_ERROR("B200") << "Got a ctrl packet with unknown SID " << sid;
}
return boost::none;
}
diff --git a/host/lib/usrp/b200/b200_uart.cpp b/host/lib/usrp/b200/b200_uart.cpp
index 3c49ebf2a..143600c51 100644
--- a/host/lib/usrp/b200/b200_uart.cpp
+++ b/host/lib/usrp/b200/b200_uart.cpp
@@ -20,10 +20,9 @@
#include <uhd/transport/bounded_buffer.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/time_spec.hpp>
#include <uhd/exception.hpp>
-#include <boost/foreach.hpp>
using namespace uhd;
using namespace uhd::transport;
@@ -69,7 +68,7 @@ struct b200_uart_impl : b200_uart
void write_uart(const std::string &buff)
{
- BOOST_FOREACH(const char ch, buff)
+ for(const char ch: buff)
{
this->send_char(ch);
}
diff --git a/host/lib/usrp/common/CMakeLists.txt b/host/lib/usrp/common/CMakeLists.txt
index 9f4cf09b5..9480d0284 100644
--- a/host/lib/usrp/common/CMakeLists.txt
+++ b/host/lib/usrp/common/CMakeLists.txt
@@ -38,5 +38,4 @@ LIBUHD_APPEND_SOURCES(
${CMAKE_CURRENT_SOURCE_DIR}/validate_subdev_spec.cpp
${CMAKE_CURRENT_SOURCE_DIR}/recv_packet_demuxer.cpp
${CMAKE_CURRENT_SOURCE_DIR}/fifo_ctrl_excelsior.cpp
- ${CMAKE_CURRENT_SOURCE_DIR}/usrp3_fw_ctrl_iface.cpp
)
diff --git a/host/lib/usrp/common/ad9361_ctrl.cpp b/host/lib/usrp/common/ad9361_ctrl.cpp
index 0dc5e7919..a1c158eae 100644
--- a/host/lib/usrp/common/ad9361_ctrl.cpp
+++ b/host/lib/usrp/common/ad9361_ctrl.cpp
@@ -17,7 +17,7 @@
#include "ad9361_ctrl.hpp"
#include <uhd/types/ranges.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/serial.hpp>
#include <cstring>
#include <boost/format.hpp>
@@ -153,10 +153,10 @@ public:
const double clipped_rate = clock_rate_range.clip(rate);
if (clipped_rate != rate) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("AD936X") << boost::format(
"The requested master_clock_rate %f MHz exceeds bounds imposed by UHD.\n"
"The master_clock_rate has been forced to %f MHz.\n"
- ) % (rate/1e6) % (clipped_rate/1e6) << std::endl;
+ ) % (rate/1e6) % (clipped_rate/1e6) ;
}
double return_rate = _device.set_clock_rate(clipped_rate);
diff --git a/host/lib/usrp/common/ad9361_driver/ad9361_client.h b/host/lib/usrp/common/ad9361_driver/ad9361_client.h
index 921045fbd..4fea53521 100644
--- a/host/lib/usrp/common/ad9361_driver/ad9361_client.h
+++ b/host/lib/usrp/common/ad9361_driver/ad9361_client.h
@@ -34,10 +34,10 @@ typedef enum {
/*!
* Clocking mode
*/
-typedef enum {
+enum class clocking_mode_t {
AD9361_XTAL_P_CLK_PATH,
AD9361_XTAL_N_CLK_PATH
-} clocking_mode_t;
+};
/*!
* Digital interface specific
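Editor's note: turning clocking_mode_t into an enum class is what forces the call sites earlier in this patch to qualify the enumerators (clocking_mode_t::AD9361_XTAL_N_CLK_PATH). A standalone sketch of the scoping difference, independent of the UHD headers (the "old_" names are illustrative):

    // Unscoped enum: enumerators leak into the surrounding namespace.
    typedef enum {
        OLD_XTAL_P_CLK_PATH,
        OLD_XTAL_N_CLK_PATH
    } old_clocking_mode_t;

    // Scoped enum: enumerators must be qualified and do not implicitly
    // convert to int, which catches accidental mix-ups at compile time.
    enum class clocking_mode_t {
        AD9361_XTAL_P_CLK_PATH,
        AD9361_XTAL_N_CLK_PATH
    };

    clocking_mode_t get_clocking_mode()
    {
        return clocking_mode_t::AD9361_XTAL_N_CLK_PATH; // qualification required
    }

    int main()
    {
        old_clocking_mode_t legacy = OLD_XTAL_N_CLK_PATH; // no qualification needed
        (void)legacy;
        return get_clocking_mode() == clocking_mode_t::AD9361_XTAL_N_CLK_PATH ? 0 : 1;
    }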
diff --git a/host/lib/usrp/common/ad9361_driver/ad9361_device.cpp b/host/lib/usrp/common/ad9361_driver/ad9361_device.cpp
index e2ed2c77d..da0ab0b9a 100644
--- a/host/lib/usrp/common/ad9361_driver/ad9361_device.cpp
+++ b/host/lib/usrp/common/ad9361_driver/ad9361_device.cpp
@@ -24,7 +24,7 @@
#include <cmath>
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <stdint.h>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/thread/thread.hpp>
@@ -321,7 +321,7 @@ double ad9361_device_t::_calibrate_baseband_rx_analog_filter(double req_rfbw)
double bbbw = req_rfbw / 2.0;
if(bbbw > _baseband_bw / 2.0)
{
- UHD_LOG << "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
+ UHD_LOGGER_DEBUG("AD936X")<< "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
bbbw = _baseband_bw / 2.0;
}
@@ -388,7 +388,7 @@ double ad9361_device_t::_calibrate_baseband_tx_analog_filter(double req_rfbw)
if(bbbw > _baseband_bw / 2.0)
{
- UHD_LOG << "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
+ UHD_LOGGER_DEBUG("AD936X")<< "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
bbbw = _baseband_bw / 2.0;
}
@@ -443,7 +443,7 @@ double ad9361_device_t::_calibrate_secondary_tx_filter(double req_rfbw)
if(bbbw > _baseband_bw / 2.0)
{
- UHD_LOG << "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
+ UHD_LOGGER_DEBUG("AD936X")<< "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
bbbw = _baseband_bw / 2.0;
}
@@ -539,7 +539,7 @@ double ad9361_device_t::_calibrate_rx_TIAs(double req_rfbw)
if(bbbw > _baseband_bw / 2.0)
{
- UHD_LOG << "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
+ UHD_LOGGER_DEBUG("AD936X")<< "baseband bandwidth too large for current sample rate. Setting bandwidth to: "<<_baseband_bw;
bbbw = _baseband_bw / 2.0;
}
@@ -1169,7 +1169,7 @@ void ad9361_device_t::_setup_synth(direction_t direction, double vcorate)
* fed to the public set_clock_rate function. */
double ad9361_device_t::_tune_bbvco(const double rate)
{
- UHD_LOG << boost::format("[ad9361_device_t::_tune_bbvco] rate=%.10f\n") % rate;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_tune_bbvco] rate=%.10f\n") % rate;
/* Let's not re-tune to the same frequency over and over... */
if (freq_is_nearly_equal(rate, _req_coreclk)) {
@@ -1197,13 +1197,13 @@ double ad9361_device_t::_tune_bbvco(const double rate)
if (i == 7)
throw uhd::runtime_error("[ad9361_device_t] _tune_bbvco: wrong vcorate");
- UHD_LOG << boost::format("[ad9361_device_t::_tune_bbvco] vcodiv=%d vcorate=%.10f\n") % vcodiv % vcorate;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_tune_bbvco] vcodiv=%d vcorate=%.10f\n") % vcodiv % vcorate;
/* Fo = Fref * (Nint + Nfrac / mod) */
int nint = static_cast<int>(vcorate / fref);
- UHD_LOG << boost::format("[ad9361_device_t::_tune_bbvco] (nint)=%.10f\n") % (vcorate / fref);
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_tune_bbvco] (nint)=%.10f\n") % (vcorate / fref);
int nfrac = static_cast<int>(boost::math::round(((vcorate / fref) - (double) nint) * (double) modulus));
- UHD_LOG << boost::format("[ad9361_device_t::_tune_bbvco] (nfrac)=%.10f\n") % (((vcorate / fref) - (double) nint) * (double) modulus);
- UHD_LOG << boost::format("[ad9361_device_t::_tune_bbvco] nint=%d nfrac=%d\n") % nint % nfrac;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_tune_bbvco] (nfrac)=%.10f\n") % (((vcorate / fref) - (double) nint) * (double) modulus);
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_tune_bbvco] nint=%d nfrac=%d\n") % nint % nfrac;
double actual_vcorate = fref
* ((double) nint + ((double) nfrac / (double) modulus));
@@ -1381,7 +1381,7 @@ double ad9361_device_t::_setup_rates(const double rate)
/* If we make it into this function, then we are tuning to a new rate.
* Store the new rate. */
_req_clock_rate = rate;
- UHD_LOG << boost::format("[ad9361_device_t::_setup_rates] rate=%.6d\n") % rate;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_setup_rates] rate=%.6d\n") % rate;
/* Set the decimation and interpolation values in the RX and TX chains.
* This also switches filters in / out. Note that all transmitters and
@@ -1467,7 +1467,7 @@ double ad9361_device_t::_setup_rates(const double rate)
throw uhd::runtime_error("[ad9361_device_t] [_setup_rates] INVALID_CODE_PATH");
}
- UHD_LOG << boost::format("[ad9361_device_t::_setup_rates] divfactor=%d\n") % divfactor;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_setup_rates] divfactor=%d\n") % divfactor;
/* Tune the BBPLL to get the ADC and DAC clocks. */
const double adcclk = _tune_bbvco(rate * divfactor);
@@ -1489,7 +1489,7 @@ double ad9361_device_t::_setup_rates(const double rate)
_io_iface->poke8(0x004, _regs.inputsel);
_io_iface->poke8(0x00A, _regs.bbpll);
- UHD_LOG << boost::format("[ad9361_device_t::_setup_rates] adcclk=%f\n") % adcclk;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::_setup_rates] adcclk=%f\n") % adcclk;
_baseband_bw = (adcclk / divfactor);
/*
@@ -1583,11 +1583,11 @@ void ad9361_device_t::initialize()
/* Enable clocks. */
switch (_client_params->get_clocking_mode()) {
- case AD9361_XTAL_N_CLK_PATH: {
+ case clocking_mode_t::AD9361_XTAL_N_CLK_PATH: {
_io_iface->poke8(0x009, 0x17);
} break;
- case AD9361_XTAL_P_CLK_PATH: {
+ case clocking_mode_t::AD9361_XTAL_P_CLK_PATH: {
_io_iface->poke8(0x009, 0x07);
_io_iface->poke8(0x292, 0x08);
_io_iface->poke8(0x293, 0x80);
@@ -1798,7 +1798,7 @@ double ad9361_device_t::set_clock_rate(const double req_rate)
throw uhd::runtime_error("[ad9361_device_t] Requested master clock rate outside range");
}
- UHD_LOG << boost::format("[ad9361_device_t::set_clock_rate] req_rate=%.10f\n") % req_rate;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::set_clock_rate] req_rate=%.10f\n") % req_rate;
/* UHD has a habit of requesting the same rate like four times when it
* starts up. This prevents that, and any bugs in user code that request
@@ -1840,7 +1840,7 @@ double ad9361_device_t::set_clock_rate(const double req_rate)
* all the hard work gets done. */
double rate = _setup_rates(req_rate);
- UHD_LOG << boost::format("[ad9361_device_t::set_clock_rate] rate=%.10f\n") % rate;
+ UHD_LOGGER_TRACE("AD936X")<< boost::format("[ad9361_device_t::set_clock_rate] rate=%.10f\n") % rate;
/* Transition to the ALERT state and calibrate everything. */
_io_iface->poke8(0x015, 0x04); //dual synth mode, synth en ctrl en
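Editor's note: in the AD9361 driver the anonymous UHD_LOG stream becomes UHD_LOGGER_TRACE("AD936X"), so the verbose tuning output is tagged with a component and filterable by log level. A sketch of the pattern, assuming <uhd/utils/log.hpp> and Boost.Format as used in this file (the function and rate value are illustrative):

    #include <uhd/utils/log.hpp>
    #include <boost/format.hpp>

    void log_bbvco_tune(const double rate)
    {
        // Trace-level output is only emitted when the runtime log level
        // is raised to trace; at the default level it stays silent.
        UHD_LOGGER_TRACE("AD936X")
            << boost::format("[ad9361_device_t::_tune_bbvco] rate=%.10f") % rate;
    }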
diff --git a/host/lib/usrp/common/ad936x_manager.cpp b/host/lib/usrp/common/ad936x_manager.cpp
index 2b6d69c15..503678554 100644
--- a/host/lib/usrp/common/ad936x_manager.cpp
+++ b/host/lib/usrp/common/ad936x_manager.cpp
@@ -16,8 +16,7 @@
//
#include "ad936x_manager.hpp"
-#include <uhd/utils/msg.hpp>
-#include <boost/foreach.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/functional/hash.hpp>
#include <boost/thread/thread.hpp>
@@ -66,7 +65,7 @@ class ad936x_manager_impl : public ad936x_manager
***********************************************************************/
void init_codec()
{
- BOOST_FOREACH(const std::string &rx_fe, _rx_frontends) {
+ for(const std::string &rx_fe: _rx_frontends) {
_codec_ctrl->set_gain(rx_fe, DEFAULT_GAIN);
_codec_ctrl->set_bw_filter(rx_fe, DEFAULT_BANDWIDTH);
_codec_ctrl->tune(rx_fe, DEFAULT_FREQ);
@@ -74,7 +73,7 @@ class ad936x_manager_impl : public ad936x_manager
_codec_ctrl->set_iq_balance_auto(rx_fe, DEFAULT_AUTO_IQ_BALANCE);
_codec_ctrl->set_agc(rx_fe, DEFAULT_AGC_ENABLE);
}
- BOOST_FOREACH(const std::string &tx_fe, _tx_frontends) {
+ for(const std::string &tx_fe: _tx_frontends) {
_codec_ctrl->set_gain(tx_fe, DEFAULT_GAIN);
_codec_ctrl->set_bw_filter(tx_fe, DEFAULT_BANDWIDTH);
_codec_ctrl->tune(tx_fe, DEFAULT_FREQ);
@@ -98,7 +97,7 @@ class ad936x_manager_impl : public ad936x_manager
) {
// Put AD936x in loopback mode
_codec_ctrl->data_port_loopback(true);
- UHD_MSG(status) << "Performing CODEC loopback test... " << std::flush;
+ UHD_LOGGER_INFO("AD936X") << "Performing CODEC loopback test... ";
size_t hash = size_t(time(NULL));
// Allow some time for AD936x to enter loopback mode.
@@ -127,11 +126,11 @@ class ad936x_manager_impl : public ad936x_manager
bool test_fail = word32 != rb_tx or word32 != rb_rx;
if(test_fail)
{
- UHD_MSG(status) << "fail" << std::endl;
+ UHD_LOGGER_INFO("AD936X") << "CODEC loopback test failed";
throw uhd::runtime_error("CODEC loopback test failed.");
}
}
- UHD_MSG(status) << "pass" << std::endl;
+ UHD_LOGGER_INFO("AD936X") << "CODEC loopback test passed";
// Zero out the idle data.
poker_functor(0);
@@ -194,10 +193,10 @@ class ad936x_manager_impl : public ad936x_manager
bool check_bandwidth(double rate, const std::string dir)
{
if (rate > _codec_ctrl->get_bw_filter_range(dir).stop()) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("AD936X")
<< "Selected " << dir << " bandwidth (" << (rate/1e6) << " MHz) exceeds\n"
<< "analog frontend filter bandwidth (" << (_codec_ctrl->get_bw_filter_range(dir).stop()/1e6) << " MHz)."
- << std::endl;
+ ;
return false;
}
return true;
@@ -218,7 +217,7 @@ class ad936x_manager_impl : public ad936x_manager
}
// Gains
- BOOST_FOREACH(const std::string &name, ad9361_ctrl::get_gain_names(key))
+ for(const std::string &name: ad9361_ctrl::get_gain_names(key))
{
subtree->create<meta_range_t>(uhd::fs_path("gains") / name / "range")
.set(ad9361_ctrl::get_gain_range(key));
@@ -278,7 +277,7 @@ class ad936x_manager_impl : public ad936x_manager
}
// Frontend filters
- BOOST_FOREACH(const std::string &filter_name, _codec_ctrl->get_filter_names(key)) {
+ for(const std::string &filter_name: _codec_ctrl->get_filter_names(key)) {
subtree->create<filter_info_base::sptr>(uhd::fs_path("filters") / filter_name / "value" )
.set_publisher(boost::bind(&ad9361_ctrl::get_filter, _codec_ctrl, key, filter_name))
.add_coerced_subscriber(boost::bind(&ad9361_ctrl::set_filter, _codec_ctrl, key, filter_name, _1));
diff --git a/host/lib/usrp/common/ad936x_manager.hpp b/host/lib/usrp/common/ad936x_manager.hpp
index c456715e3..99464d0af 100644
--- a/host/lib/usrp/common/ad936x_manager.hpp
+++ b/host/lib/usrp/common/ad936x_manager.hpp
@@ -22,6 +22,7 @@
#include <uhd/utils/math.hpp>
#include <uhd/property_tree.hpp>
#include <uhd/types/direction.hpp>
+#include <boost/format.hpp>
#include <boost/shared_ptr.hpp>
#include "ad9361_ctrl.hpp"
#include <stdint.h>
diff --git a/host/lib/usrp/common/adf4001_ctrl.cpp b/host/lib/usrp/common/adf4001_ctrl.cpp
index 01a35dbec..b824824a4 100644
--- a/host/lib/usrp/common/adf4001_ctrl.cpp
+++ b/host/lib/usrp/common/adf4001_ctrl.cpp
@@ -22,7 +22,7 @@
#include "adf4001_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <iostream>
#include <iomanip>
diff --git a/host/lib/usrp/common/adf435x.hpp b/host/lib/usrp/common/adf435x.hpp
index ff7b1a2f4..18d5b70ba 100644
--- a/host/lib/usrp/common/adf435x.hpp
+++ b/host/lib/usrp/common/adf435x.hpp
@@ -268,14 +268,15 @@ public:
adf435x_regs_t::LDF_FRAC_N;
std::string tuning_str = (int_n_mode) ? "Integer-N" : "Fractional";
- UHD_LOGV(often)
- << boost::format("ADF 435X Frequencies (MHz): REQUESTED=%0.9f, ACTUAL=%0.9f"
- ) % (target_freq/1e6) % (actual_freq/1e6) << std::endl
- << boost::format("ADF 435X Intermediates (MHz): Feedback=%0.2f, VCO=%0.2f, PFD=%0.2f, BAND=%0.2f, REF=%0.2f"
- ) % (feedback_freq/1e6) % (vco_freq/1e6) % (pfd_freq/1e6) % (pfd_freq/BS/1e6) % (_reference_freq/1e6) << std::endl
- << boost::format("ADF 435X Tuning: %s") % tuning_str.c_str() << std::endl
- << boost::format("ADF 435X Settings: R=%d, BS=%d, N=%d, FRAC=%d, MOD=%d, T=%d, D=%d, RFdiv=%d"
- ) % R % BS % N % FRAC % MOD % T % D % RFdiv << std::endl;
+ UHD_LOGGER_TRACE("ADF435X")
+ << boost::format("ADF 435X Frequencies (MHz): REQUESTED=%0.9f, ACTUAL=%0.9f")
+ % (target_freq/1e6) % (actual_freq/1e6)
+ << boost::format("ADF 435X Intermediates (MHz): Feedback=%0.2f, VCO=%0.2f, PFD=%0.2f, BAND=%0.2f, REF=%0.2f")
+ % (feedback_freq/1e6) % (vco_freq/1e6) % (pfd_freq/1e6) % (pfd_freq/BS/1e6) % (_reference_freq/1e6)
+ << boost::format("ADF 435X Tuning: %s") % tuning_str.c_str()
+ << boost::format("ADF 435X Settings: R=%d, BS=%d, N=%d, FRAC=%d, MOD=%d, T=%d, D=%d, RFdiv=%d")
+ % R % BS % N % FRAC % MOD % T % D % RFdiv
+ ;
UHD_ASSERT_THROW((_regs.frac_12_bit & ((uint16_t)~0xFFF)) == 0);
UHD_ASSERT_THROW((_regs.mod_12_bit & ((uint16_t)~0xFFF)) == 0);
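Editor's note: in adf435x.hpp the old UHD_LOGV(often) block with per-line std::endl is folded into a single UHD_LOGGER_TRACE statement that chains several boost::format segments. A reduced sketch of the same construction (the register names and values are illustrative placeholders, not the real ADF435x fields):

    #include <uhd/utils/log.hpp>
    #include <boost/format.hpp>

    void log_synth_settings(const double target_mhz, const double actual_mhz,
                            const int r_div, const int n_div)
    {
        // One logger statement: each chained segment becomes part of the
        // same log record instead of a separate endl-terminated line.
        UHD_LOGGER_TRACE("ADF435X")
            << boost::format("Frequencies (MHz): REQUESTED=%0.9f, ACTUAL=%0.9f")
                   % target_mhz % actual_mhz
            << boost::format(" Settings: R=%d, N=%d") % r_div % n_div;
    }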
diff --git a/host/lib/usrp/common/apply_corrections.cpp b/host/lib/usrp/common/apply_corrections.cpp
index 272e0e093..32c7e3c45 100644
--- a/host/lib/usrp/common/apply_corrections.cpp
+++ b/host/lib/usrp/common/apply_corrections.cpp
@@ -18,11 +18,10 @@
#include "apply_corrections.hpp"
#include <uhd/usrp/dboard_eeprom.hpp>
#include <uhd/utils/paths.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/csv.hpp>
#include <uhd/types/dict.hpp>
#include <boost/filesystem.hpp>
-#include <boost/foreach.hpp>
#include <boost/thread/mutex.hpp>
#include <cstdio>
#include <complex>
@@ -114,7 +113,7 @@ static void apply_fe_corrections(
bool read_data = false, skip_next = false;;
std::vector<fe_cal_t> datas;
- BOOST_FOREACH(const uhd::csv::row_type &row, rows){
+ for(const uhd::csv::row_type &row: rows){
if (not read_data and not row.empty() and row[0] == "DATA STARTS HERE"){
read_data = true;
skip_next = true;
@@ -133,7 +132,7 @@ static void apply_fe_corrections(
}
std::sort(datas.begin(), datas.end(), fe_cal_comp);
fe_cal_cache[cal_data_path.string()] = datas;
- UHD_MSG(status) << "Loaded " << cal_data_path.string() << std::endl;
+ UHD_LOGGER_INFO("CAL") << "Calibration data loaded: " << cal_data_path.string();
}
@@ -168,7 +167,7 @@ void uhd::usrp::apply_tx_fe_corrections( //overloading to work according to rfno
);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Failure in apply_tx_fe_corrections: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("CAL") << "Failure in apply_tx_fe_corrections: " << e.what();
}
}
@@ -195,7 +194,7 @@ void uhd::usrp::apply_tx_fe_corrections(
);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Failure in apply_tx_fe_corrections: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("CAL") << "Failure in apply_tx_fe_corrections: " << e.what();
}
}
@@ -216,7 +215,7 @@ void uhd::usrp::apply_rx_fe_corrections( //overloading to work according to rfno
);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Failure in apply_tx_fe_corrections: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("CAL") << "Failure in apply_tx_fe_corrections: " << e.what();
}
}
@@ -236,6 +235,6 @@ void uhd::usrp::apply_rx_fe_corrections(
);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Failure in apply_rx_fe_corrections: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("CAL") << "Failure in apply_rx_fe_corrections: " << e.what();
}
}
diff --git a/host/lib/usrp/common/async_packet_handler.hpp b/host/lib/usrp/common/async_packet_handler.hpp
index 4b162677b..01594be0b 100644
--- a/host/lib/usrp/common/async_packet_handler.hpp
+++ b/host/lib/usrp/common/async_packet_handler.hpp
@@ -22,7 +22,7 @@
#include <uhd/transport/vrt_if_packet.hpp>
#include <uhd/types/metadata.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
namespace uhd{ namespace usrp{
@@ -59,14 +59,23 @@ namespace uhd{ namespace usrp{
if (metadata.event_code &
( async_metadata_t::EVENT_CODE_UNDERFLOW
| async_metadata_t::EVENT_CODE_UNDERFLOW_IN_PACKET)
- ) UHD_MSG(fastpath) << "U";
+ )
+ {
+ UHD_LOG_FASTPATH("U")
+ }
else if (metadata.event_code &
( async_metadata_t::EVENT_CODE_SEQ_ERROR
| async_metadata_t::EVENT_CODE_SEQ_ERROR_IN_BURST)
- ) UHD_MSG(fastpath) << "S";
+ )
+ {
+ UHD_LOG_FASTPATH("S")
+ }
else if (metadata.event_code &
async_metadata_t::EVENT_CODE_TIME_ERROR
- ) UHD_MSG(fastpath) << "L";
+ )
+ {
+ UHD_LOG_FASTPATH("L")
+ }
}
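Editor's note: the async packet handler swaps UHD_MSG(fastpath) << "U" for the dedicated UHD_LOG_FASTPATH macro, used for the terse single-character U/S/L indicators printed from the streaming fast path. A minimal sketch mirroring the usage above, assuming <uhd/utils/log.hpp> and the same no-trailing-semicolon style the patch itself uses:

    #include <uhd/utils/log.hpp>

    void report_stream_event(const bool underflow, const bool seq_error)
    {
        // Fast-path messages are kept to single characters so they remain
        // cheap to emit from the TX/RX hot path.
        if (underflow)
        {
            UHD_LOG_FASTPATH("U")
        }
        else if (seq_error)
        {
            UHD_LOG_FASTPATH("S")
        }
    }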
diff --git a/host/lib/usrp/common/constrained_device_args.hpp b/host/lib/usrp/common/constrained_device_args.hpp
index 1bfd1df00..47c5f4cc0 100644
--- a/host/lib/usrp/common/constrained_device_args.hpp
+++ b/host/lib/usrp/common/constrained_device_args.hpp
@@ -260,7 +260,7 @@ namespace usrp {
template<typename arg_t, typename data_t>
static inline void _enforce_discrete(const arg_t& arg, const std::vector<data_t>& valid_values) {
bool match = false;
- BOOST_FOREACH(const data_t& val, valid_values) {
+ for(const data_t& val: valid_values) {
if (val == arg.get()) {
match = true;
break;
diff --git a/host/lib/usrp/common/fifo_ctrl_excelsior.cpp b/host/lib/usrp/common/fifo_ctrl_excelsior.cpp
index a9995a161..c86380354 100644
--- a/host/lib/usrp/common/fifo_ctrl_excelsior.cpp
+++ b/host/lib/usrp/common/fifo_ctrl_excelsior.cpp
@@ -18,7 +18,7 @@
#include "fifo_ctrl_excelsior.hpp"
#include "async_packet_handler.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/safe_call.hpp>
@@ -97,7 +97,7 @@ public:
vrt::if_hdr_unpack_le(pkt, packet_info);
}
catch(const std::exception &ex){
- UHD_MSG(error) << "FIFO ctrl bad VITA packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("UHD") << "FIFO ctrl bad VITA packet: " << ex.what();
}
if (packet_info.has_sid and packet_info.sid == _config.ctrl_sid_base){
ctrl_result_t res = ctrl_result_t();
@@ -112,7 +112,7 @@ public:
standard_async_msg_prints(metadata);
}
else{
- UHD_MSG(error) << "FIFO ctrl got unknown SID: " << packet_info.sid << std::endl;
+ UHD_LOGGER_ERROR("UHD") << "FIFO ctrl got unknown SID: " << packet_info.sid ;
}
}
diff --git a/host/lib/usrp/common/fx2_ctrl.cpp b/host/lib/usrp/common/fx2_ctrl.cpp
index c69223747..9fa774851 100644
--- a/host/lib/usrp/common/fx2_ctrl.cpp
+++ b/host/lib/usrp/common/fx2_ctrl.cpp
@@ -16,7 +16,7 @@
//
#include "fx2_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/transport/usb_control.hpp>
#include <boost/functional/hash.hpp>
@@ -177,7 +177,7 @@ public:
unsigned char reset_n = 0;
//hit the reset line
- if (load_img_msg) UHD_MSG(status) << "Loading firmware image: " << filestring << "..." << std::flush;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "Loading firmware image: " << filestring << "...";
usrp_control_write(FX2_FIRMWARE_LOAD, 0xe600, 0, &reset_y, 1);
while (!file.eof()) {
@@ -205,7 +205,7 @@ public:
//wait for things to settle
boost::this_thread::sleep(boost::posix_time::milliseconds(1000));
- if (load_img_msg) UHD_MSG(status) << " done" << std::endl;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "Firmware loaded";
return;
}
//type anything else is unhandled
@@ -242,7 +242,7 @@ public:
const int ep0_size = 64;
unsigned char buf[ep0_size];
- if (load_img_msg) UHD_MSG(status) << "Loading FPGA image: " << filestring << "..." << std::flush;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "Loading FPGA image: " << filestring << "...";
std::ifstream file;
file.open(filename, std::ios::in | std::ios::binary);
if (not file.good()) {
@@ -274,12 +274,12 @@ public:
usrp_fpga_reset(false); //done loading, take fpga out of reset
file.close();
- if (load_img_msg) UHD_MSG(status) << " done" << std::endl;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "FPGA image loaded";
}
void usrp_load_eeprom(std::string filestring)
{
- if (load_img_msg) UHD_MSG(status) << "Loading EEPROM image: " << filestring << "..." << std::flush;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "Loading EEPROM image: " << filestring << "...";
const char *filename = filestring.c_str();
const uint16_t i2c_addr = 0x50;
@@ -315,7 +315,7 @@ public:
boost::this_thread::sleep(boost::posix_time::milliseconds(100));
}
file.close();
- if (load_img_msg) UHD_MSG(status) << " done" << std::endl;
+ if (load_img_msg) UHD_LOGGER_INFO("FX2") << "EEPROM image loaded";
}
diff --git a/host/lib/usrp/common/max287x.hpp b/host/lib/usrp/common/max287x.hpp
index 839ed77bc..c3c4bf18c 100644
--- a/host/lib/usrp/common/max287x.hpp
+++ b/host/lib/usrp/common/max287x.hpp
@@ -403,7 +403,7 @@ public:
while (vco_freq < MIN_VCO_FREQ)
vco_freq *=2;
uint8_t vco_index = 0xFF;
- BOOST_FOREACH(const vco_map_t::value_type &vco, max2871_vco_map)
+ for(const vco_map_t::value_type &vco: max2871_vco_map)
{
if (uhd::math::fp_compare::fp_compare_epsilon<double>(vco_freq) < vco.second.stop())
{
@@ -638,13 +638,14 @@ double max287x<max287x_regs_t>::set_frequency(
//actual frequency calculation
double actual_freq = double((N + (double(FRAC)/double(MOD)))*ref_freq*(1+int(D))/(R*(1+int(T)))) * fb_divisor / RFdiv;
- UHD_LOGV(rarely)
- << boost::format("MAX287x: Intermediates: ref=%0.2f, outdiv=%f, fbdiv=%f"
- ) % ref_freq % double(RFdiv*2) % double(N + double(FRAC)/double(MOD)) << std::endl
- << boost::format("MAX287x: tune: R=%d, BS=%d, N=%d, FRAC=%d, MOD=%d, T=%d, D=%d, RFdiv=%d, type=%s"
- ) % R % BS % N % FRAC % MOD % T % D % RFdiv % ((is_int_n) ? "Integer-N" : "Fractional") << std::endl
- << boost::format("MAX287x: Frequencies (MHz): REQ=%0.2f, ACT=%0.2f, VCO=%0.2f, PFD=%0.2f, BAND=%0.2f"
- ) % (target_freq/1e6) % (actual_freq/1e6) % (vco_freq/1e6) % (pfd_freq/1e6) % (pfd_freq/BS/1e6) << std::endl;
+ UHD_LOGGER_TRACE("MAX287X")
+ << boost::format("MAX287x: Intermediates: ref=%0.2f, outdiv=%f, fbdiv=%f")
+ % ref_freq % double(RFdiv*2) % double(N + double(FRAC)/double(MOD))
+ << boost::format("MAX287x: tune: R=%d, BS=%d, N=%d, FRAC=%d, MOD=%d, T=%d, D=%d, RFdiv=%d, type=%s")
+ % R % BS % N % FRAC % MOD % T % D % RFdiv % ((is_int_n) ? "Integer-N" : "Fractional")
+ << boost::format("MAX287x: Frequencies (MHz): REQ=%0.2f, ACT=%0.2f, VCO=%0.2f, PFD=%0.2f, BAND=%0.2f")
+ % (target_freq/1e6) % (actual_freq/1e6) % (vco_freq/1e6) % (pfd_freq/1e6) % (pfd_freq/BS/1e6)
+ ;
//load the register values
_regs.rf_output_enable = max287x_regs_t::RF_OUTPUT_ENABLE_ENABLED;
diff --git a/host/lib/usrp/common/recv_packet_demuxer.cpp b/host/lib/usrp/common/recv_packet_demuxer.cpp
index 8d9dcee9e..2a4c4d705 100644
--- a/host/lib/usrp/common/recv_packet_demuxer.cpp
+++ b/host/lib/usrp/common/recv_packet_demuxer.cpp
@@ -16,7 +16,7 @@
//
#include "recv_packet_demuxer.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/byteswap.hpp>
#include <boost/thread/mutex.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
@@ -87,7 +87,7 @@ public:
if (rx_index < _queues.size()) _queues[rx_index].wrapper.push(buff);
else
{
- UHD_MSG(error) << "Got a data packet with unknown SID " << extract_sid(buff) << std::endl;
+ UHD_LOGGER_ERROR("STREAMER") << "Got a data packet with unknown SID " << extract_sid(buff) ;
recv_pkt_demux_mrb *mrb = new recv_pkt_demux_mrb();
vrt::if_packet_info_t info;
info.packet_type = vrt::if_packet_info_t::PACKET_TYPE_DATA;
diff --git a/host/lib/usrp/common/recv_packet_demuxer_3000.hpp b/host/lib/usrp/common/recv_packet_demuxer_3000.hpp
index 3ad76f1a0..5305c8ddd 100644
--- a/host/lib/usrp/common/recv_packet_demuxer_3000.hpp
+++ b/host/lib/usrp/common/recv_packet_demuxer_3000.hpp
@@ -1,5 +1,5 @@
//
-// Copyright 2013 Ettus Research LLC
+// Copyright 2013,2017 Ettus Research LLC
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -20,15 +20,14 @@
#include <uhd/config.hpp>
#include <uhd/transport/zero_copy.hpp>
-#include <stdint.h>
-#include <boost/thread.hpp>
-#include <uhd/utils/msg.hpp>
-#include <uhd/utils/atomic.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/time_spec.hpp>
#include <uhd/utils/byteswap.hpp>
+#include <boost/thread.hpp>
+#include <boost/enable_shared_from_this.hpp>
#include <queue>
#include <map>
-#include <boost/enable_shared_from_this.hpp>
+#include <stdint.h>
namespace uhd{ namespace usrp{
@@ -77,24 +76,6 @@ namespace uhd{ namespace usrp{
return buff;
}
}
- // Following is disabled by default as super_recv_packet_handler (caller) is not thread safe
- // Only underlying transport (libusb1_zero_copy) is thread safe
- // The onus is on the caller to super_recv_packet_handler (and therefore this) to serialise access
-#ifdef RECV_PACKET_DEMUXER_3000_THREAD_SAFE
- //----------------------------------------------------------
- //-- Try to claim the transport or wait patiently
- //----------------------------------------------------------
- if (_claimed.cas(1, 0))
- {
- boost::mutex::scoped_lock l(mutex);
- cond.timed_wait(l, boost::posix_time::microseconds(long(timeout*1e6)));
- }
-
- //----------------------------------------------------------
- //-- Wait on the transport for input buffers
- //----------------------------------------------------------
- else
-#endif // RECV_PACKET_DEMUXER_3000_THREAD_SAFE
{
buff = _xport->get_recv_buff(timeout);
if (buff)
@@ -103,17 +84,13 @@ namespace uhd{ namespace usrp{
if (new_sid != sid)
{
boost::mutex::scoped_lock l(mutex);
- if (_queues.count(new_sid) == 0) UHD_MSG(error)
+ if (_queues.count(new_sid) == 0) UHD_LOGGER_ERROR("STREAMER")
<< "recv packet demuxer unexpected sid 0x" << std::hex << new_sid << std::dec
- << std::endl;
+ ;
else _queues[new_sid].push(buff);
buff.reset();
}
}
-#ifdef RECV_PACKET_DEMUXER_3000_THREAD_SAFE
- _claimed.write(0);
- cond.notify_all();
-#endif // RECV_PACKET_DEMUXER_3000_THREAD_SAFE
}
return buff;
}
@@ -132,10 +109,6 @@ namespace uhd{ namespace usrp{
typedef std::queue<transport::managed_recv_buffer::sptr> queue_type_t;
std::map<uint32_t, queue_type_t> _queues;
transport::zero_copy_if::sptr _xport;
-#ifdef RECV_PACKET_DEMUXER_3000_THREAD_SAFE
- uhd::atomic_uint32_t _claimed;
- boost::condition_variable cond;
-#endif // RECV_PACKET_DEMUXER_3000_THREAD_SAFE
boost::mutex mutex;
};
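Editor's note: the demuxer header drops the RECV_PACKET_DEMUXER_3000_THREAD_SAFE compile-time path along with its atomic claim flag; per the removed comment, serializing access is now entirely the caller's responsibility. A hypothetical sketch of what caller-side serialization can look like (demux_t, get_recv_buff, and the wrapper are illustrative stand-ins, not the UHD types):

    #include <memory>
    #include <mutex>

    // Illustrative stand-in for a demuxer whose methods are not thread safe.
    struct demux_t
    {
        std::shared_ptr<int> get_recv_buff(const unsigned sid, const double timeout)
        {
            (void)sid; (void)timeout;
            return nullptr; // placeholder; the real demuxer returns a managed buffer
        }
    };

    class serialized_receiver
    {
    public:
        explicit serialized_receiver(demux_t &demux) : _demux(demux) {}

        // All threads funnel through this wrapper, so the demuxer itself
        // never sees concurrent calls -- the locking lives with the caller.
        std::shared_ptr<int> recv(const unsigned sid, const double timeout)
        {
            std::lock_guard<std::mutex> lock(_mutex);
            return _demux.get_recv_buff(sid, timeout);
        }

    private:
        demux_t &_demux;
        std::mutex _mutex;
    };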
diff --git a/host/lib/usrp/common/validate_subdev_spec.cpp b/host/lib/usrp/common/validate_subdev_spec.cpp
index fab40b204..76e61221e 100644
--- a/host/lib/usrp/common/validate_subdev_spec.cpp
+++ b/host/lib/usrp/common/validate_subdev_spec.cpp
@@ -18,7 +18,6 @@
#include "validate_subdev_spec.hpp"
#include <uhd/exception.hpp>
#include <uhd/utils/assert_has.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
using namespace uhd;
@@ -52,19 +51,19 @@ void uhd::usrp::validate_subdev_spec(
//make a list of all possible specs
subdev_spec_t all_specs;
- BOOST_FOREACH(const std::string &db, tree->list(str(boost::format("/mboards/%s/dboards") % mb))){
- BOOST_FOREACH(const std::string &sd, tree->list(str(boost::format("/mboards/%s/dboards/%s/%s_frontends") % mb % db % type))){
+ for(const std::string &db: tree->list(str(boost::format("/mboards/%s/dboards") % mb))){
+ for(const std::string &sd: tree->list(str(boost::format("/mboards/%s/dboards/%s/%s_frontends") % mb % db % type))){
all_specs.push_back(subdev_spec_pair_t(db, sd));
}
}
//validate that the spec is possible
- BOOST_FOREACH(const subdev_spec_pair_t &pair, spec){
+ for(const subdev_spec_pair_t &pair: spec){
uhd::assert_has(all_specs, pair, str(boost::format("%s subdevice specification on mboard %s") % type % mb));
}
//enable selected frontends, disable others
- BOOST_FOREACH(const subdev_spec_pair_t &pair, all_specs){
+ for(const subdev_spec_pair_t &pair: all_specs){
const bool enb = uhd::has(spec, pair);
tree->access<bool>(str(boost::format(
"/mboards/%s/dboards/%s/%s_frontends/%s/enabled"
diff --git a/host/lib/usrp/cores/dma_fifo_core_3000.cpp b/host/lib/usrp/cores/dma_fifo_core_3000.cpp
index e1a841b96..18c79f4c5 100644
--- a/host/lib/usrp/cores/dma_fifo_core_3000.cpp
+++ b/host/lib/usrp/cores/dma_fifo_core_3000.cpp
@@ -19,7 +19,7 @@
#include <uhd/exception.hpp>
#include <boost/thread/thread.hpp> //sleep
#include <uhd/utils/soft_register.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
using namespace uhd;
diff --git a/host/lib/usrp/cores/i2c_core_100.cpp b/host/lib/usrp/cores/i2c_core_100.cpp
index 029b6eaa7..e68dc2ea6 100644
--- a/host/lib/usrp/cores/i2c_core_100.cpp
+++ b/host/lib/usrp/cores/i2c_core_100.cpp
@@ -17,7 +17,7 @@
#include "i2c_core_100.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp> //sleep
#define REG_I2C_PRESCALER_LO _base + 0
@@ -127,7 +127,7 @@ private:
if ((_iface->peek16(REG_I2C_CMD_STATUS) & I2C_ST_TIP) == 0) return;
boost::this_thread::sleep(boost::posix_time::milliseconds(1));
}
- UHD_MSG(error) << "i2c_core_100: i2c_wait timeout" << std::endl;
+ UHD_LOGGER_ERROR("CORES") << "i2c_core_100: i2c_wait timeout" ;
}
bool wait_chk_ack(void){
diff --git a/host/lib/usrp/cores/i2c_core_100_wb32.cpp b/host/lib/usrp/cores/i2c_core_100_wb32.cpp
index 099b80447..8e03a8c3c 100644
--- a/host/lib/usrp/cores/i2c_core_100_wb32.cpp
+++ b/host/lib/usrp/cores/i2c_core_100_wb32.cpp
@@ -17,7 +17,7 @@
#include "i2c_core_100_wb32.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp> //sleep
#define REG_I2C_PRESCALER_LO _base + 0
@@ -138,7 +138,7 @@ private:
if ((_iface->peek32(REG_I2C_CMD_STATUS) & I2C_ST_TIP) == 0) return;
boost::this_thread::sleep(boost::posix_time::milliseconds(1));
}
- UHD_MSG(error) << "i2c_core_100_wb32: i2c_wait timeout" << std::endl;
+ UHD_LOGGER_ERROR("CORES") << "i2c_core_100_wb32: i2c_wait timeout" ;
}
bool wait_chk_ack(void){
diff --git a/host/lib/usrp/cores/i2c_core_200.cpp b/host/lib/usrp/cores/i2c_core_200.cpp
index eae91253c..7b28573e9 100644
--- a/host/lib/usrp/cores/i2c_core_200.cpp
+++ b/host/lib/usrp/cores/i2c_core_200.cpp
@@ -17,7 +17,7 @@
#include "i2c_core_200.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp> //sleep
#include <boost/thread/mutex.hpp>
@@ -130,7 +130,7 @@ private:
if ((this->peek(REG_I2C_RD_ST) & I2C_ST_TIP) == 0) return;
boost::this_thread::sleep(boost::posix_time::milliseconds(1));
}
- UHD_MSG(error) << "i2c_core_200: i2c_wait timeout" << std::endl;
+ UHD_LOGGER_ERROR("CORES") << "i2c_core_200: i2c_wait timeout" ;
}
bool wait_chk_ack(void){
diff --git a/host/lib/usrp/cores/radio_ctrl_core_3000.cpp b/host/lib/usrp/cores/radio_ctrl_core_3000.cpp
index 2e405d735..512a860e0 100644
--- a/host/lib/usrp/cores/radio_ctrl_core_3000.cpp
+++ b/host/lib/usrp/cores/radio_ctrl_core_3000.cpp
@@ -18,7 +18,7 @@
#include "radio_ctrl_core_3000.hpp"
#include "async_packet_handler.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/transport/bounded_buffer.hpp>
@@ -165,7 +165,7 @@ private:
//load payload
pkt[packet_info.num_header_words32+0] = (_bige)? uhd::htonx(addr) : uhd::htowx(addr);
pkt[packet_info.num_header_words32+1] = (_bige)? uhd::htonx(data) : uhd::htowx(data);
- //UHD_MSG(status) << boost::format("0x%08x, 0x%08x\n") % addr % data;
+ //UHD_LOGGER_INFO("radio_ctrl") << boost::format("0x%08x, 0x%08x\n") % addr % data;
//send the buffer over the interface
_outstanding_seqs.push(_seq_out);
buff->commit(sizeof(uint32_t)*(packet_info.num_packet_words32));
@@ -243,17 +243,17 @@ private:
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "Radio ctrl bad VITA packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("radio_ctrl") << "Radio ctrl bad VITA packet: " << ex.what() ;
if (buff){
UHD_VAR(buff->size());
}
else{
- UHD_MSG(status) << "buff is NULL" << std::endl;
+ UHD_LOGGER_INFO("radio_ctrl") << "buff is NULL" ;
}
- UHD_MSG(status) << std::hex << pkt[0] << std::dec << std::endl;
- UHD_MSG(status) << std::hex << pkt[1] << std::dec << std::endl;
- UHD_MSG(status) << std::hex << pkt[2] << std::dec << std::endl;
- UHD_MSG(status) << std::hex << pkt[3] << std::dec << std::endl;
+ UHD_LOGGER_INFO("radio_ctrl") << std::hex << pkt[0] << std::dec ;
+ UHD_LOGGER_INFO("radio_ctrl") << std::hex << pkt[1] << std::dec ;
+ UHD_LOGGER_INFO("radio_ctrl") << std::hex << pkt[2] << std::dec ;
+ UHD_LOGGER_INFO("radio_ctrl") << std::hex << pkt[3] << std::dec ;
}
//check the buffer
diff --git a/host/lib/usrp/cores/rx_dsp_core_200.cpp b/host/lib/usrp/cores/rx_dsp_core_200.cpp
index e781cfc6d..465a3b913 100644
--- a/host/lib/usrp/cores/rx_dsp_core_200.cpp
+++ b/host/lib/usrp/cores/rx_dsp_core_200.cpp
@@ -20,7 +20,7 @@
#include <uhd/types/dict.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/math.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/thread/thread.hpp> //thread sleep
@@ -195,7 +195,7 @@ public:
if (decim > 1 and hb0 == 0 and hb1 == 0)
{
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("CORES") << boost::format(
"The requested decimation is odd; the user should expect CIC rolloff.\n"
"Select an even decimation to ensure that a halfband filter is enabled.\n"
"decimation = dsp_rate/samp_rate -> %d = (%f MHz)/(%f MHz)\n"
diff --git a/host/lib/usrp/cores/rx_dsp_core_3000.cpp b/host/lib/usrp/cores/rx_dsp_core_3000.cpp
index fdd73a7ac..45e4c6f49 100644
--- a/host/lib/usrp/cores/rx_dsp_core_3000.cpp
+++ b/host/lib/usrp/cores/rx_dsp_core_3000.cpp
@@ -20,7 +20,7 @@
#include <uhd/types/dict.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/math.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/thread/thread.hpp> //thread sleep
@@ -169,7 +169,7 @@ public:
_iface->poke32(REG_DSP_RX_DECIM, (hb0 << 9) /*small HB */ | (hb1 << 8) /*large HB*/ | (decim & 0xff));
if (decim > 1 and hb0 == 0 and hb1 == 0) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("CORES") << boost::format(
"The requested decimation is odd; the user should expect CIC rolloff.\n"
"Select an even decimation to ensure that a halfband filter is enabled.\n"
"decimation = dsp_rate/samp_rate -> %d = (%f MHz)/(%f MHz)\n"
@@ -189,7 +189,7 @@ public:
_iface->poke32(REG_DSP_RX_DECIM, (hb_enable << 8) | (decim & 0xff));
if (decim > 1 and hb0 == 0 and hb1 == 0 and hb2 == 0) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("CORES") << boost::format(
"The requested decimation is odd; the user should expect passband CIC rolloff.\n"
"Select an even decimation to ensure that a halfband filter is enabled.\n"
"Decimations factorable by 4 will enable 2 halfbands, those factorable by 8 will enable 3 halfbands.\n"
diff --git a/host/lib/usrp/cores/rx_vita_core_3000.cpp b/host/lib/usrp/cores/rx_vita_core_3000.cpp
index 57868ff54..e10913a22 100644
--- a/host/lib/usrp/cores/rx_vita_core_3000.cpp
+++ b/host/lib/usrp/cores/rx_vita_core_3000.cpp
@@ -16,7 +16,7 @@
//
#include "rx_vita_core_3000.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/tuple/tuple.hpp>
@@ -96,7 +96,7 @@ struct rx_vita_core_3000_impl : rx_vita_core_3000
{
if (not _is_setup)
{
- //UHD_MSG(warning) << "rx vita core 3000 issue stream command - not setup yet!";
+ //UHD_LOGGER_WARNING("CORES") << "rx vita core 3000 issue stream command - not setup yet!";
return;
}
UHD_ASSERT_THROW(stream_cmd.num_samps <= 0x0fffffff);
diff --git a/host/lib/usrp/cores/spi_core_100.cpp b/host/lib/usrp/cores/spi_core_100.cpp
index 22b163b14..4d61821b4 100644
--- a/host/lib/usrp/cores/spi_core_100.cpp
+++ b/host/lib/usrp/cores/spi_core_100.cpp
@@ -17,7 +17,7 @@
#include "spi_core_100.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp> //sleep
#define REG_SPI_TXRX0 _base + 0
@@ -80,7 +80,7 @@ private:
if ((_iface->peek16(REG_SPI_CTRL) & SPI_CTRL_GO_BSY) == 0) return;
boost::this_thread::sleep(boost::posix_time::milliseconds(1));
}
- UHD_MSG(error) << "spi_core_100: spi_wait timeout" << std::endl;
+ UHD_LOGGER_ERROR("CORES") << "spi_core_100: spi_wait timeout" ;
}
wb_iface::sptr _iface;
diff --git a/host/lib/usrp/cores/spi_core_3000.cpp b/host/lib/usrp/cores/spi_core_3000.cpp
index 78b0af1a3..2abbac317 100644
--- a/host/lib/usrp/cores/spi_core_3000.cpp
+++ b/host/lib/usrp/cores/spi_core_3000.cpp
@@ -17,7 +17,7 @@
#include "spi_core_3000.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp> //sleep
#define SPI_DIV _base + 0
diff --git a/host/lib/usrp/cores/time_core_3000.cpp b/host/lib/usrp/cores/time_core_3000.cpp
index 25142b9fe..296923756 100644
--- a/host/lib/usrp/cores/time_core_3000.cpp
+++ b/host/lib/usrp/cores/time_core_3000.cpp
@@ -17,7 +17,7 @@
#include "time_core_3000.hpp"
#include <uhd/utils/safe_call.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp>
#define REG_TIME_HI _base + 0
@@ -63,21 +63,21 @@ struct time_core_3000_impl : time_core_3000
void self_test(void)
{
const size_t sleep_millis = 100;
- UHD_MSG(status) << "Performing timer loopback test... " << std::flush;
+ UHD_LOGGER_INFO("CORES") << "Performing timer loopback test... ";
const time_spec_t time0 = this->get_time_now();
boost::this_thread::sleep(boost::posix_time::milliseconds(sleep_millis));
const time_spec_t time1 = this->get_time_now();
const double approx_secs = (time1 - time0).get_real_secs();
const bool test_fail = (approx_secs > 0.15) or (approx_secs < 0.05);
- UHD_MSG(status) << ((test_fail)? " fail" : "pass") << std::endl;
+ UHD_LOGGER_INFO("CORES") << "Timer loopback test " << ((test_fail)? "failed" : "passed");
//useful warning for debugging actual rate
const size_t ticks_elapsed = size_t(_tick_rate*approx_secs);
const size_t approx_rate = size_t(ticks_elapsed/(sleep_millis/1e3));
- if (test_fail) UHD_MSG(warning)
+ if (test_fail) UHD_LOGGER_WARNING("CORES")
<< "Expecting clock rate: " << (_tick_rate/1e6) << " MHz\n"
<< "Approximate clock rate: " << (approx_rate/1e6) << " MHz\n"
- << std::endl;
+ ;
}
uhd::time_spec_t get_time_now(void)
diff --git a/host/lib/usrp/cores/tx_dsp_core_200.cpp b/host/lib/usrp/cores/tx_dsp_core_200.cpp
index 4e1743ee1..f1bf560dc 100644
--- a/host/lib/usrp/cores/tx_dsp_core_200.cpp
+++ b/host/lib/usrp/cores/tx_dsp_core_200.cpp
@@ -20,7 +20,7 @@
#include <uhd/types/dict.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/math.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/math/special_functions/round.hpp>
#include <boost/thread/thread.hpp> //sleep
@@ -135,7 +135,7 @@ public:
if (interp > 1 and hb0 == 0 and hb1 == 0)
{
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("CORES") << boost::format(
"The requested interpolation is odd; the user should expect CIC rolloff.\n"
"Select an even interpolation to ensure that a halfband filter is enabled.\n"
"interpolation = dsp_rate/samp_rate -> %d = (%f MHz)/(%f MHz)\n"
diff --git a/host/lib/usrp/cores/tx_dsp_core_3000.cpp b/host/lib/usrp/cores/tx_dsp_core_3000.cpp
index 67ff418b3..c30e8dfc1 100644
--- a/host/lib/usrp/cores/tx_dsp_core_3000.cpp
+++ b/host/lib/usrp/cores/tx_dsp_core_3000.cpp
@@ -20,7 +20,7 @@
#include <uhd/types/dict.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/math.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/math/special_functions/round.hpp>
#include <boost/thread/thread.hpp> //sleep
@@ -103,7 +103,7 @@ public:
if (interp > 1 and hb0 == 0 and hb1 == 0)
{
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("CORES") << boost::format(
"The requested interpolation is odd; the user should expect CIC rolloff.\n"
"Select an even interpolation to ensure that a halfband filter is enabled.\n"
"interpolation = dsp_rate/samp_rate -> %d = (%f MHz)/(%f MHz)\n"
diff --git a/host/lib/usrp/dboard/db_basic_and_lf.cpp b/host/lib/usrp/dboard/db_basic_and_lf.cpp
index 941a80ea4..a3d9fb316 100644
--- a/host/lib/usrp/dboard/db_basic_and_lf.cpp
+++ b/host/lib/usrp/dboard/db_basic_and_lf.cpp
@@ -19,7 +19,7 @@
#include <uhd/types/ranges.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/static.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/usrp/dboard_base.hpp>
#include <uhd/usrp/dboard_manager.hpp>
#include <boost/assign/list_of.hpp>
diff --git a/host/lib/usrp/dboard/db_cbx.cpp b/host/lib/usrp/dboard/db_cbx.cpp
index c27cbf58a..cc98205e9 100644
--- a/host/lib/usrp/dboard/db_cbx.cpp
+++ b/host/lib/usrp/dboard/db_cbx.cpp
@@ -40,7 +40,7 @@ sbx_xcvr::cbx::~cbx(void){
void sbx_xcvr::cbx::write_lo_regs(dboard_iface::unit_t unit, const std::vector<uint32_t> &regs)
{
- BOOST_FOREACH(uint32_t reg, regs)
+ for(uint32_t reg: regs)
{
self_base->get_iface()->write_spi(unit, spi_config_t::EDGE_RISE, reg, 32);
}
@@ -51,9 +51,9 @@ void sbx_xcvr::cbx::write_lo_regs(dboard_iface::unit_t unit, const std::vector<u
* Tuning
**********************************************************************/
double sbx_xcvr::cbx::set_lo_freq(dboard_iface::unit_t unit, double target_freq) {
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("CBX") << boost::format(
"CBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
//clip the input
target_freq = cbx_freq_range.clip(target_freq);
diff --git a/host/lib/usrp/dboard/db_dbsrx.cpp b/host/lib/usrp/dboard/db_dbsrx.cpp
index f296820c5..ecfeb5308 100644
--- a/host/lib/usrp/dboard/db_dbsrx.cpp
+++ b/host/lib/usrp/dboard/db_dbsrx.cpp
@@ -24,7 +24,7 @@
#include <uhd/utils/static.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/types/ranges.hpp>
#include <uhd/types/sensors.hpp>
#include <uhd/types/dict.hpp>
@@ -98,9 +98,9 @@ private:
//get the register data
for(int i=0; i<num_bytes; i++){
regs_vector[1+i] = _max2118_write_regs.get_reg(start_addr+i);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: send reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes << std::endl;
+ ) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes ;
}
//send the data
@@ -130,9 +130,9 @@ private:
if (i + start_addr >= status_addr){
_max2118_read_regs.set_reg(i + start_addr, regs_vector[i]);
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: read reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes << std::endl;
+ ) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes ;
}
}
}
@@ -147,9 +147,9 @@ private:
//mask and return lock detect
bool locked = 5 >= _max2118_read_regs.adc and _max2118_read_regs.adc >= 2;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: locked %d"
- ) % locked << std::endl;
+ ) % locked ;
return sensor_value_t("LO", locked, "locked", "unlocked");
}
@@ -175,7 +175,7 @@ UHD_STATIC_BLOCK(reg_dbsrx_dboard){
dbsrx::dbsrx(ctor_args_t args) : rx_dboard_base(args){
//warn user about incorrect DBID on USRP1, requires R193 populated
if (this->get_iface()->get_special_props().soft_clock_divider and this->get_rx_id() == 0x000D)
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("DBSRX") << boost::format(
"DBSRX: incorrect dbid\n"
"Expected dbid 0x0002 and R193\n"
"found dbid == %d\n"
@@ -184,7 +184,7 @@ dbsrx::dbsrx(ctor_args_t args) : rx_dboard_base(args){
//warn user about incorrect DBID on non-USRP1, requires R194 populated
if (not this->get_iface()->get_special_props().soft_clock_divider and this->get_rx_id() == 0x0002)
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("DBSRX") << boost::format(
"DBSRX: incorrect dbid\n"
"Expected dbid 0x000D and R194\n"
"found dbid == %d\n"
@@ -205,7 +205,7 @@ dbsrx::dbsrx(ctor_args_t args) : rx_dboard_base(args){
.set("DBSRX");
this->get_rx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&dbsrx::get_locked, this));
- BOOST_FOREACH(const std::string &name, dbsrx_gain_ranges.keys()){
+ for(const std::string &name: dbsrx_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&dbsrx::set_gain, this, _1, name))
.set(dbsrx_gain_ranges[name].start());
@@ -266,7 +266,7 @@ double dbsrx::set_lo_freq(double target_freq){
//choose refclock
std::vector<double> clock_rates = this->get_iface()->get_clock_rates(dboard_iface::UNIT_RX);
const double max_clock_rate = uhd::sorted(clock_rates).back();
- BOOST_FOREACH(ref_clock, uhd::reversed(uhd::sorted(clock_rates))){
+ for(auto ref_clock: uhd::reversed(uhd::sorted(clock_rates))){
//USRP1 feeds the DBSRX clock from a FPGA GPIO line.
//make sure that this clock does not exceed rate limit.
if (this->get_iface()->get_special_props().soft_clock_divider){
@@ -279,17 +279,17 @@ double dbsrx::set_lo_freq(double target_freq){
m = 31;
while ((ref_clock/m < 1e6 or ref_clock/m > 2.5e6) and m > 0){ m--; }
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: trying ref_clock %f and m_divider %d"
- ) % (ref_clock) % m << std::endl;
+ ) % (ref_clock) % m ;
if (m >= 32) continue;
//choose R
- for(r = 0; r <= 6; r += 1) {
+ for(auto r = 0; r <= 6; r += 1) {
//compute divider from setting
R = 1 << (r+1);
- UHD_LOGV(often) << boost::format("DBSRX R:%d\n") % R << std::endl;
+ UHD_LOGGER_TRACE("DBSRX") << boost::format("DBSRX R:%d\n") % R ;
//compute PFD compare frequency = ref_clock/R
pfd_freq = ref_clock / R;
@@ -315,9 +315,9 @@ double dbsrx::set_lo_freq(double target_freq){
UHD_ASSERT_THROW((pfd_freq >= dbsrx_pfd_freq_range.start()) and (pfd_freq <= dbsrx_pfd_freq_range.stop()));
UHD_ASSERT_THROW((N >= 256) and (N <= 32768));
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: choose ref_clock (current: %f, new: %f) and m_divider %d"
- ) % (this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX)) % ref_clock % m << std::endl;
+ ) % (this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX)) % ref_clock % m ;
//if ref_clock or m divider changed, we need to update the filter settings
if (ref_clock != this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX) or m != _max2118_write_regs.m_divider) update_filter_settings = true;
@@ -337,9 +337,9 @@ double dbsrx::set_lo_freq(double target_freq){
int scaler = actual_freq > 1125e6 ? 2 : 4;
_max2118_write_regs.div2 = scaler == 4 ? max2118_write_regs_t::DIV2_DIV4 : max2118_write_regs_t::DIV2_DIV2;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: scaler %d, actual_freq %f MHz, register bit: %d"
- ) % scaler % (actual_freq/1e6) % int(_max2118_write_regs.div2) << std::endl;
+ ) % scaler % (actual_freq/1e6) % int(_max2118_write_regs.div2) ;
//compute vco frequency and select vco
double vco_freq = actual_freq * scaler;
@@ -366,9 +366,9 @@ double dbsrx::set_lo_freq(double target_freq){
//check vtune for lock condition
read_reg(0x0, 0x0);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: initial guess for vco %d, vtune adc %d"
- ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) << std::endl;
+ ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) ;
//if we are out of lock for chosen vco, change vco
while ((_max2118_read_regs.adc == 0) or (_max2118_read_regs.adc == 7)){
@@ -376,7 +376,7 @@ double dbsrx::set_lo_freq(double target_freq){
//vtune is too low, try lower frequency vco
if (_max2118_read_regs.adc == 0){
if (_max2118_write_regs.osc_band == 0){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("DBSRX") << boost::format(
"DBSRX: Tuning exceeded vco range, _max2118_write_regs.osc_band == %d\n"
) % int(_max2118_write_regs.osc_band);
UHD_ASSERT_THROW(_max2118_read_regs.adc != 0); //just to cause a throw
@@ -388,7 +388,7 @@ double dbsrx::set_lo_freq(double target_freq){
//vtune is too high, try higher frequency vco
if (_max2118_read_regs.adc == 7){
if (_max2118_write_regs.osc_band == 7){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("DBSRX") << boost::format(
"DBSRX: Tuning exceeded vco range, _max2118_write_regs.osc_band == %d\n"
) % int(_max2118_write_regs.osc_band);
UHD_ASSERT_THROW(_max2118_read_regs.adc != 7); //just to cause a throw
@@ -397,9 +397,9 @@ double dbsrx::set_lo_freq(double target_freq){
_max2118_write_regs.osc_band += 1;
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: trying vco %d, vtune adc %d"
- ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) << std::endl;
+ ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) ;
//update vco selection and check vtune
send_reg(0x2, 0x2);
@@ -409,9 +409,9 @@ double dbsrx::set_lo_freq(double target_freq){
boost::this_thread::sleep(boost::posix_time::milliseconds(10));
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX: final vco %d, vtune adc %d"
- ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) << std::endl;
+ ) % int(_max2118_write_regs.osc_band) % int(_max2118_read_regs.adc) ;
//select charge pump bias current
if (_max2118_read_regs.adc <= 2) _max2118_write_regs.cp_current = max2118_write_regs_t::CP_CURRENT_I_CP_100UA;
@@ -425,7 +425,7 @@ double dbsrx::set_lo_freq(double target_freq){
_lo_freq = this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX) / std::pow(2.0,(1 + _max2118_write_regs.r_divider)) * _max2118_write_regs.get_n_divider();
//debug output of calculated variables
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("DBSRX")
<< boost::format("DBSRX tune:\n")
<< boost::format(" VCO=%d, CP=%d, PFD Freq=%fMHz\n") % int(_max2118_write_regs.osc_band) % _max2118_write_regs.cp_current % (pfd_freq/1e6)
<< boost::format(" R=%d, N=%f, scaler=%d, div2=%d\n") % R % N % scaler % int(_max2118_write_regs.div2)
@@ -433,7 +433,7 @@ double dbsrx::set_lo_freq(double target_freq){
<< boost::format(" Target Freq=%fMHz\n") % (target_freq/1e6)
<< boost::format(" Actual Freq=%fMHz\n") % (_lo_freq/1e6)
<< boost::format(" VCO Freq=%fMHz\n") % (vco_freq/1e6)
- << std::endl;
+ ;
if (update_filter_settings) set_bandwidth(_bandwidth);
get_locked();
@@ -463,9 +463,9 @@ static int gain_to_gc2_vga_reg(double &gain){
gain = double(boost::math::iround(gain));
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX GC2 Gain: %f dB, reg: %d"
- ) % gain % reg << std::endl;
+ ) % gain % reg ;
return reg;
}
@@ -487,9 +487,9 @@ static double gain_to_gc1_rfvga_dac(double &gain){
//calculate the voltage for the aux dac
double dac_volts = gain*slope + min_volts;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX GC1 Gain: %f dB, dac_volts: %f V"
- ) % gain % dac_volts << std::endl;
+ ) % gain % dac_volts ;
//the actual gain setting
gain = (dac_volts - min_volts)/slope;
@@ -533,9 +533,9 @@ double dbsrx::set_bandwidth(double bandwidth){
//determine actual bandwidth
_bandwidth = double((ref_clock/(_max2118_write_regs.m_divider))*(4+0.145*_max2118_write_regs.f_dac));
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX Filter Bandwidth: %f MHz, m: %d, f_dac: %d\n"
- ) % (_bandwidth/1e6) % int(_max2118_write_regs.m_divider) % int(_max2118_write_regs.f_dac) << std::endl;
+ ) % (_bandwidth/1e6) % int(_max2118_write_regs.m_divider) % int(_max2118_write_regs.f_dac) ;
this->send_reg(0x3, 0x4);
diff --git a/host/lib/usrp/dboard/db_dbsrx2.cpp b/host/lib/usrp/dboard/db_dbsrx2.cpp
index 21b0fd02d..8cc209218 100644
--- a/host/lib/usrp/dboard/db_dbsrx2.cpp
+++ b/host/lib/usrp/dboard/db_dbsrx2.cpp
@@ -92,9 +92,9 @@ private:
//get the register data
for(int i=0; i<num_bytes; i++){
regs_vector[1+i] = _max2112_write_regs.get_reg(start_addr+i);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX2: send reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes << std::endl;
+ ) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes ;
}
//send the data
@@ -132,15 +132,10 @@ private:
for(uint8_t i=0; i < num_bytes; i++){
if (i + start_addr >= status_addr){
_max2112_read_regs.set_reg(i + start_addr, regs_vector[i]);
- /*
- UHD_LOGV(always) << boost::format(
- "DBSRX2: set reg 0x%02x, value 0x%04x"
- ) % int(i + start_addr) % int(_max2112_read_regs.get_reg(i + start_addr)) << std::endl;
- */
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX2: read reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes << std::endl;
+ ) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes ;
}
}
}
@@ -155,9 +150,9 @@ private:
//mask and return lock detect
bool locked = (_max2112_read_regs.ld & _max2112_read_regs.vasa & _max2112_read_regs.vase) != 0;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("DBSRX") << boost::format(
"DBSRX2 locked: %d"
- ) % locked << std::endl;
+ ) % locked ;
return sensor_value_t("LO", locked, "locked", "unlocked");
}
@@ -189,10 +184,10 @@ dbsrx2::dbsrx2(ctor_args_t args) : rx_dboard_base(args){
// Register properties
////////////////////////////////////////////////////////////////////
this->get_rx_subtree()->create<std::string>("name")
- .set("DBSRX2");
+ .set("DBSRX");
this->get_rx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&dbsrx2::get_locked, this));
- BOOST_FOREACH(const std::string &name, dbsrx2_gain_ranges.keys()){
+ for(const std::string &name: dbsrx2_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&dbsrx2::set_gain, this, _1, name))
.set(dbsrx2_gain_ranges[name].start());
@@ -270,14 +265,14 @@ double dbsrx2::set_lo_freq(double target_freq){
_max2112_write_regs.d24 = scaler == 4 ? max2112_write_regs_t::D24_DIV4 : max2112_write_regs_t::D24_DIV2;
//debug output of calculated variables
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("DBSRX")
<< boost::format("DBSRX2 tune:\n")
<< boost::format(" R=%d, N=%f, scaler=%d, ext_div=%d\n") % R % N % scaler % ext_div
<< boost::format(" int=%d, frac=%d, d24=%d\n") % intdiv % fracdiv % int(_max2112_write_regs.d24)
<< boost::format(" Ref Freq=%fMHz\n") % (ref_freq/1e6)
<< boost::format(" Target Freq=%fMHz\n") % (target_freq/1e6)
<< boost::format(" Actual Freq=%fMHz\n") % (_lo_freq/1e6)
- << std::endl;
+ ;
//send the registers 0x0 through 0x7
//writing register 0x4 (F divider LSB) starts the VCO auto selection so it must be written last
@@ -304,10 +299,10 @@ static int gain_to_bbg_vga_reg(double &gain){
gain = double(reg);
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("DBSRX")
<< boost::format("DBSRX2 BBG Gain:\n")
<< boost::format(" %f dB, bbg: %d") % gain % reg
- << std::endl;
+ ;
return reg;
}
@@ -329,10 +324,10 @@ static double gain_to_gc1_rfvga_dac(double &gain){
//calculate the voltage for the aux dac
double dac_volts = gain*slope + min_volts;
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("DBSRX")
<< boost::format("DBSRX2 GC1 Gain:\n")
<< boost::format(" %f dB, dac_volts: %f V") % gain % dac_volts
- << std::endl;
+ ;
//the actual gain setting
gain = (dac_volts - min_volts)/slope;
@@ -369,10 +364,10 @@ double dbsrx2::set_bandwidth(double bandwidth){
_max2112_write_regs.lp = int((bandwidth/1e6 - 4)/0.29 + 12);
_bandwidth = double(4 + (_max2112_write_regs.lp - 12) * 0.29)*1e6;
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("DBSRX")
<< boost::format("DBSRX2 Bandwidth:\n")
<< boost::format(" %f MHz, lp: %f V") % (_bandwidth/1e6) % int(_max2112_write_regs.lp)
- << std::endl;
+ ;
this->send_reg(0x8, 0x8);
diff --git a/host/lib/usrp/dboard/db_rfx.cpp b/host/lib/usrp/dboard/db_rfx.cpp
index 9bbd73425..e8ceb7d49 100644
--- a/host/lib/usrp/dboard/db_rfx.cpp
+++ b/host/lib/usrp/dboard/db_rfx.cpp
@@ -40,7 +40,7 @@
#include <uhd/utils/log.hpp>
#include <uhd/utils/static.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/usrp/dboard_id.hpp>
#include <uhd/usrp/dboard_base.hpp>
#include <uhd/usrp/dboard_manager.hpp>
@@ -184,7 +184,7 @@ rfx_xcvr::rfx_xcvr(
this->get_rx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&rfx_xcvr::get_locked, this, dboard_iface::UNIT_RX));
- BOOST_FOREACH(const std::string &name, _rx_gain_ranges.keys()){
+ for(const std::string &name: _rx_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&rfx_xcvr::set_rx_gain, this, _1, name))
.set(_rx_gain_ranges[name].start());
@@ -339,9 +339,9 @@ double rfx_xcvr::set_lo_freq(
dboard_iface::unit_t unit,
double target_freq
){
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("RFX") << boost::format(
"RFX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
//clip the input
target_freq = _freq_range.clip(target_freq);
@@ -379,9 +379,9 @@ double rfx_xcvr::set_lo_freq(
* fvco*R/fref = P*B + A = N
*/
for(R = 2; R <= 32; R+=2){
- BOOST_FOREACH(BS, bandsel_to_enum.keys()){
+ for(auto BS: bandsel_to_enum.keys()){
if (ref_freq/R/BS > 1e6) continue; //constraint on band select clock
- BOOST_FOREACH(P, prescaler_to_enum.keys()){
+ for(auto P: prescaler_to_enum.keys()){
//calculate B and A from N
double N = target_freq*R/ref_freq;
B = int(std::floor(N/P));
@@ -396,9 +396,9 @@ double rfx_xcvr::set_lo_freq(
}
} done_loop:
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("RFX") << boost::format(
"RFX tune: R=%d, BS=%d, P=%d, B=%d, A=%d, DIV2=%d"
- ) % R % BS % P % B % A % int(_div2[unit] && (!is_rx_rfx400)) << std::endl;
+ ) % R % BS % P % B % A % int(_div2[unit] && (!is_rx_rfx400)) ;
//load the register values
adf4360_regs_t regs;
@@ -433,7 +433,7 @@ double rfx_xcvr::set_lo_freq(
(adf4360_regs_t::ADDR_CONTROL)
(adf4360_regs_t::ADDR_NCOUNTER)
;
- BOOST_FOREACH(adf4360_regs_t::addr_t addr, addrs){
+ for(adf4360_regs_t::addr_t addr: addrs){
this->get_iface()->write_spi(
unit, spi_config_t::EDGE_RISE,
regs.get_reg(addr), 24
@@ -442,8 +442,8 @@ double rfx_xcvr::set_lo_freq(
//return the actual frequency
if (_div2[unit]) actual_freq /= 2;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("RFX") << boost::format(
"RFX tune: actual frequency %f MHz"
- ) % (actual_freq/1e6) << std::endl;
+ ) % (actual_freq/1e6) ;
return actual_freq;
}
diff --git a/host/lib/usrp/dboard/db_sbx_common.cpp b/host/lib/usrp/dboard/db_sbx_common.cpp
index efc84d7e6..8df4028e7 100644
--- a/host/lib/usrp/dboard/db_sbx_common.cpp
+++ b/host/lib/usrp/dboard/db_sbx_common.cpp
@@ -53,9 +53,9 @@ static int rx_pga0_gain_to_iobits(double &gain){
int attn_code = int(floor(attn*2));
int iobits = ((~attn_code) << RX_ATTN_SHIFT) & RX_ATTN_MASK;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("SBX") << boost::format(
"SBX RX Attenuation: %f dB, Code: %d, IO Bits %x, Mask: %x"
- ) % attn % attn_code % (iobits & RX_ATTN_MASK) % RX_ATTN_MASK << std::endl;
+ ) % attn % attn_code % (iobits & RX_ATTN_MASK) % RX_ATTN_MASK ;
//the actual gain setting
gain = sbx_rx_gain_ranges["PGA0"].stop() - double(attn_code)/2;
@@ -74,9 +74,9 @@ static int tx_pga0_gain_to_iobits(double &gain){
int attn_code = int(floor(attn*2));
int iobits = ((~attn_code) << TX_ATTN_SHIFT) & TX_ATTN_MASK;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("SBX") << boost::format(
"SBX TX Attenuation: %f dB, Code: %d, IO Bits %x, Mask: %x"
- ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK << std::endl;
+ ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK ;
//the actual gain setting
gain = sbx_tx_gain_ranges["PGA0"].stop() - double(attn_code)/2;
@@ -160,7 +160,7 @@ sbx_xcvr::sbx_xcvr(ctor_args_t args) : xcvr_dboard_base(args){
this->get_rx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&sbx_xcvr::get_locked, this, dboard_iface::UNIT_RX));
- BOOST_FOREACH(const std::string &name, sbx_rx_gain_ranges.keys()){
+ for(const std::string &name: sbx_rx_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&sbx_xcvr::set_rx_gain, this, _1, name))
.set(sbx_rx_gain_ranges[name].start());
@@ -201,7 +201,7 @@ sbx_xcvr::sbx_xcvr(ctor_args_t args) : xcvr_dboard_base(args){
this->get_tx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&sbx_xcvr::get_locked, this, dboard_iface::UNIT_TX));
- BOOST_FOREACH(const std::string &name, sbx_tx_gain_ranges.keys()){
+ for(const std::string &name: sbx_tx_gain_ranges.keys()){
this->get_tx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&sbx_xcvr::set_tx_gain, this, _1, name))
.set(sbx_tx_gain_ranges[name].start());
@@ -240,9 +240,9 @@ sbx_xcvr::sbx_xcvr(ctor_args_t args) : xcvr_dboard_base(args){
//Initialize ATR registers after direction and pin ctrl configuration
update_atr();
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("SBX") << boost::format(
"SBX GPIO Direction: RX: 0x%08x, TX: 0x%08x"
- ) % RXIO_MASK % TXIO_MASK << std::endl;
+ ) % RXIO_MASK % TXIO_MASK ;
}
sbx_xcvr::~sbx_xcvr(void){
diff --git a/host/lib/usrp/dboard/db_sbx_common.hpp b/host/lib/usrp/dboard/db_sbx_common.hpp
index ad64e2267..0e4dc5ace 100644
--- a/host/lib/usrp/dboard/db_sbx_common.hpp
+++ b/host/lib/usrp/dboard/db_sbx_common.hpp
@@ -84,7 +84,7 @@
#include <uhd/utils/log.hpp>
#include <uhd/utils/static.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/usrp/dboard_base.hpp>
#include <uhd/usrp/dboard_manager.hpp>
#include <boost/assign/list_of.hpp>
diff --git a/host/lib/usrp/dboard/db_sbx_version3.cpp b/host/lib/usrp/dboard/db_sbx_version3.cpp
index bb2ba6bd6..f35c09946 100644
--- a/host/lib/usrp/dboard/db_sbx_version3.cpp
+++ b/host/lib/usrp/dboard/db_sbx_version3.cpp
@@ -40,7 +40,7 @@ sbx_xcvr::sbx_version3::~sbx_version3(void){
void sbx_xcvr::sbx_version3::write_lo_regs(dboard_iface::unit_t unit, const std::vector<uint32_t> &regs)
{
- BOOST_FOREACH(uint32_t reg, regs)
+ for(uint32_t reg: regs)
{
self_base->get_iface()->write_spi(unit, spi_config_t::EDGE_RISE, reg, 32);
}
@@ -50,9 +50,9 @@ void sbx_xcvr::sbx_version3::write_lo_regs(dboard_iface::unit_t unit, const std:
* Tuning
**********************************************************************/
double sbx_xcvr::sbx_version3::set_lo_freq(dboard_iface::unit_t unit, double target_freq) {
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("SBX") << boost::format(
"SBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
/*
* If the user sets 'mode_n=integer' in the tuning args, the user wishes to
diff --git a/host/lib/usrp/dboard/db_sbx_version4.cpp b/host/lib/usrp/dboard/db_sbx_version4.cpp
index e5b6e081c..746de780b 100644
--- a/host/lib/usrp/dboard/db_sbx_version4.cpp
+++ b/host/lib/usrp/dboard/db_sbx_version4.cpp
@@ -41,7 +41,7 @@ sbx_xcvr::sbx_version4::~sbx_version4(void){
void sbx_xcvr::sbx_version4::write_lo_regs(dboard_iface::unit_t unit, const std::vector<uint32_t> &regs)
{
- BOOST_FOREACH(uint32_t reg, regs)
+ for(uint32_t reg: regs)
{
self_base->get_iface()->write_spi(unit, spi_config_t::EDGE_RISE, reg, 32);
}
@@ -52,9 +52,9 @@ void sbx_xcvr::sbx_version4::write_lo_regs(dboard_iface::unit_t unit, const std:
* Tuning
**********************************************************************/
double sbx_xcvr::sbx_version4::set_lo_freq(dboard_iface::unit_t unit, double target_freq) {
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("SBX") << boost::format(
"SBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
/*
* If the user sets 'mode_n=integer' in the tuning args, the user wishes to
diff --git a/host/lib/usrp/dboard/db_tvrx.cpp b/host/lib/usrp/dboard/db_tvrx.cpp
index 5c0600c61..bae047bbd 100644
--- a/host/lib/usrp/dboard/db_tvrx.cpp
+++ b/host/lib/usrp/dboard/db_tvrx.cpp
@@ -31,7 +31,7 @@
#include <uhd/utils/static.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/types/ranges.hpp>
#include <uhd/types/sensors.hpp>
#include <uhd/types/dict.hpp>
@@ -107,7 +107,7 @@ static const boost::array<double, 17> tvrx_gains_volts =
static uhd::dict<std::string, gain_range_t> get_tvrx_gain_ranges(void) {
double rfmax = 0.0, rfmin = FLT_MAX;
- BOOST_FOREACH(const std::string range, tvrx_rf_gains_db.keys()) {
+ for(const std::string range: tvrx_rf_gains_db.keys()) {
double my_max = tvrx_rf_gains_db[range].back(); //we're assuming it's monotonic
double my_min = tvrx_rf_gains_db[range].front(); //if it's not this is wrong wrong wrong
if(my_max > rfmax) rfmax = my_max;
@@ -153,9 +153,9 @@ private:
//get the register data
for(int i=0; i<4; i++){
regs_vector[i] = _tuner_4937di5_regs.get_reg(i);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"tvrx: send reg 0x%02x, value 0x%04x"
- ) % int(i) % int(regs_vector[i]) << std::endl;
+ ) % int(i) % int(regs_vector[i]) ;
}
//send the data
@@ -188,7 +188,7 @@ tvrx::tvrx(ctor_args_t args) : rx_dboard_base(args){
this->get_rx_subtree()->create<std::string>("name")
.set("TVRX");
this->get_rx_subtree()->create<int>("sensors"); //phony property so this dir exists
- BOOST_FOREACH(const std::string &name, get_tvrx_gain_ranges().keys()){
+ for(const std::string &name: get_tvrx_gain_ranges().keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&tvrx::set_gain, this, _1, name));
this->get_rx_subtree()->create<meta_range_t>("gains/"+name+"/range")
@@ -232,7 +232,7 @@ tvrx::tvrx(ctor_args_t args) : rx_dboard_base(args){
this->get_rx_subtree()->access<double>("freq/value").set(tvrx_freq_range.start());
//set default gains
- BOOST_FOREACH(const std::string &name, get_tvrx_gain_ranges().keys()){
+ for(const std::string &name: get_tvrx_gain_ranges().keys()){
this->get_rx_subtree()->access<double>("gains/"+name+"/value")
.set(get_tvrx_gain_ranges()[name].start());
}
@@ -247,9 +247,9 @@ tvrx::~tvrx(void){
*/
static std::string get_band(double freq) {
- BOOST_FOREACH(const std::string &band, tvrx_freq_ranges.keys()) {
+ for(const std::string &band: tvrx_freq_ranges.keys()) {
if(freq >= tvrx_freq_ranges[band].start() && freq <= tvrx_freq_ranges[band].stop()){
- UHD_LOGV(often) << "Band: " << band << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << "Band: " << band ;
return band;
}
}
@@ -291,7 +291,7 @@ static double gain_interp(double gain, const boost::array<double, 17>& db_vector
//use the volts per dB slope to find the final interpolated voltage
volts = volts_vector[gain_step] + (slope * (gain - db_vector[gain_step]));
- UHD_LOGV(often) << "Gain interp: gain: " << gain << ", gain_step: " << int(gain_step) << ", slope: " << slope << ", volts: " << volts << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << "Gain interp: gain: " << gain << ", gain_step: " << int(gain_step) << ", slope: " << slope << ", volts: " << volts ;
return volts;
}
@@ -317,9 +317,9 @@ static double rf_gain_to_voltage(double gain, double lo_freq){
dac_volts = uhd::clip<double>(dac_volts, 0.0, 3.3);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"tvrx RF AGC gain: %f dB, dac_volts: %f V"
- ) % gain % dac_volts << std::endl;
+ ) % gain % dac_volts ;
return dac_volts;
}
@@ -340,9 +340,9 @@ static double if_gain_to_voltage(double gain){
dac_volts = uhd::clip<double>(dac_volts, 0.0, 3.3);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"tvrx IF AGC gain: %f dB, dac_volts: %f V"
- ) % gain % dac_volts << std::endl;
+ ) % gain % dac_volts ;
return dac_volts;
}
@@ -396,7 +396,7 @@ double tvrx::set_freq(double freq) {
//not FAR off, but we do this to be consistent
if(prev_band != new_band) set_gain(_gains["RF"], "RF");
- UHD_LOGV(often) << boost::format("set_freq: target LO: %f f_ref: %f divisor: %i actual LO: %f") % target_lo_freq % f_ref % divisor % actual_lo_freq << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format("set_freq: target LO: %f f_ref: %f divisor: %i actual LO: %f") % target_lo_freq % f_ref % divisor % actual_lo_freq ;
_lo_freq = actual_lo_freq; //for rx props
diff --git a/host/lib/usrp/dboard/db_tvrx2.cpp b/host/lib/usrp/dboard/db_tvrx2.cpp
index 1bac81153..2b5524069 100644
--- a/host/lib/usrp/dboard/db_tvrx2.cpp
+++ b/host/lib/usrp/dboard/db_tvrx2.cpp
@@ -52,7 +52,7 @@
#include "tda18272hnm_regs.hpp"
#include <uhd/utils/static.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/safe_call.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
@@ -803,9 +803,9 @@ private:
//return irq status
bool irq = _tda18272hnm_regs.irq_status == tda18272hnm_regs_t::IRQ_STATUS_SET;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): IRQ %d"
- ) % (get_subdev_name()) % irq << std::endl;
+ ) % (get_subdev_name()) % irq ;
return irq;
}
@@ -821,9 +821,9 @@ private:
//return POR state
bool por = _tda18272hnm_regs.por == tda18272hnm_regs_t::POR_RESET;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): POR %d"
- ) % (get_subdev_name()) % int(_tda18272hnm_regs.por) << std::endl;
+ ) % (get_subdev_name()) % int(_tda18272hnm_regs.por) ;
return por;
}
@@ -838,9 +838,9 @@ private:
//return lock detect
bool locked = _tda18272hnm_regs.lo_lock == tda18272hnm_regs_t::LO_LOCK_LOCKED;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): locked %d"
- ) % (get_subdev_name()) % locked << std::endl;
+ ) % (get_subdev_name()) % locked ;
return sensor_value_t("LO", locked, "locked", "unlocked");
}
@@ -881,9 +881,9 @@ private:
//read temp in degC
read_reg(0x3, 0x3);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Temperature %f C"
- ) % (get_subdev_name()) % (double(_tda18272hnm_regs.tm_d)) << std::endl;
+ ) % (get_subdev_name()) % (double(_tda18272hnm_regs.tm_d)) ;
//Disable Temperature reading
_tda18272hnm_regs.tm_on = tda18272hnm_regs_t::TM_ON_SENSOR_OFF;
@@ -962,7 +962,7 @@ tvrx2::tvrx2(ctor_args_t args) : rx_dboard_base(args){
.set_publisher(boost::bind(&tvrx2::get_rssi, this));
this->get_rx_subtree()->create<sensor_value_t>("sensors/temperature")
.set_publisher(boost::bind(&tvrx2::get_temp, this));
- BOOST_FOREACH(const std::string &name, tvrx2_gain_ranges.keys()){
+ for(const std::string &name: tvrx2_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&tvrx2::set_gain, this, _1, name));
this->get_rx_subtree()->create<meta_range_t>("gains/"+name+"/range")
@@ -999,42 +999,42 @@ tvrx2::tvrx2(ctor_args_t args) : rx_dboard_base(args){
if (ref_clock == 64.0e6) {
this->get_iface()->set_gpio_out(dboard_iface::UNIT_RX, REFCLOCK_DIV4);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Dividing Refclock by 4"
- ) % (get_subdev_name()) << std::endl;
+ ) % (get_subdev_name()) ;
_freq_scalar = (4*16.0e6)/(this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX));
} else if (ref_clock == 100e6) {
this->get_iface()->set_gpio_out(dboard_iface::UNIT_RX, REFCLOCK_DIV6);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Dividing Refclock by 6"
- ) % (get_subdev_name()) << std::endl;
+ ) % (get_subdev_name()) ;
_freq_scalar = (6*16.0e6)/this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX);
} else if (ref_clock == 200e6) {
- UHD_MSG(warning) << boost::format("ref_clock was 200e6, setting ref_clock divider for 100e6.") << std::endl;
+ UHD_LOGGER_WARNING("TVRX") << boost::format("ref_clock was 200e6, setting ref_clock divider for 100e6.") ;
this->get_iface()->set_clock_rate(dboard_iface::UNIT_RX, 100e6);
this->get_iface()->set_gpio_out(dboard_iface::UNIT_RX, REFCLOCK_DIV6);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Dividing Refclock by 6"
- ) % (get_subdev_name()) << std::endl;
+ ) % (get_subdev_name()) ;
_freq_scalar = (6*16.0e6)/this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX);
} else {
this->get_iface()->set_gpio_out(dboard_iface::UNIT_RX, REFCLOCK_DIV6);
- UHD_MSG(warning) << boost::format("Unsupported ref_clock %0.2f, valid options 64e6, 100e6, 200e6") % ref_clock << std::endl;
+ UHD_LOGGER_WARNING("TVRX") << boost::format("Unsupported ref_clock %0.2f, valid options 64e6, 100e6, 200e6") % ref_clock ;
_freq_scalar = 1.0;
}
//enable only the clocks we need
this->get_iface()->set_clock_enabled(dboard_iface::UNIT_RX, true);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Refclock %f Hz, scalar = %f"
- ) % (get_subdev_name()) % (this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX)) % _freq_scalar << std::endl;
+ ) % (get_subdev_name()) % (this->get_iface()->get_clock_rate(dboard_iface::UNIT_RX)) % _freq_scalar ;
_tda18272hnm_regs.irq_polarity = tda18272hnm_regs_t::IRQ_POLARITY_RAISED_VCC;
_tda18272hnm_regs.irq_clear = tda18272hnm_regs_t::IRQ_CLEAR_TRUE;
@@ -1066,7 +1066,7 @@ bool tvrx2::set_enabled(bool enable){
test_rf_filter_robustness();
- BOOST_FOREACH(const std::string &name, tvrx2_gain_ranges.keys()){
+ for(const std::string &name: tvrx2_gain_ranges.keys()){
this->get_rx_subtree()->access<double>("gains/"+name+"/value")
.set(tvrx2_gain_ranges[name].start());
}
@@ -1092,9 +1092,9 @@ bool tvrx2::set_enabled(bool enable){
}
tvrx2::~tvrx2(void){
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s): Called Destructor"
- ) % (get_subdev_name()) << std::endl;
+ ) % (get_subdev_name()) ;
UHD_SAFE_CALL(if (_enabled) set_enabled(false);)
}
@@ -1134,9 +1134,9 @@ void tvrx2::send_reg(uint8_t start_reg, uint8_t stop_reg){
//get the register data
for(int i=0; i<num_bytes; i++){
regs_vector[1+i] = _tda18272hnm_regs.get_reg(start_addr+i);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s, 0x%02x): send reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % (get_subdev_name()) % int(tvrx2_sd_name_to_i2c_addr[get_subdev_name()]) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes << std::endl;
+ ) % (get_subdev_name()) % int(tvrx2_sd_name_to_i2c_addr[get_subdev_name()]) % int(start_addr+i) % int(regs_vector[1+i]) % int(start_addr) % num_bytes ;
}
//send the data
@@ -1177,9 +1177,9 @@ void tvrx2::read_reg(uint8_t start_reg, uint8_t stop_reg){
if (i + start_addr >= status_addr){
_tda18272hnm_regs.set_reg(i + start_addr, regs_vector[i]);
}
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s, 0x%02x): read reg 0x%02x, value 0x%04x, start_addr = 0x%04x, num_bytes %d"
- ) % (get_subdev_name()) % int(tvrx2_sd_name_to_i2c_addr[get_subdev_name()]) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes << std::endl;
+ ) % (get_subdev_name()) % int(tvrx2_sd_name_to_i2c_addr[get_subdev_name()]) % int(start_addr+i) % int(regs_vector[i]) % int(start_addr) % num_bytes ;
}
}
}
@@ -1271,14 +1271,14 @@ void tvrx2::tvrx2_tda18272_init_rfcal(void)
// Loop through rfcal_log_* registers, initialize _rfcal_results
- BOOST_FOREACH(const uint32_t &result, result_to_cal_regs.keys())
+ for(const uint32_t &result: result_to_cal_regs.keys())
_rfcal_results[result].delta_c = result_to_cal_regs[result] > 63 ? result_to_cal_regs[result] - 128 : result_to_cal_regs[result];
/* read byte 0x26-0x2B */
read_reg(0x26, 0x2B);
// Loop through rfcal_byte_* registers, initialize _rfcal_coeffs
- BOOST_FOREACH(const uint32_t &subband, _rfcal_coeffs.keys())
+ for(const uint32_t &subband: _rfcal_coeffs.keys())
{
freq_range_t subband_freqs;
@@ -1374,7 +1374,7 @@ void tvrx2::tvrx2_tda18272_tune_rf_filter(uint32_t uRF)
_tda18272hnm_regs.gain_taper = gain_taper;
_tda18272hnm_regs.rf_filter_band = RFBand;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"\nTVRX2 (%s): Software Calibration:\n"
"\tRF Filter Bypass = %d\n"
"\tRF Filter Cap = %d\n"
@@ -1385,14 +1385,14 @@ void tvrx2::tvrx2_tda18272_tune_rf_filter(uint32_t uRF)
% int(_tda18272hnm_regs.rf_filter_cap)
% int(_tda18272hnm_regs.rf_filter_band)
% int(_tda18272hnm_regs.gain_taper)
- << std::endl;
+ ;
send_reg(0x2c, 0x2f);
}
void tvrx2::soft_calibration(void){
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Software Calibration: Initialize Tuner, Calibrate and Standby\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Software Calibration: Initialize Tuner, Calibrate and Standby\n") % (get_subdev_name()) ;
_tda18272hnm_regs.sm = tda18272hnm_regs_t::SM_NORMAL;
_tda18272hnm_regs.sm_lna = tda18272hnm_regs_t::SM_LNA_ON;
@@ -1473,7 +1473,7 @@ void tvrx2::test_rf_filter_robustness(void){
("UHFHigh_1", 0x43)
;
- BOOST_FOREACH(const std::string &name, filter_cal_regs.keys()){
+ for(const std::string &name: filter_cal_regs.keys()){
uint8_t cal_result = _tda18272hnm_regs.get_reg(filter_cal_regs[name]);
if (cal_result & 0x80) {
_filter_ratings.set(name, "E");
@@ -1515,12 +1515,12 @@ void tvrx2::test_rf_filter_robustness(void){
}
std::stringstream robustness_message;
- robustness_message << boost::format("TVRX2 (%s): RF Filter Robustness Results:") % (get_subdev_name()) << std::endl;
- BOOST_FOREACH(const std::string &name, uhd::sorted(_filter_ratings.keys())){
- robustness_message << boost::format("\t%s:\tMargin = %0.2f,\tRobustness = %c") % name % (_filter_margins[name]) % (_filter_ratings[name]) << std::endl;
+ robustness_message << boost::format("TVRX2 (%s): RF Filter Robustness Results:") % (get_subdev_name());
+ for(const std::string &name: uhd::sorted(_filter_ratings.keys())){
+ robustness_message << boost::format("\t%s:\tMargin = %0.2f,\tRobustness = %c") % name % (_filter_margins[name]) % (_filter_ratings[name]);
}
- UHD_LOGV(often) << robustness_message.str();
+ UHD_LOGGER_DEBUG("TVRX") << robustness_message.str();
}
/***********************************************************************
@@ -1528,8 +1528,8 @@ void tvrx2::test_rf_filter_robustness(void){
**********************************************************************/
void tvrx2::transition_0(void){
//Transition 0: Initialize Tuner and place in standby
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Transition 0: Initialize Tuner, Calibrate and Standby\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Transition 0: Initialize Tuner, Calibrate and Standby\n") % (get_subdev_name()) ;
//Check for Power-On Reset, if reset, initialze tuner
if (get_power_reset()) {
@@ -1582,8 +1582,8 @@ void tvrx2::transition_0(void){
void tvrx2::transition_1(void){
//Transition 1: Select TV Standard
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Transition 1: Select TV Standard\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Transition 1: Select TV Standard\n") % (get_subdev_name()) ;
//send magic xtal_cal_dac setting
send_reg(0x65, 0x65);
@@ -1613,8 +1613,8 @@ void tvrx2::transition_1(void){
void tvrx2::transition_2(int rf_freq){
//Transition 2: Select RF Frequency after changing TV Standard
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Transition 2: Select RF Frequency after changing TV Standard\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Transition 2: Select RF Frequency after changing TV Standard\n") % (get_subdev_name()) ;
//send magic xtal_cal_dac setting
send_reg(0x65, 0x65);
@@ -1651,8 +1651,8 @@ void tvrx2::transition_2(int rf_freq){
void tvrx2::transition_3(void){
//Transition 3: Standby Mode
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Transition 3: Standby Mode\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Transition 3: Standby Mode\n") % (get_subdev_name()) ;
//send magic xtal_cal_dac setting
send_reg(0x65, 0x65);
@@ -1670,8 +1670,8 @@ void tvrx2::transition_3(void){
void tvrx2::transition_4(int rf_freq){
//Transition 4: Change RF Frequency without changing TV Standard
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Transition 4: Change RF Frequency without changing TV Standard\n") % (get_subdev_name()) << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Transition 4: Change RF Frequency without changing TV Standard\n") % (get_subdev_name()) ;
//send magic xtal_cal_dac setting
send_reg(0x65, 0x65);
@@ -1699,8 +1699,8 @@ void tvrx2::wait_irq(void){
int timeout = 20; //irq waiting timeout in milliseconds
//int irq = (this->get_iface()->read_gpio(dboard_iface::UNIT_RX) & int(tvrx2_sd_name_to_irq_io[get_subdev_name()]));
bool irq = get_irq();
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Waiting on IRQ, subdev = %d, mask = 0x%x, Status: 0x%x\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Waiting on IRQ, subdev = %d, mask = 0x%x, Status: 0x%x\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq ;
while (not irq and timeout > 0) {
//irq = (this->get_iface()->read_gpio(dboard_iface::UNIT_RX) & tvrx2_sd_name_to_irq_io[get_subdev_name()]);
@@ -1709,13 +1709,13 @@ void tvrx2::wait_irq(void){
timeout -= 1;
}
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): IRQ Raised, subdev = %d, mask = 0x%x, Status: 0x%x, Timeout: %d\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq % timeout << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): IRQ Raised, subdev = %d, mask = 0x%x, Status: 0x%x, Timeout: %d\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq % timeout ;
read_reg(0xA, 0xB);
//UHD_ASSERT_THROW(timeout > 0);
- if(timeout <= 0) UHD_MSG(warning) << boost::format(
- "\nTVRX2 (%s): Timeout waiting on IRQ\n") % (get_subdev_name()) << std::endl;
+ if(timeout <= 0) UHD_LOGGER_WARNING("TVRX") << boost::format(
+ "\nTVRX2 (%s): Timeout waiting on IRQ\n") % (get_subdev_name()) ;
_tda18272hnm_regs.irq_clear = tda18272hnm_regs_t::IRQ_CLEAR_TRUE;
send_reg(0xA, 0xA);
@@ -1723,8 +1723,8 @@ void tvrx2::wait_irq(void){
irq = (this->get_iface()->read_gpio(dboard_iface::UNIT_RX) & tvrx2_sd_name_to_irq_io[get_subdev_name()]) > 0;
- UHD_LOGV(often) << boost::format(
- "\nTVRX2 (%s): Cleared IRQ, subdev = %d, mask = 0x%x, Status: 0x%x\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq << std::endl;
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
+ "\nTVRX2 (%s): Cleared IRQ, subdev = %d, mask = 0x%x, Status: 0x%x\n") % (get_subdev_name()) % get_subdev_name() % (int(tvrx2_sd_name_to_irq_io[get_subdev_name()])) % irq ;
}
@@ -1748,20 +1748,20 @@ double tvrx2::set_lo_freq(double target_freq){
_lo_freq = get_scaled_rf_freq() + get_scaled_if_freq(); // - _bandwidth/2;
//debug output of calculated variables
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"\nTVRX2 (%s): LO Frequency\n"
"\tRequested: \t%f\n"
"\tComputed: \t%f\n"
"\tReadback: \t%f\n"
- "\tIF Frequency: \t%f\n") % (get_subdev_name()) % target_freq % double(int(target_freq/1e3)*1e3) % get_scaled_rf_freq() % get_scaled_if_freq() << std::endl;
+ "\tIF Frequency: \t%f\n") % (get_subdev_name()) % target_freq % double(int(target_freq/1e3)*1e3) % get_scaled_rf_freq() % get_scaled_if_freq() ;
get_locked();
test_rf_filter_robustness();
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"\nTVRX2 (%s): RSSI = %f dBm\n"
- ) % (get_subdev_name()) % (get_rssi().to_real()) << std::endl;
+ ) % (get_subdev_name()) % (get_rssi().to_real()) ;
return _lo_freq;
}
@@ -1783,9 +1783,9 @@ static double gain_to_if_gain_dac(double &gain){
//calculate the voltage for the aux dac
double dac_volts = gain*slope + min_volts;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 IF Gain: %f dB, dac_volts: %f V"
- ) % gain % dac_volts << std::endl;
+ ) % gain % dac_volts ;
//the actual gain setting
gain = (dac_volts - min_volts)/slope;
@@ -1847,9 +1847,9 @@ double tvrx2::set_bandwidth(double bandwidth){
//update register
send_reg(0x13, 0x13);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("TVRX") << boost::format(
"TVRX2 (%s) Bandwidth (lp_fc): %f Hz, reg: %d"
- ) % (get_subdev_name()) % _bandwidth % (int(_tda18272hnm_regs.lp_fc)) << std::endl;
+ ) % (get_subdev_name()) % _bandwidth % (int(_tda18272hnm_regs.lp_fc)) ;
return _bandwidth;
}
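One structural change in db_tvrx2.cpp above is worth noting: the filter-robustness report is assembled in a std::stringstream and handed to UHD_LOGGER_DEBUG("TVRX") as a single record instead of being streamed line-by-line through UHD_LOGV. A sketch of that shape under the same assumed API; log_filter_report() and its margins map are illustrative placeholders for _filter_margins/_filter_ratings:

#include <uhd/utils/log.hpp>
#include <map>
#include <sstream>
#include <string>

static void log_filter_report(const std::map<std::string, double>& margins)
{
    // Build the whole multi-line report first, then emit it as one record so
    // its lines cannot interleave with log output from other threads.
    std::stringstream report;
    report << "RF Filter Robustness Results:";
    for (const auto& entry : margins) {
        report << "\n\t" << entry.first << ":\tMargin = " << entry.second;
    }
    UHD_LOGGER_DEBUG("TVRX") << report.str();
}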
diff --git a/host/lib/usrp/dboard/db_twinrx.cpp b/host/lib/usrp/dboard/db_twinrx.cpp
index 20adbf849..cedc26c36 100644
--- a/host/lib/usrp/dboard/db_twinrx.cpp
+++ b/host/lib/usrp/dboard/db_twinrx.cpp
@@ -265,7 +265,7 @@ public:
// Add workers to expert
//---------------------------------------------------------
//Channel (front-end) specific
- BOOST_FOREACH(const std::string& fe, _fe_names) {
+ for(const std::string& fe: _fe_names) {
expert_factory::add_worker_node<twinrx_freq_path_expert>(_expert, _expert->node_retriever(), fe);
expert_factory::add_worker_node<twinrx_freq_coercion_expert>(_expert, _expert->node_retriever(), fe);
expert_factory::add_worker_node<twinrx_chan_gain_expert>(_expert, _expert->node_retriever(), fe);
diff --git a/host/lib/usrp/dboard/db_ubx.cpp b/host/lib/usrp/dboard/db_ubx.cpp
index 3dd0b1c84..d25aabe98 100644
--- a/host/lib/usrp/dboard/db_ubx.cpp
+++ b/host/lib/usrp/dboard/db_ubx.cpp
@@ -27,7 +27,7 @@
#include <uhd/usrp/dboard_manager.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/static.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/assign/list_of.hpp>
@@ -300,7 +300,7 @@ public:
{
std::vector<double> rates = _iface->get_clock_rates(dboard_iface::UNIT_RX);
double highest_rate = 0.0;
- BOOST_FOREACH(double rate, rates)
+ for(double rate: rates)
{
if (rate <= pfd_freq_max and rate > highest_rate)
highest_rate = rate;
@@ -312,7 +312,7 @@ public:
{
std::vector<double> rates = _iface->get_clock_rates(dboard_iface::UNIT_TX);
double highest_rate = 0.0;
- BOOST_FOREACH(double rate, rates)
+ for(double rate: rates)
{
if (rate <= pfd_freq_max and rate > highest_rate)
highest_rate = rate;
@@ -370,7 +370,7 @@ public:
_rxlo1 = max287x_iface::make<max2870>(boost::bind(&ubx_xcvr::write_spi_regs, this, RXLO1, _1));
_rxlo2 = max287x_iface::make<max2870>(boost::bind(&ubx_xcvr::write_spi_regs, this, RXLO2, _1));
std::vector<max287x_iface::sptr> los = boost::assign::list_of(_txlo1)(_txlo2)(_rxlo1)(_rxlo2);
- BOOST_FOREACH(max287x_iface::sptr lo, los)
+ for(max287x_iface::sptr lo: los)
{
lo->set_auto_retune(false);
lo->set_muxout_mode(max287x_iface::MUXOUT_DLD);
@@ -384,7 +384,7 @@ public:
_rxlo1 = max287x_iface::make<max2871>(boost::bind(&ubx_xcvr::write_spi_regs, this, RXLO1, _1));
_rxlo2 = max287x_iface::make<max2871>(boost::bind(&ubx_xcvr::write_spi_regs, this, RXLO2, _1));
std::vector<max287x_iface::sptr> los = boost::assign::list_of(_txlo1)(_txlo2)(_rxlo1)(_rxlo2);
- BOOST_FOREACH(max287x_iface::sptr lo, los)
+ for(max287x_iface::sptr lo: los)
{
lo->set_auto_retune(false);
//lo->set_cycle_slip_mode(true); // tried it - caused longer lock times
@@ -547,7 +547,7 @@ private:
{
boost::mutex::scoped_lock lock(_spi_mutex);
ROUTE_SPI(_iface, dest);
- BOOST_FOREACH(uint32_t value, values)
+ for(uint32_t value: values)
WRITE_SPI(_iface, value);
}
@@ -749,7 +749,7 @@ private:
_ubx_tx_atten_val = ((attn_code & 0x3F) << 10);
set_gpio_field(TX_GAIN, attn_code);
write_gpio();
- UHD_LOGV(rarely) << boost::format("UBX TX Gain: %f dB, Code: %d, IO Bits 0x%04x") % gain % attn_code % _ubx_tx_atten_val << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX TX Gain: %f dB, Code: %d, IO Bits 0x%04x") % gain % attn_code % _ubx_tx_atten_val ;
_tx_gain = gain;
return gain;
}
@@ -762,7 +762,7 @@ private:
_ubx_rx_atten_val = ((attn_code & 0x3F) << 10);
set_gpio_field(RX_GAIN, attn_code);
write_gpio();
- UHD_LOGV(rarely) << boost::format("UBX RX Gain: %f dB, Code: %d, IO Bits 0x%04x") % gain % attn_code % _ubx_rx_atten_val << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX RX Gain: %f dB, Code: %d, IO Bits 0x%04x") % gain % attn_code % _ubx_rx_atten_val ;
_rx_gain = gain;
return gain;
}
@@ -786,18 +786,18 @@ private:
property_tree::sptr subtree = this->get_tx_subtree();
device_addr_t tune_args = subtree->access<device_addr_t>("tune_args").get();
is_int_n = boost::iequals(tune_args.get("mode_n",""), "integer");
- UHD_LOGV(rarely) << boost::format("UBX TX: the requested frequency is %f MHz") % (freq/1e6) << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX TX: the requested frequency is %f MHz") % (freq/1e6) ;
double target_pfd_freq = _tx_target_pfd_freq;
if (is_int_n and tune_args.has_key("int_n_step"))
{
target_pfd_freq = tune_args.cast<double>("int_n_step", _tx_target_pfd_freq);
if (target_pfd_freq > _tx_target_pfd_freq)
{
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("UBX")
<< boost::format("Requested int_n_step of %f MHz too large, clipping to %f MHz")
% (target_pfd_freq/1e6)
% (_tx_target_pfd_freq/1e6)
- << std::endl;
+ ;
target_pfd_freq = _tx_target_pfd_freq;
}
}
@@ -925,7 +925,7 @@ private:
_txlo1_freq = freq_lo1;
_txlo2_freq = freq_lo2;
- UHD_LOGV(rarely) << boost::format("UBX TX: the actual frequency is %f MHz") % (_tx_freq/1e6) << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX TX: the actual frequency is %f MHz") % (_tx_freq/1e6) ;
return _tx_freq;
}
@@ -938,7 +938,7 @@ private:
double ref_freq = _iface->get_clock_rate(dboard_iface::UNIT_RX);
bool is_int_n = false;
- UHD_LOGV(rarely) << boost::format("UBX RX: the requested frequency is %f MHz") % (freq/1e6) << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX RX: the requested frequency is %f MHz") % (freq/1e6) ;
property_tree::sptr subtree = this->get_rx_subtree();
device_addr_t tune_args = subtree->access<device_addr_t>("tune_args").get();
@@ -949,11 +949,11 @@ private:
target_pfd_freq = tune_args.cast<double>("int_n_step", _rx_target_pfd_freq);
if (target_pfd_freq > _rx_target_pfd_freq)
{
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("UBX")
<< boost::format("Requested int_n_step of %f Mhz too large, clipping to %f MHz")
% (target_pfd_freq/1e6)
% (_rx_target_pfd_freq/1e6)
- << std::endl;
+ ;
target_pfd_freq = _rx_target_pfd_freq;
}
}
@@ -1121,7 +1121,7 @@ private:
_rxlo1_freq = freq_lo1;
_rxlo2_freq = freq_lo2;
- UHD_LOGV(rarely) << boost::format("UBX RX: the actual frequency is %f MHz") % (_rx_freq/1e6) << std::endl;
+ UHD_LOGGER_TRACE("UBX") << boost::format("UBX RX: the actual frequency is %f MHz") % (_rx_freq/1e6) ;
return _rx_freq;
}
diff --git a/host/lib/usrp/dboard/db_unknown.cpp b/host/lib/usrp/dboard/db_unknown.cpp
index 2ed50cd91..1a63c9233 100644
--- a/host/lib/usrp/dboard/db_unknown.cpp
+++ b/host/lib/usrp/dboard/db_unknown.cpp
@@ -18,12 +18,11 @@
#include <uhd/types/ranges.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/static.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/usrp/dboard_base.hpp>
#include <uhd/usrp/dboard_manager.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/tuple/tuple.hpp>
#include <vector>
@@ -43,13 +42,13 @@ static void warn_if_old_rfx(const dboard_id_t &dboard_id, const std::string &xx)
(old_ids_t("Flex 1800 Classic", 0x0030, 0x0031))
(old_ids_t("Flex 2400 Classic", 0x0007, 0x000b))
;
- BOOST_FOREACH(const old_ids_t &old_id, old_rfx_ids){
+ for(const old_ids_t &old_id: old_rfx_ids){
std::string name; dboard_id_t rx_id, tx_id;
boost::tie(name, rx_id, tx_id) = old_id;
if (
(xx == "RX" and rx_id == dboard_id) or
(xx == "TX" and tx_id == dboard_id)
- ) UHD_MSG(warning) << boost::format(
+ ) UHD_LOGGER_WARNING("unknown_db") << boost::format(
"Detected %s daughterboard %s\n"
"This board requires modification to use.\n"
"See the daughterboard application notes.\n"
diff --git a/host/lib/usrp/dboard/db_wbx_common.cpp b/host/lib/usrp/dboard/db_wbx_common.cpp
index 5afbb1f88..1f665b7e4 100644
--- a/host/lib/usrp/dboard/db_wbx_common.cpp
+++ b/host/lib/usrp/dboard/db_wbx_common.cpp
@@ -21,7 +21,7 @@
#include <uhd/types/sensors.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
using namespace uhd;
using namespace uhd::usrp;
@@ -42,9 +42,9 @@ static int rx_pga0_gain_to_iobits(double &gain){
int attn_code = boost::math::iround(attn*2);
int iobits = ((~attn_code) << RX_ATTN_SHIFT) & RX_ATTN_MASK;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX RX Attenuation: %f dB, Code: %d, IO Bits %x, Mask: %x"
- ) % attn % attn_code % (iobits & RX_ATTN_MASK) % RX_ATTN_MASK << std::endl;
+ ) % attn % attn_code % (iobits & RX_ATTN_MASK) % RX_ATTN_MASK ;
//the actual gain setting
gain = wbx_rx_gain_ranges["PGA0"].stop() - double(attn_code)/2;
@@ -70,7 +70,7 @@ wbx_base::wbx_base(ctor_args_t args) : xcvr_dboard_base(args){
this->get_rx_subtree()->create<device_addr_t>("tune_args").set(device_addr_t());
this->get_rx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&wbx_base::get_locked, this, dboard_iface::UNIT_RX));
- BOOST_FOREACH(const std::string &name, wbx_rx_gain_ranges.keys()){
+ for(const std::string &name: wbx_rx_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&wbx_base::set_rx_gain, this, _1, name))
.set(wbx_rx_gain_ranges[name].start());
@@ -158,7 +158,7 @@ sensor_value_t wbx_base::get_locked(dboard_iface::unit_t unit){
}
void wbx_base::wbx_versionx::write_lo_regs(dboard_iface::unit_t unit, const std::vector<uint32_t> &regs) {
- BOOST_FOREACH(uint32_t reg, regs) {
+ for(uint32_t reg: regs) {
self_base->get_iface()->write_spi(unit, spi_config_t::EDGE_RISE, reg, 32);
}
}
diff --git a/host/lib/usrp/dboard/db_wbx_version2.cpp b/host/lib/usrp/dboard/db_wbx_version2.cpp
index 489291881..d50ae481b 100644
--- a/host/lib/usrp/dboard/db_wbx_version2.cpp
+++ b/host/lib/usrp/dboard/db_wbx_version2.cpp
@@ -23,7 +23,7 @@
#include <uhd/types/sensors.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/usrp/dboard_base.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
@@ -58,9 +58,9 @@ static double tx_pga0_gain_to_dac_volts(double &gain){
//calculate the voltage for the aux dac
double dac_volts = gain*slope + min_volts;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX TX Gain: %f dB, dac_volts: %f V"
- ) % gain % dac_volts << std::endl;
+ ) % gain % dac_volts ;
//the actual gain setting
gain = (dac_volts - min_volts)/slope;
@@ -91,7 +91,7 @@ wbx_base::wbx_version2::wbx_version2(wbx_base *_self_wbx_base) {
// Register TX properties
////////////////////////////////////////////////////////////////////
this->get_tx_subtree()->create<std::string>("name").set("WBXv2 TX");
- BOOST_FOREACH(const std::string &name, wbx_v2_tx_gain_ranges.keys()){
+ for(const std::string &name: wbx_v2_tx_gain_ranges.keys()){
self_base->get_tx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&wbx_base::wbx_version2::set_tx_gain, this, _1, name))
.set(wbx_v2_tx_gain_ranges[name].start());
@@ -165,9 +165,9 @@ double wbx_base::wbx_version2::set_lo_freq(dboard_iface::unit_t unit, double tar
//clip to tuning range
target_freq = wbx_v2_freq_range.clip(target_freq);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
/*
* If the user sets 'mode_n=integer' in the tuning args, the user wishes to
diff --git a/host/lib/usrp/dboard/db_wbx_version3.cpp b/host/lib/usrp/dboard/db_wbx_version3.cpp
index 1bd326e6f..1863dcb44 100644
--- a/host/lib/usrp/dboard/db_wbx_version3.cpp
+++ b/host/lib/usrp/dboard/db_wbx_version3.cpp
@@ -22,7 +22,7 @@
#include <uhd/types/sensors.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/usrp/dboard_base.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
@@ -63,9 +63,9 @@ static int tx_pga0_gain_to_iobits(double &gain){
(attn_code & 1 ? 0 : TX_ATTN_1)
) & TX_ATTN_MASK;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX TX Attenuation: %f dB, Code: %d, IO Bits %x, Mask: %x"
- ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK << std::endl;
+ ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK ;
//the actual gain setting
gain = wbx_v3_tx_gain_ranges["PGA0"].stop() - double(attn_code);
@@ -96,7 +96,7 @@ wbx_base::wbx_version3::wbx_version3(wbx_base *_self_wbx_base) {
// Register TX properties
////////////////////////////////////////////////////////////////////
this->get_tx_subtree()->create<std::string>("name").set("WBXv3 TX");
- BOOST_FOREACH(const std::string &name, wbx_v3_tx_gain_ranges.keys()){
+ for(const std::string &name: wbx_v3_tx_gain_ranges.keys()){
self_base->get_tx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&wbx_base::wbx_version3::set_tx_gain, this, _1, name))
.set(wbx_v3_tx_gain_ranges[name].start());
@@ -196,9 +196,9 @@ double wbx_base::wbx_version3::set_lo_freq(dboard_iface::unit_t unit, double tar
//clip to tuning range
target_freq = wbx_v3_freq_range.clip(target_freq);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
/*
* If the user sets 'mode_n=integer' in the tuning args, the user wishes to
diff --git a/host/lib/usrp/dboard/db_wbx_version4.cpp b/host/lib/usrp/dboard/db_wbx_version4.cpp
index 3cc0f1887..9b31baf89 100644
--- a/host/lib/usrp/dboard/db_wbx_version4.cpp
+++ b/host/lib/usrp/dboard/db_wbx_version4.cpp
@@ -22,7 +22,7 @@
#include <uhd/types/sensors.hpp>
#include <uhd/utils/assert_has.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/usrp/dboard_base.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
@@ -64,9 +64,9 @@ static int tx_pga0_gain_to_iobits(double &gain){
(attn_code & 1 ? 0 : TX_ATTN_1)
) & TX_ATTN_MASK;
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX TX Attenuation: %f dB, Code: %d, IO Bits %x, Mask: %x"
- ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK << std::endl;
+ ) % attn % attn_code % (iobits & TX_ATTN_MASK) % TX_ATTN_MASK ;
//the actual gain setting
gain = wbx_v4_tx_gain_ranges["PGA0"].stop() - double(attn_code);
@@ -103,7 +103,7 @@ wbx_base::wbx_version4::wbx_version4(wbx_base *_self_wbx_base) {
//get_tx_id() will always return GDB ID, so use RX ID to determine WBXv4 vs. WBX-120
if(rx_id == 0x0063) this->get_tx_subtree()->create<std::string>("name").set("WBXv4 TX");
else if(rx_id == 0x0081) this->get_tx_subtree()->create<std::string>("name").set("WBX-120 TX");
- BOOST_FOREACH(const std::string &name, wbx_v4_tx_gain_ranges.keys()){
+ for(const std::string &name: wbx_v4_tx_gain_ranges.keys()){
self_base->get_tx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&wbx_base::wbx_version4::set_tx_gain, this, _1, name))
.set(wbx_v4_tx_gain_ranges[name].start());
@@ -204,9 +204,9 @@ double wbx_base::wbx_version4::set_lo_freq(dboard_iface::unit_t unit, double tar
//clip to tuning range
target_freq = wbx_v4_freq_range.clip(target_freq);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("WBX") << boost::format(
"WBX tune: target frequency %f MHz"
- ) % (target_freq/1e6) << std::endl;
+ ) % (target_freq/1e6) ;
/*
* If the user sets 'mode_n=integer' in the tuning args, the user wishes to
diff --git a/host/lib/usrp/dboard/db_xcvr2450.cpp b/host/lib/usrp/dboard/db_xcvr2450.cpp
index ee69dbdad..fe6eca07b 100644
--- a/host/lib/usrp/dboard/db_xcvr2450.cpp
+++ b/host/lib/usrp/dboard/db_xcvr2450.cpp
@@ -135,9 +135,9 @@ private:
void spi_reset(void);
void send_reg(uint8_t addr){
uint32_t value = _max2829_regs.get_reg(addr);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("XCVR2450") << boost::format(
"XCVR2450: send reg 0x%02x, value 0x%05x"
- ) % int(addr) % value << std::endl;
+ ) % int(addr) % value ;
this->get_iface()->write_spi(
dboard_iface::UNIT_RX,
spi_config_t::EDGE_RISE,
@@ -234,7 +234,7 @@ xcvr2450::xcvr2450(ctor_args_t args) : xcvr_dboard_base(args){
.set_publisher(boost::bind(&xcvr2450::get_locked, this));
this->get_rx_subtree()->create<sensor_value_t>("sensors/rssi")
.set_publisher(boost::bind(&xcvr2450::get_rssi, this));
- BOOST_FOREACH(const std::string &name, xcvr_rx_gain_ranges.keys()){
+ for(const std::string &name: xcvr_rx_gain_ranges.keys()){
this->get_rx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&xcvr2450::set_rx_gain, this, _1, name))
.set(xcvr_rx_gain_ranges[name].start());
@@ -270,7 +270,7 @@ xcvr2450::xcvr2450(ctor_args_t args) : xcvr_dboard_base(args){
.set("XCVR2450 TX");
this->get_tx_subtree()->create<sensor_value_t>("sensors/lo_locked")
.set_publisher(boost::bind(&xcvr2450::get_locked, this));
- BOOST_FOREACH(const std::string &name, xcvr_tx_gain_ranges.keys()){
+ for(const std::string &name: xcvr_tx_gain_ranges.keys()){
this->get_tx_subtree()->create<double>("gains/"+name+"/value")
.set_coercer(boost::bind(&xcvr2450::set_tx_gain, this, _1, name))
.set(xcvr_tx_gain_ranges[name].start());
@@ -391,20 +391,20 @@ double xcvr2450::set_lo_freq_core(double target_freq){
double N = double(intdiv) + double(fracdiv)/double(1 << 16);
_lo_freq = (N*ref_freq)/(scaler*R*_ad9515div);
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("XCVR2450")
<< boost::format("XCVR2450 tune:\n")
<< boost::format(" R=%d, N=%f, ad9515=%d, scaler=%f\n") % R % N % _ad9515div % scaler
<< boost::format(" Ref Freq=%fMHz\n") % (ref_freq/1e6)
<< boost::format(" Target Freq=%fMHz\n") % (target_freq/1e6)
<< boost::format(" Actual Freq=%fMHz\n") % (_lo_freq/1e6)
- << std::endl;
+ ;
//high-high band or low-high band?
if(_lo_freq > (5.35e9 + 5.47e9)/2.0){
- UHD_LOGV(often) << "XCVR2450 tune: Using high-high band" << std::endl;
+ UHD_LOGGER_TRACE("XCVR2450") << "XCVR2450 tune: Using high-high band" ;
_max2829_regs.band_select_802_11a = max2829_regs_t::BAND_SELECT_802_11A_5_47GHZ_TO_5_875GHZ;
}else{
- UHD_LOGV(often) << "XCVR2450 tune: Using low-high band" << std::endl;
+ UHD_LOGGER_TRACE("XCVR2450") << "XCVR2450 tune: Using low-high band" ;
_max2829_regs.band_select_802_11a = max2829_regs_t::BAND_SELECT_802_11A_4_9GHZ_TO_5_35GHZ;
}
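The trace output above records the fractional-N tuning arithmetic, and the branch that follows splits the 802.11a band at (5.35 GHz + 5.47 GHz)/2 = 5.41 GHz. A numeric sketch of the LO calculation (all divider values below are illustrative assumptions, not values computed by the driver):

    #include <cstdio>

    int main() {
        const double ref_freq = 40e6;                 // assumed reference frequency
        const int    R = 1, ad9515div = 3;            // assumed dividers
        const double scaler = 4.0 / 5.0;              // assumed band-dependent scaler
        const int    intdiv = 242, fracdiv = 11796;   // assumed divider words
        const double N = double(intdiv) + double(fracdiv) / double(1 << 16);
        const double lo_freq = (N * ref_freq) / (scaler * R * ad9515div);
        std::printf("N=%.5f -> LO=%.3f MHz\n", N, lo_freq / 1e6);
        return 0;
    }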
@@ -655,9 +655,9 @@ double xcvr2450::set_rx_bandwidth(double bandwidth){
//update register
send_reg(0x7);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("XCVR2450") << boost::format(
"XCVR2450 RX Bandwidth (lp_fc): %f Hz, coarse reg: %d, fine reg: %d"
- ) % _rx_bandwidth % (int(_max2829_regs.rx_lpf_coarse_adj)) % (int(_max2829_regs.rx_lpf_fine_adj)) << std::endl;
+ ) % _rx_bandwidth % (int(_max2829_regs.rx_lpf_coarse_adj)) % (int(_max2829_regs.rx_lpf_fine_adj)) ;
return 2.0*_rx_bandwidth;
}
@@ -675,9 +675,9 @@ double xcvr2450::set_tx_bandwidth(double bandwidth){
//update register
send_reg(0x7);
- UHD_LOGV(often) << boost::format(
+ UHD_LOGGER_TRACE("XCVR2450") << boost::format(
"XCVR2450 TX Bandwidth (lp_fc): %f Hz, coarse reg: %d"
- ) % _tx_bandwidth % (int(_max2829_regs.tx_lpf_coarse_adj)) << std::endl;
+ ) % _tx_bandwidth % (int(_max2829_regs.tx_lpf_coarse_adj)) ;
//convert lowpass back to complex bandpass bandwidth
return 2.0*_tx_bandwidth;
diff --git a/host/lib/usrp/dboard/twinrx/twinrx_ctrl.cpp b/host/lib/usrp/dboard/twinrx/twinrx_ctrl.cpp
index 3bb5931bc..00be82e1a 100644
--- a/host/lib/usrp/dboard/twinrx/twinrx_ctrl.cpp
+++ b/host/lib/usrp/dboard/twinrx/twinrx_ctrl.cpp
@@ -87,10 +87,10 @@ public:
//Initialize clocks and LO
bool found_rate = false;
- BOOST_FOREACH(double rate, _db_iface->get_clock_rates(dboard_iface::UNIT_TX)) {
+ for(double rate: _db_iface->get_clock_rates(dboard_iface::UNIT_TX)) {
found_rate |= uhd::math::frequencies_are_equal(rate, TWINRX_DESIRED_REFERENCE_FREQ);
}
- BOOST_FOREACH(double rate, _db_iface->get_clock_rates(dboard_iface::UNIT_RX)) {
+ for(double rate: _db_iface->get_clock_rates(dboard_iface::UNIT_RX)) {
found_rate |= uhd::math::frequencies_are_equal(rate, TWINRX_DESIRED_REFERENCE_FREQ);
}
if (not found_rate) {
@@ -506,7 +506,7 @@ private: //Functions
void _write_lo_spi(dboard_iface::unit_t unit, const std::vector<uint32_t> &regs)
{
- BOOST_FOREACH(uint32_t reg, regs) {
+ for(uint32_t reg: regs) {
spi_config_t spi_config = spi_config_t(spi_config_t::EDGE_RISE);
spi_config.use_custom_divider = true;
spi_config.divider = 67;
diff --git a/host/lib/usrp/dboard/twinrx/twinrx_experts.cpp b/host/lib/usrp/dboard/twinrx/twinrx_experts.cpp
index f4a1b8ac4..c6a2d42c0 100644
--- a/host/lib/usrp/dboard/twinrx/twinrx_experts.cpp
+++ b/host/lib/usrp/dboard/twinrx/twinrx_experts.cpp
@@ -18,6 +18,7 @@
#include "twinrx_experts.hpp"
#include "twinrx_gain_tables.hpp"
#include <uhd/utils/math.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/dict.hpp>
#include <uhd/types/ranges.hpp>
@@ -486,7 +487,7 @@ void twinrx_ant_gain_expert::resolve()
(_ch0_preamp2 != _ch1_preamp2) or
(_ch0_lb_preamp_presel != _ch1_lb_preamp_presel))
{
- UHD_MSG(warning) << "incompatible gain settings for antenna sharing. temporarily using Ch0 settings for Ch1.";
+ UHD_LOGGER_WARNING("TWINRX") << "incompatible gain settings for antenna sharing. temporarily using Ch0 settings for Ch1.";
}
_ant0_input_atten = _ch0_input_atten;
_ant0_preamp1 = _ch0_preamp1;
@@ -504,7 +505,7 @@ void twinrx_ant_gain_expert::resolve()
(_ch0_preamp2 != _ch1_preamp2) or
(_ch0_lb_preamp_presel != _ch1_lb_preamp_presel))
{
- UHD_MSG(warning) << "incompatible gain settings for antenna sharing. temporarily using Ch0 settings for Ch1.";
+ UHD_LOGGER_WARNING("TWINRX") << "incompatible gain settings for antenna sharing. temporarily using Ch0 settings for Ch1.";
}
_ant1_input_atten = _ch0_input_atten;
_ant1_preamp1 = _ch0_preamp1;
diff --git a/host/lib/usrp/dboard_eeprom.cpp b/host/lib/usrp/dboard_eeprom.cpp
index 9c748f556..900d28e80 100644
--- a/host/lib/usrp/dboard_eeprom.cpp
+++ b/host/lib/usrp/dboard_eeprom.cpp
@@ -19,7 +19,6 @@
#include <uhd/usrp/dboard_eeprom.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/lexical_cast.hpp>
#include <algorithm>
@@ -68,7 +67,7 @@ static uint8_t checksum(const byte_vector_t &bytes){
for (size_t i = 0; i < std::min(bytes.size(), size_t(DB_EEPROM_CHKSUM)); i++){
sum -= int(bytes.at(i));
}
- UHD_LOGV(often) << boost::format("sum: 0x%02x") % sum << std::endl;
+ UHD_LOG_DEBUG("DB_EEPROM", boost::format("byte sum: 0x%02x") % sum)
return uint8_t(sum);
}
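The helper above accumulates a subtractive byte sum and truncates it to eight bits. A minimal standalone sketch of the same arithmetic (the starting value of the sum and the DB_EEPROM_CHKSUM offset are not visible in this hunk, so both are assumed here):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        const size_t chksum_offset = 3;  // assumed offset of the checksum byte
        std::vector<uint8_t> bytes = {0xDB, 0x00, 0x42, 0x00};  // example EEPROM bytes
        int sum = 0;  // assumed starting value
        for (size_t i = 0; i < std::min(bytes.size(), chksum_offset); i++) {
            sum -= int(bytes.at(i));  // subtractive accumulation, as in checksum()
        }
        std::printf("byte sum: 0x%02x\n", uint8_t(sum));  // truncated to 8 bits
        return 0;
    }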
@@ -82,11 +81,8 @@ void dboard_eeprom_t::load(i2c_iface &iface, uint8_t addr){
std::ostringstream ss;
for (size_t i = 0; i < bytes.size(); i++){
- ss << boost::format(
- "eeprom byte[0x%02x] = 0x%02x") % i % int(bytes.at(i)
- ) << std::endl;
+ UHD_LOG_TRACE("DB_EEPROM",boost::format("eeprom byte[0x%02x] = 0x%02x") % i % int(bytes.at(i)))
}
- UHD_LOGV(often) << ss.str() << std::endl;
try{
UHD_ASSERT_THROW(bytes.size() >= DB_EEPROM_CLEN);
diff --git a/host/lib/usrp/dboard_manager.cpp b/host/lib/usrp/dboard_manager.cpp
index 56cd08fd7..87b0c9f9c 100644
--- a/host/lib/usrp/dboard_manager.cpp
+++ b/host/lib/usrp/dboard_manager.cpp
@@ -17,7 +17,7 @@
#include "dboard_ctor_args.hpp"
#include <uhd/usrp/dboard_manager.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/utils/static.hpp>
@@ -26,7 +26,6 @@
#include <boost/tuple/tuple.hpp>
#include <boost/format.hpp>
#include <boost/bind.hpp>
-#include <boost/foreach.hpp>
#include <boost/assign/list_of.hpp>
using namespace uhd;
@@ -97,7 +96,7 @@ static void register_dboard_key(
const std::vector<std::string> &subdev_names,
dboard_manager::dboard_ctor_t db_container_ctor
){
- UHD_LOGV(always) << "registering: " << name << std::endl;
+ // UHD_LOGGER_TRACE("DBMGR") << "registering: " << name;
if (get_id_to_args_map().has_key(dboard_key)){
if (dboard_key.is_xcvr()) throw uhd::key_error(str(boost::format(
@@ -156,7 +155,7 @@ void dboard_manager::register_dboard_restricted(
std::string dboard_id_t::to_cname(void) const{
std::string cname;
- BOOST_FOREACH(const dboard_key_t &key, get_id_to_args_map().keys()){
+ for(const dboard_key_t &key: get_id_to_args_map().keys()){
if (
(not key.is_xcvr() and *this == key.xx_id()) or
(key.is_xcvr() and (*this == key.rx_id() or *this == key.tx_id()))
@@ -248,11 +247,11 @@ dboard_manager_impl::dboard_manager_impl(
this->init(rx_dboard_id, tx_dboard_id, subtree, defer_db_init);
}
catch(const std::exception &e){
- UHD_MSG(error) << boost::format(
+ UHD_LOGGER_ERROR("DBMGR") << boost::format(
"The daughterboard manager encountered a recoverable error in init.\n"
"Loading the \"unknown\" daughterboard implementations to continue.\n"
"The daughterboard cannot operate until this error is resolved.\n"
- ) << e.what() << std::endl;
+ ) << e.what() ;
//clean up the stuff added by the call above
if (subtree->exists("rx_frontends")) subtree->remove("rx_frontends");
if (subtree->exists("tx_frontends")) subtree->remove("tx_frontends");
@@ -266,7 +265,7 @@ void dboard_manager_impl::init(
){
//find the dboard key matches for the dboard ids
dboard_key_t rx_dboard_key, tx_dboard_key, xcvr_dboard_key;
- BOOST_FOREACH(const dboard_key_t &key, get_id_to_args_map().keys()){
+ for(const dboard_key_t &key: get_id_to_args_map().keys()){
if (key.is_xcvr()){
if (rx_dboard_id == key.rx_id() and tx_dboard_id == key.tx_id()) xcvr_dboard_key = key;
if (rx_dboard_id == key.rx_id()) rx_dboard_key = key; //kept to handle warning
@@ -280,7 +279,7 @@ void dboard_manager_impl::init(
//warn for invalid dboard id xcvr combinations
if (not xcvr_dboard_key.is_xcvr() and (rx_dboard_key.is_xcvr() or tx_dboard_key.is_xcvr())){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("DBMGR") << boost::format(
"Unknown transceiver board ID combination.\n"
"Is your daughter-board mounted properly?\n"
"RX dboard ID: %s\n"
@@ -322,7 +321,7 @@ void dboard_manager_impl::init(
db_ctor_args.tx_container = db_ctor_args.rx_container; //Same TX and RX container
//create the xcvr object for each subdevice
- BOOST_FOREACH(const std::string &subdev, subdevs){
+ for(const std::string &subdev: subdevs){
db_ctor_args.sd_name = subdev;
db_ctor_args.rx_subtree = subtree->subtree("rx_frontends/" + db_ctor_args.sd_name);
db_ctor_args.tx_subtree = subtree->subtree("tx_frontends/" + db_ctor_args.sd_name);
@@ -373,7 +372,7 @@ void dboard_manager_impl::init(
}
//make the rx subdevs
- BOOST_FOREACH(const std::string &subdev, rx_subdevs){
+ for(const std::string &subdev: rx_subdevs){
db_ctor_args.sd_name = subdev;
db_ctor_args.rx_subtree = subtree->subtree("rx_frontends/" + db_ctor_args.sd_name);
_rx_dboards[subdev] = rx_dboard_ctor(&db_ctor_args);
@@ -412,7 +411,7 @@ void dboard_manager_impl::init(
}
//make the tx subdevs
- BOOST_FOREACH(const std::string &subdev, tx_subdevs){
+ for(const std::string &subdev: tx_subdevs){
db_ctor_args.sd_name = subdev;
db_ctor_args.tx_subtree = subtree->subtree("tx_frontends/" + db_ctor_args.sd_name);
_tx_dboards[subdev] = tx_dboard_ctor(&db_ctor_args);
@@ -436,11 +435,11 @@ void dboard_manager_impl::init(
}
void dboard_manager_impl::initialize_dboards(void) {
- BOOST_FOREACH(dboard_base::sptr& _rx_container, _rx_containers) {
+ for(dboard_base::sptr& _rx_container: _rx_containers) {
_rx_container->initialize();
}
- BOOST_FOREACH(dboard_base::sptr& _tx_container, _tx_containers) {
+ for(dboard_base::sptr& _tx_container: _tx_containers) {
_tx_container->initialize();
}
}
@@ -457,7 +456,7 @@ void dboard_manager_impl::set_nice_dboard_if(void){
;
//set nice settings on each unit
- BOOST_FOREACH(dboard_iface::unit_t unit, units){
+ for(dboard_iface::unit_t unit: units){
_iface->set_gpio_ddr(unit, 0x0000); //all inputs
_iface->set_gpio_out(unit, 0x0000); //all low
_iface->set_pin_ctrl(unit, 0x0000); //all gpio
diff --git a/host/lib/usrp/device3/device3_impl.cpp b/host/lib/usrp/device3/device3_impl.cpp
index 50598a519..35faf601f 100644
--- a/host/lib/usrp/device3/device3_impl.cpp
+++ b/host/lib/usrp/device3/device3_impl.cpp
@@ -18,17 +18,16 @@
#include "device3_impl.hpp"
#include "graph_impl.hpp"
#include "ctrl_iface.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/rfnoc/block_ctrl_base.hpp>
#include <boost/make_shared.hpp>
#include <algorithm>
-#define UHD_DEVICE3_LOG() UHD_LOGV(never)
+#define UHD_DEVICE3_LOG() UHD_LOGGER_TRACE("DEVICE3")
using namespace uhd::usrp;
device3_impl::device3_impl()
- : _sid_framer(0)
{
_type = uhd::device::USRP;
_async_md.reset(new async_md_type(1000/*messages deep*/));
@@ -70,7 +69,7 @@ void device3_impl::merge_channel_defs(
// - All block indices that are in chan_ids may be overwritten in the channel definition
// - If the channels in chan_ids are not yet in the property tree channel list,
// they are appended.
- BOOST_FOREACH(const std::string &chan_idx, curr_channels) {
+ for(const std::string &chan_idx: curr_channels) {
if (_tree->exists(chans_root / chan_idx)) {
rfnoc::block_id_t chan_block_id = _tree->access<rfnoc::block_id_t>(chans_root / chan_idx).get();
if (std::find(chan_ids.begin(), chan_ids.end(), chan_block_id) != chan_ids.end()) {
@@ -105,8 +104,7 @@ void device3_impl::enumerate_rfnoc_blocks(
size_t n_blocks,
size_t base_port,
const uhd::sid_t &base_sid,
- uhd::device_addr_t transport_args,
- uhd::endianness_t endianness
+ uhd::device_addr_t transport_args
) {
// entries that are already connected to this block
uhd::sid_t ctrl_sid = base_sid;
@@ -120,7 +118,7 @@ void device3_impl::enumerate_rfnoc_blocks(
// TODO: Clear out all the old block control classes
// 3) Create new block controllers
for (size_t i = 0; i < n_blocks; i++) {
- UHD_DEVICE3_LOG() << "[RFNOC] ------- Block Setup -----------" << std::endl;
+ UHD_DEVICE3_LOG() << "[RFNOC] ------- Block Setup -----------" ;
// First, make a transport for port number zero, because we always need that:
ctrl_sid.set_dst_xbarport(base_port + i);
ctrl_sid.set_dst_blockport(0);
@@ -131,24 +129,24 @@ void device3_impl::enumerate_rfnoc_blocks(
);
UHD_DEVICE3_LOG() << str(boost::format("Setting up NoC-Shell Control for port #0 (SID: %s)...") % xport.send_sid.to_pp_string_hex());
uhd::rfnoc::ctrl_iface::sptr ctrl = uhd::rfnoc::ctrl_iface::make(
- endianness == ENDIANNESS_BIG,
+ xport.endianness == uhd::ENDIANNESS_BIG,
xport.send,
xport.recv,
xport.send_sid,
str(boost::format("CE_%02d_Port_%02X") % i % ctrl_sid.get_dst_endpoint())
);
- UHD_DEVICE3_LOG() << "OK" << std::endl;
+ UHD_DEVICE3_LOG() << "OK" ;
uint64_t noc_id = ctrl->peek64(uhd::rfnoc::SR_READBACK_REG_ID);
- UHD_DEVICE3_LOG() << str(boost::format("Port %d: Found NoC-Block with ID %016X.") % int(ctrl_sid.get_dst_endpoint()) % noc_id) << std::endl;
+ UHD_DEVICE3_LOG() << str(boost::format("Port %d: Found NoC-Block with ID %016X.") % int(ctrl_sid.get_dst_endpoint()) % noc_id) ;
uhd::rfnoc::make_args_t make_args;
uhd::rfnoc::blockdef::sptr block_def = uhd::rfnoc::blockdef::make_from_noc_id(noc_id);
if (not block_def) {
- UHD_DEVICE3_LOG() << "Using default block configuration." << std::endl;
+ UHD_DEVICE3_LOG() << "Using default block configuration." ;
block_def = uhd::rfnoc::blockdef::make_from_noc_id(uhd::rfnoc::DEFAULT_NOC_ID);
}
UHD_ASSERT_THROW(block_def);
make_args.ctrl_ifaces[0] = ctrl;
- BOOST_FOREACH(const size_t port_number, block_def->get_all_port_numbers()) {
+ for(const size_t port_number: block_def->get_all_port_numbers()) {
if (port_number == 0) { // We've already set this up
continue;
}
@@ -160,21 +158,23 @@ void device3_impl::enumerate_rfnoc_blocks(
);
UHD_DEVICE3_LOG() << str(boost::format("Setting up NoC-Shell Control for port #%d (SID: %s)...") % port_number % xport1.send_sid.to_pp_string_hex());
uhd::rfnoc::ctrl_iface::sptr ctrl1 = uhd::rfnoc::ctrl_iface::make(
- endianness == ENDIANNESS_BIG,
+ xport1.endianness == uhd::ENDIANNESS_BIG,
xport1.send,
xport1.recv,
xport1.send_sid,
str(boost::format("CE_%02d_Port_%02d") % i % ctrl_sid.get_dst_endpoint())
);
- UHD_DEVICE3_LOG() << "OK" << std::endl;
+ UHD_DEVICE3_LOG() << "OK" ;
make_args.ctrl_ifaces[port_number] = ctrl1;
}
make_args.base_address = xport.send_sid.get_dst();
make_args.device_index = device_index;
make_args.tree = subtree;
- make_args.is_big_endian = (endianness == ENDIANNESS_BIG);
- _rfnoc_block_ctrl.push_back(uhd::rfnoc::block_ctrl_base::make(make_args, noc_id));
+ { //Critical section for block_ctrl vector access
+ boost::lock_guard<boost::mutex> lock(_block_ctrl_mutex);
+ _rfnoc_block_ctrl.push_back(uhd::rfnoc::block_ctrl_base::make(make_args, noc_id));
+ }
}
}
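The per-block loop above reads the 64-bit NoC-ID back over the newly created control interface and then looks for a matching block definition, falling back to the default when none is found. A condensed sketch of just that lookup (the noc_id value is an arbitrary example; blockdef::make_from_noc_id and DEFAULT_NOC_ID are the same UHD calls used above):

    #include <uhd/rfnoc/blockdef.hpp>
    #include <uhd/rfnoc/constants.hpp>
    #include <cstdint>
    #include <iostream>

    void lookup_blockdef_example() {
        // Arbitrary example ID, as if read back from SR_READBACK_REG_ID.
        const uint64_t noc_id = 0x12AD100000000001ULL;
        uhd::rfnoc::blockdef::sptr block_def =
            uhd::rfnoc::blockdef::make_from_noc_id(noc_id);
        if (not block_def) {
            // No XML block definition matched this ID: use the default block.
            block_def = uhd::rfnoc::blockdef::make_from_noc_id(uhd::rfnoc::DEFAULT_NOC_ID);
        }
        std::cout << (block_def ? "Found" : "No") << " block definition" << std::endl;
    }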
diff --git a/host/lib/usrp/device3/device3_impl.hpp b/host/lib/usrp/device3/device3_impl.hpp
index 043379108..196d1fd4e 100644
--- a/host/lib/usrp/device3/device3_impl.hpp
+++ b/host/lib/usrp/device3/device3_impl.hpp
@@ -135,7 +135,6 @@ protected:
virtual uhd::device_addr_t get_tx_hints(size_t) { return uhd::device_addr_t(); };
virtual uhd::device_addr_t get_rx_hints(size_t) { return uhd::device_addr_t(); };
- virtual uhd::endianness_t get_transport_endianness(size_t mb_index) = 0;
//! Is called after a streamer is generated
virtual void post_streamer_hooks(uhd::direction_t) {};
@@ -168,16 +167,12 @@ protected:
size_t n_blocks,
size_t base_port,
const uhd::sid_t &base_sid,
- uhd::device_addr_t transport_args,
- uhd::endianness_t endianness
+ uhd::device_addr_t transport_args
);
/***********************************************************************
* Members
**********************************************************************/
- //! A counter, designed to create unique SIDs
- size_t _sid_framer;
-
// TODO: Maybe move these to private
uhd::dict<std::string, boost::weak_ptr<uhd::rx_streamer> > _rx_streamers;
uhd::dict<std::string, boost::weak_ptr<uhd::tx_streamer> > _tx_streamers;
diff --git a/host/lib/usrp/device3/device3_io_impl.cpp b/host/lib/usrp/device3/device3_io_impl.cpp
index 198ee4022..374232972 100644
--- a/host/lib/usrp/device3/device3_io_impl.cpp
+++ b/host/lib/usrp/device3/device3_io_impl.cpp
@@ -23,7 +23,7 @@
#include <uhd/rfnoc/sink_block_ctrl_base.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include "../common/async_packet_handler.hpp"
#include "../../transport/super_recv_packet_handler.hpp"
#include "../../transport/super_send_packet_handler.hpp"
@@ -34,7 +34,8 @@
#include <uhd/transport/zero_copy_flow_ctrl.hpp>
#include <boost/atomic.hpp>
-#define UHD_STREAMER_LOG() UHD_LOGV(never)
+#define UHD_TX_STREAMER_LOG() UHD_LOGGER_TRACE("STREAMER")
+#define UHD_RX_STREAMER_LOG() UHD_LOGGER_TRACE("STREAMER")
using namespace uhd;
using namespace uhd::usrp;
@@ -140,7 +141,7 @@ void generate_channel_list(
}
// Add all remaining args to all channel args
- BOOST_FOREACH(device_addr_t &chan_arg, chan_args_) {
+ for(device_addr_t &chan_arg: chan_args_) {
chan_arg = chan_arg.to_string() + "," + args.args.to_string();
}
@@ -251,7 +252,7 @@ static void handle_rx_flowctrl(
// Super-verbose mode:
//static size_t fc_pkt_count = 0;
- //UHD_MSG(status) << "sending flow ctrl packet " << fc_pkt_count++ << ", acking " << str(boost::format("%04d\tseq_sw==0x%08x") % last_seq % seq32) << std::endl;
+ //UHD_LOGGER_INFO("STREAMER") << "sending flow ctrl packet " << fc_pkt_count++ << ", acking " << str(boost::format("%04d\tseq_sw==0x%08x") % last_seq % seq32) ;
//load packet info
vrt::if_packet_info_t packet_info;
@@ -336,7 +337,7 @@ static size_t get_tx_flow_control_window(
static bool tx_flow_ctrl(
boost::shared_ptr<tx_fc_cache_t> fc_cache,
- zero_copy_if::sptr async_xport,
+ zero_copy_if::sptr async_xport,
uint32_t (*endian_conv)(uint32_t),
void (*unpack)(const uint32_t *packet_buff, vrt::if_packet_info_t &),
managed_buffer::sptr
@@ -351,35 +352,38 @@ static bool tx_flow_ctrl(
return true;
}
- // Look for a flow control message to update the space available in the buffer.
+ // Look for a flow control message to update the space available in the buffer.
// A minimal timeout is used because larger timeouts can cause the thread to be
// scheduled out for too long at high data rates and result in underruns.
- managed_recv_buffer::sptr buff = async_xport->get_recv_buff(0.000001);
- if (buff)
- {
- vrt::if_packet_info_t if_packet_info;
- if_packet_info.num_packet_words32 = buff->size()/sizeof(uint32_t);
- const uint32_t *packet_buff = buff->cast<const uint32_t *>();
- try {
- unpack(packet_buff, if_packet_info);
- }
- catch(const std::exception &ex)
- {
- UHD_MSG(error) << "Error unpacking async flow control packet: " << ex.what() << std::endl;
- continue;
- }
-
- if (if_packet_info.packet_type != vrt::if_packet_info_t::PACKET_TYPE_FC)
- {
- UHD_MSG(error) << "Unexpected packet type received by flow control handler: " << if_packet_info.packet_type << std::endl;
- continue;
- }
-
- // update the amount of space
- size_t seq_ack = endian_conv(packet_buff[if_packet_info.num_header_words32+1]);
- fc_cache->space += (seq_ack - fc_cache->last_seq_ack) & HW_SEQ_NUM_MASK;
- fc_cache->last_seq_ack = seq_ack;
- }
+ managed_recv_buffer::sptr buff = async_xport->get_recv_buff(0.000001);
+ if (buff)
+ {
+ vrt::if_packet_info_t if_packet_info;
+ if_packet_info.num_packet_words32 = buff->size()/sizeof(uint32_t);
+ const uint32_t *packet_buff = buff->cast<const uint32_t *>();
+ try {
+ unpack(packet_buff, if_packet_info);
+ }
+ catch(const std::exception &ex)
+ {
+ UHD_LOG_ERROR("TX FLOW CTRL", "Error unpacking async flow control packet: " << ex.what());
+ continue;
+ }
+
+ if (if_packet_info.packet_type != vrt::if_packet_info_t::PACKET_TYPE_FC)
+ {
+ UHD_LOG_ERROR(
+ "TX FLOW CTRL",
+ "Unexpected packet type received by flow control handler: " << if_packet_info.packet_type
+ );
+ continue;
+ }
+
+ // update the amount of space
+ size_t seq_ack = endian_conv(packet_buff[if_packet_info.num_header_words32+1]);
+ fc_cache->space += (seq_ack - fc_cache->last_seq_ack) & HW_SEQ_NUM_MASK;
+ fc_cache->last_seq_ack = seq_ack;
+ }
}
return false;
}
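The space update above uses modular sequence-number arithmetic so a hardware counter wrap does not blow up the window. A worked sketch with an assumed 12-bit mask (the real HW_SEQ_NUM_MASK is defined elsewhere):

    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t HW_SEQ_NUM_MASK = 0xFFF;  // assumed hardware sequence-number width
        size_t space = 10, last_seq_ack = 0xFFE;
        const size_t seq_ack = 0x003;          // counter wrapped past zero
        // Same update as above: adds 5, not a huge bogus value, despite the wrap.
        space += (seq_ack - last_seq_ack) & HW_SEQ_NUM_MASK;
        last_seq_ack = seq_ack;
        std::printf("space=%zu last_seq_ack=0x%03zx\n", space, last_seq_ack);
        return 0;
    }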
@@ -434,7 +438,7 @@ static void handle_tx_async_msgs(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "Error parsing async message packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("STREAMER") << "Error parsing async message packet: " << ex.what() ;
return;
}
@@ -454,10 +458,13 @@ static void handle_tx_async_msgs(
async_info->stream_channel
);
- // Filter out any flow control messages and cache the rest
+ // Filter out any flow control messages and cache the rest
if (metadata.event_code == DEVICE3_ASYNC_EVENT_CODE_FLOW_CTRL)
{
- UHD_MSG(error) << "Unexpected flow control message found in async message handling" << std::endl;
+ UHD_LOG_ERROR(
+ "TX ASYNC",
+ "Unexpected flow control message found in async message handling"
+ );
} else {
async_info->async_queue->push_with_pop_on_full(metadata);
metadata.channel = async_info->device_channel;
@@ -478,8 +485,8 @@ bool device3_impl::recv_async_msg(
**********************************************************************/
void device3_impl::update_rx_streamers(double /* rate */)
{
- BOOST_FOREACH(const std::string &block_id, _rx_streamers.keys()) {
- UHD_STREAMER_LOG() << "[Device3] updating RX streamer to " << block_id << std::endl;
+ for(const std::string &block_id: _rx_streamers.keys()) {
+ UHD_RX_STREAMER_LOG() << "updating RX streamer to " << block_id;
boost::shared_ptr<sph::recv_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::recv_packet_streamer>(_rx_streamers[block_id].lock());
if (my_streamer) {
@@ -496,7 +503,7 @@ void device3_impl::update_rx_streamers(double /* rate */)
if (scaling == rfnoc::scalar_node_ctrl::SCALE_UNDEFINED) {
scaling = 1/32767.;
}
- UHD_STREAMER_LOG() << " New tick_rate == " << tick_rate << " New samp_rate == " << samp_rate << " New scaling == " << scaling << std::endl;
+ UHD_RX_STREAMER_LOG() << " New tick_rate == " << tick_rate << " New samp_rate == " << samp_rate << " New scaling == " << scaling ;
my_streamer->set_tick_rate(tick_rate);
my_streamer->set_samp_rate(samp_rate);
@@ -525,7 +532,7 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
for (size_t stream_i = 0; stream_i < chan_list.size(); stream_i++) {
// Get block ID and mb index
uhd::rfnoc::block_id_t block_id = chan_list[stream_i];
- UHD_STREAMER_LOG() << "[RX Streamer] chan " << stream_i << " connecting to " << block_id << std::endl;
+ UHD_RX_STREAMER_LOG() << "chan " << stream_i << " connecting to " << block_id ;
// Update args so args.args is always valid for this particular channel:
args.args = chan_args[stream_i];
size_t mb_index = block_id.get_device_no();
@@ -553,20 +560,20 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
//allocate sid and create transport
uhd::sid_t stream_address = blk_ctrl->get_address(block_port);
- UHD_STREAMER_LOG() << "[RX Streamer] creating rx stream " << rx_hints.to_string() << std::endl;
+ UHD_RX_STREAMER_LOG() << "creating rx stream " << rx_hints.to_string() ;
both_xports_t xport = make_transport(stream_address, RX_DATA, rx_hints);
- UHD_STREAMER_LOG() << std::hex << "[RX Streamer] data_sid = " << xport.send_sid << std::dec << " actual recv_buff_size = " << xport.recv_buff_size << std::endl;
+ UHD_RX_STREAMER_LOG() << std::hex << "data_sid = " << xport.send_sid << std::dec << " actual recv_buff_size = " << xport.recv_buff_size ;
// Configure the block
blk_ctrl->set_destination(xport.send_sid.get_src(), block_port);
blk_ctrl->sr_write(uhd::rfnoc::SR_RESP_OUT_DST_SID, xport.send_sid.get_src(), block_port);
- UHD_STREAMER_LOG() << "[RX Streamer] resp_out_dst_sid == " << xport.send_sid.get_src() << std::endl;
+ UHD_RX_STREAMER_LOG() << "resp_out_dst_sid == " << xport.send_sid.get_src() ;
// Find all upstream radio nodes and set their response in SID to the host
std::vector<boost::shared_ptr<uhd::rfnoc::radio_ctrl> > upstream_radio_nodes = blk_ctrl->find_upstream_node<uhd::rfnoc::radio_ctrl>();
- UHD_STREAMER_LOG() << "[RX Streamer] Number of upstream radio nodes: " << upstream_radio_nodes.size() << std::endl;
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node, upstream_radio_nodes) {
+ UHD_RX_STREAMER_LOG() << "Number of upstream radio nodes: " << upstream_radio_nodes.size();
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node: upstream_radio_nodes) {
node->sr_write(uhd::rfnoc::SR_RESP_OUT_DST_SID, xport.send_sid.get_src(), block_port);
}
@@ -575,7 +582,7 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
const size_t bpp = xport.recv->get_recv_frame_size() - stream_options.rx_max_len_hdr; // bytes per packet
const size_t bpi = convert::get_bytes_per_item(args.otw_format); // bytes per item
const size_t spp = std::min(args.args.cast<size_t>("spp", bpp/bpi), bpp/bpi); // samples per packet
- UHD_STREAMER_LOG() << "[RX Streamer] spp == " << spp << std::endl;
+ UHD_RX_STREAMER_LOG() << "spp == " << spp ;
//make the new streamer given the samples per packet
if (not my_streamer)
@@ -584,7 +591,7 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
//init some streamer stuff
std::string conv_endianness;
- if (get_transport_endianness(mb_index) == ENDIANNESS_BIG) {
+ if (xport.endianness == ENDIANNESS_BIG) {
my_streamer->set_vrt_unpacker(&vrt::chdr::if_hdr_unpack_be);
conv_endianness = "be";
} else {
@@ -604,7 +611,7 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
const size_t pkt_size = spp * bpi + stream_options.rx_max_len_hdr;
const size_t fc_window = get_rx_flow_control_window(pkt_size, xport.recv_buff_size, rx_hints);
const size_t fc_handle_window = std::max<size_t>(1, fc_window / stream_options.rx_fc_request_freq);
- UHD_STREAMER_LOG()<< "[RX Streamer] Flow Control Window (minus one) = " << fc_window-1 << ", Flow Control Handler Window = " << fc_handle_window << std::endl;
+ UHD_RX_STREAMER_LOG()<< "Flow Control Window (minus one) = " << fc_window-1 << ", Flow Control Handler Window = " << fc_handle_window ;
blk_ctrl->configure_flow_control_out(
fc_window-1, // Leave one space for overrun packets TODO make this obsolete
block_port
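The two window sizes above control how many packets can be in flight and how often the host acknowledges them. A back-of-the-envelope version of that sizing (whether get_rx_flow_control_window() applies additional hints is not visible here, so a plain buffer/packet-size division is assumed):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t recv_buff_size      = 8 * 1024 * 1024;  // assumed socket buffer
        const size_t pkt_size            = 8000;             // spp*bpi + max header, example
        const size_t rx_fc_request_freq  = 32;               // assumed requests per window
        const size_t fc_window = recv_buff_size / pkt_size;  // ~1048 packets in flight
        const size_t fc_handle_window =
            std::max<size_t>(1, fc_window / rx_fc_request_freq);
        std::printf("fc_window=%zu, fc_handle_window=%zu\n", fc_window, fc_handle_window);
        return 0;
    }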
@@ -637,7 +644,7 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
&handle_rx_flowctrl,
xport.send_sid,
xport.send,
- get_transport_endianness(mb_index),
+ xport.endianness,
fc_cache,
_1
),
@@ -680,8 +687,8 @@ rx_streamer::sptr device3_impl::get_rx_stream(const stream_args_t &args_)
**********************************************************************/
void device3_impl::update_tx_streamers(double /* rate */)
{
- BOOST_FOREACH(const std::string &block_id, _tx_streamers.keys()) {
- UHD_STREAMER_LOG() << "[Device3] updating TX streamer: " << block_id << std::endl;
+ for(const std::string &block_id: _tx_streamers.keys()) {
+ UHD_TX_STREAMER_LOG() << "updating TX streamer: " << block_id;
boost::shared_ptr<sph::send_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::send_packet_streamer>(_tx_streamers[block_id].lock());
if (my_streamer) {
@@ -697,7 +704,7 @@ void device3_impl::update_tx_streamers(double /* rate */)
if (scaling == rfnoc::scalar_node_ctrl::SCALE_UNDEFINED) {
scaling = 32767.;
}
- UHD_STREAMER_LOG() << " New tick_rate == " << tick_rate << " New samp_rate == " << samp_rate << " New scaling == " << scaling << std::endl;
+ UHD_TX_STREAMER_LOG() << "New tick_rate == " << tick_rate << " New samp_rate == " << samp_rate << " New scaling == " << scaling ;
my_streamer->set_tick_rate(tick_rate);
my_streamer->set_samp_rate(samp_rate);
my_streamer->set_scale_factor(scaling);
@@ -746,7 +753,6 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
args.args = chan_args[stream_i];
size_t mb_index = block_id.get_device_no();
size_t suggested_block_port = args.args.cast<size_t>("block_port", rfnoc::ANY_PORT);
- uhd::endianness_t endianness = get_transport_endianness(mb_index);
// Access to this channel's block control
uhd::rfnoc::sink_block_ctrl_base::sptr blk_ctrl =
@@ -771,17 +777,17 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
//allocate sid and create transport
uhd::sid_t stream_address = blk_ctrl->get_address(block_port);
- UHD_STREAMER_LOG() << "[TX Streamer] creating tx stream " << tx_hints.to_string() << std::endl;
+ UHD_TX_STREAMER_LOG() << "creating tx stream " << tx_hints.to_string() ;
both_xports_t xport = make_transport(stream_address, TX_DATA, tx_hints);
- both_xports_t async_xport = make_transport(stream_address, ASYNC_MSG, device_addr_t(""));
- UHD_STREAMER_LOG() << std::hex << "[TX Streamer] data_sid = " << xport.send_sid << std::dec << std::endl;
+ both_xports_t async_xport = make_transport(stream_address, ASYNC_MSG, device_addr_t(""));
+ UHD_TX_STREAMER_LOG() << std::hex << "data_sid = " << xport.send_sid << std::dec ;
// To calculate the max number of samples per packet, we assume the maximum header length
// to avoid fragmentation should the entire header be used.
const size_t bpp = tx_hints.cast<size_t>("bpp", xport.send->get_send_frame_size()) - stream_options.tx_max_len_hdr;
const size_t bpi = convert::get_bytes_per_item(args.otw_format); // bytes per item
const size_t spp = std::min(args.args.cast<size_t>("spp", bpp/bpi), bpp/bpi); // samples per packet
- UHD_STREAMER_LOG() << "[TX Streamer] spp == " << spp << std::endl;
+ UHD_TX_STREAMER_LOG() << "spp == " << spp ;
//make the new streamer given the samples per packet
if (not my_streamer)
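The samples-per-packet calculation above reserves the worst-case header so a fully populated header never causes fragmentation. A worked example with assumed frame and header sizes and sc16 (4 bytes per sample) items:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t frame_size     = 1472;  // assumed send frame size in bytes
        const size_t tx_max_len_hdr = 16;    // assumed worst-case header bytes
        const size_t bpi            = 4;     // bytes per item for sc16
        const size_t requested_spp  = 1000;  // user-supplied "spp" stream arg

        const size_t bpp = frame_size - tx_max_len_hdr;         // 1456 payload bytes
        const size_t spp = std::min(requested_spp, bpp / bpi);  // min(1000, 364) = 364
        std::printf("spp = %zu samples per packet\n", spp);
        return 0;
    }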
@@ -792,7 +798,7 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
//init some streamer stuff
std::string conv_endianness;
- if (get_transport_endianness(mb_index) == ENDIANNESS_BIG) {
+ if (xport.endianness == ENDIANNESS_BIG) {
my_streamer->set_vrt_packer(&vrt::chdr::if_hdr_pack_be);
conv_endianness = "be";
} else {
@@ -817,7 +823,7 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
tx_hints // This can override the value reported by the block!
);
const size_t fc_handle_window = std::max<size_t>(1, fc_window / stream_options.tx_fc_response_freq);
- UHD_STREAMER_LOG() << "[TX Streamer] Flow Control Window = " << fc_window << ", Flow Control Handler Window = " << fc_handle_window << std::endl;
+ UHD_TX_STREAMER_LOG() << "Flow Control Window = " << fc_window << ", Flow Control Handler Window = " << fc_handle_window ;
blk_ctrl->configure_flow_control_in(
stream_options.tx_fc_response_cycles,
fc_handle_window, /*pkts*/
@@ -841,14 +847,14 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
&handle_tx_async_msgs,
async_tx_info,
my_streamer->_async_xport.recv,
- endianness,
+ xport.endianness,
tick_rate_retriever
)
);
blk_ctrl->sr_write(uhd::rfnoc::SR_CLEAR_RX_FC, 0xc1ea12, block_port);
blk_ctrl->sr_write(uhd::rfnoc::SR_RESP_IN_DST_SID, my_streamer->_async_xport.recv_sid.get_dst(), block_port);
- UHD_STREAMER_LOG() << "[TX Streamer] resp_in_dst_sid == " << boost::format("0x%04X") % xport.recv_sid.get_dst() << std::endl;
+ UHD_TX_STREAMER_LOG() << "resp_in_dst_sid == " << boost::format("0x%04X") % xport.recv_sid.get_dst() ;
// FIXME: Once there is a better way to map the radio block and port
// to the channel or another way to receive asynchronous messages that
@@ -859,8 +865,8 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
uhd::rfnoc::block_id_t radio_id(args.args["radio_id"]);
size_t radio_port = args.args.cast<size_t>("radio_port", 0);
std::vector<boost::shared_ptr<uhd::rfnoc::radio_ctrl> > downstream_radio_nodes = blk_ctrl->find_downstream_node<uhd::rfnoc::radio_ctrl>();
- UHD_STREAMER_LOG() << "[TX Streamer] Number of downstream radio nodes: " << downstream_radio_nodes.size() << std::endl;
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node, downstream_radio_nodes) {
+ UHD_TX_STREAMER_LOG() << "Number of downstream radio nodes: " << downstream_radio_nodes.size();
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node: downstream_radio_nodes) {
if (node->get_block_id() == radio_id) {
node->sr_write(uhd::rfnoc::SR_RESP_IN_DST_SID, my_streamer->_async_xport.recv_sid.get_dst(), radio_port);
}
@@ -873,8 +879,8 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
// soon as possible.
// Find all downstream radio nodes and set their response SID to the host
std::vector<boost::shared_ptr<uhd::rfnoc::radio_ctrl> > downstream_radio_nodes = blk_ctrl->find_downstream_node<uhd::rfnoc::radio_ctrl>();
- UHD_STREAMER_LOG() << "[TX Streamer] Number of downstream radio nodes: " << downstream_radio_nodes.size() << std::endl;
- BOOST_FOREACH(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node, downstream_radio_nodes) {
+ UHD_TX_STREAMER_LOG() << "Number of downstream radio nodes: " << downstream_radio_nodes.size();
+ for(const boost::shared_ptr<uhd::rfnoc::radio_ctrl> &node: downstream_radio_nodes) {
node->sr_write(uhd::rfnoc::SR_RESP_IN_DST_SID, my_streamer->_async_xport.recv_sid.get_dst(), block_port);
}
}
@@ -884,12 +890,12 @@ tx_streamer::sptr device3_impl::get_tx_stream(const uhd::stream_args_t &args_)
my_streamer->_xport.send = zero_copy_flow_ctrl::make(
my_streamer->_xport.send,
boost::bind(
- &tx_flow_ctrl,
- fc_cache,
- my_streamer->_xport.recv,
- (endianness == ENDIANNESS_BIG ? uhd::ntohx<uint32_t> : uhd::wtohx<uint32_t>),
- (endianness == ENDIANNESS_BIG ? vrt::chdr::if_hdr_unpack_be : vrt::chdr::if_hdr_unpack_le),
- _1),
+ &tx_flow_ctrl,
+ fc_cache,
+ my_streamer->_xport.recv,
+ (xport.endianness == ENDIANNESS_BIG ? uhd::ntohx<uint32_t> : uhd::wtohx<uint32_t>),
+ (xport.endianness == ENDIANNESS_BIG ? vrt::chdr::if_hdr_unpack_be : vrt::chdr::if_hdr_unpack_le),
+ _1),
NULL);
//Give the streamer a functor to get the send buffer
diff --git a/host/lib/usrp/e100/clock_ctrl.cpp b/host/lib/usrp/e100/clock_ctrl.cpp
index 0dbd6a5d3..a3f630462 100644
--- a/host/lib/usrp/e100/clock_ctrl.cpp
+++ b/host/lib/usrp/e100/clock_ctrl.cpp
@@ -17,13 +17,12 @@
#include "clock_ctrl.hpp"
#include "ad9522_regs.hpp"
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/assert_has.hpp>
#include <stdint.h>
#include "e100_regs.hpp" //spi slave constants
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/thread/thread.hpp>
#include <boost/math/common_factor_rt.hpp> //gcd
@@ -137,11 +136,11 @@ static clock_settings_type get_clock_settings(double rate){
cs.chan_divider /= cs.vco_divider;
}
- UHD_LOGV(always)
- << "gcd " << gcd << std::endl
- << "X " << X << std::endl
- << "Y " << Y << std::endl
- << cs.to_pp_string() << std::endl
+ UHD_LOGGER_DEBUG("E100")
+ << "gcd: " << gcd
+ << " X: " << X
+ << " Y: " << Y
+ << cs.to_pp_string()
;
//filter limits on the counters
@@ -155,7 +154,7 @@ static clock_settings_type get_clock_settings(double rate){
if (cs.get_vco_rate() < 1400e6 + vco_bound_pad) continue;
if (cs.get_out_rate() != rate) continue;
- UHD_MSG(status) << "USRP-E100 clock control: " << i << std::endl << cs.to_pp_string() << std::endl;
+ UHD_LOGGER_INFO("E100") << "USRP-E100 clock control: " << i << cs.to_pp_string() ;
return cs;
}
}
@@ -195,7 +194,7 @@ public:
this->use_internal_ref();
//initialize the FPGA clock rate
- UHD_MSG(status) << boost::format("Initializing FPGA clock to %fMHz...") % (master_clock_rate/1e6) << std::endl;
+ UHD_LOGGER_INFO("E100") << boost::format("Initializing FPGA clock to %fMHz...") % (master_clock_rate/1e6) ;
this->set_fpga_clock_rate(master_clock_rate);
this->enable_test_clock(ENABLE_THE_TEST_OUT);
@@ -460,7 +459,7 @@ private:
void send_reg(uint16_t addr){
uint32_t reg = _ad9522_regs.get_write_reg(addr);
- UHD_LOGV(often) << "clock control write reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("E100") << "clock control write reg: " << std::hex << reg ;
_iface->write_spi(
UE_SPI_SS_AD9522,
spi_config_t::EDGE_RISE,
@@ -487,7 +486,7 @@ private:
_ad9522_regs.set_reg(addr, reg);
if (_ad9522_regs.vco_calibration_finished) goto wait_for_ld;
}
- UHD_MSG(error) << "USRP-E100 clock control: VCO calibration timeout" << std::endl;
+ UHD_LOGGER_ERROR("E100") << "USRP-E100 clock control: VCO calibration timeout" ;
wait_for_ld:
//wait for digital lock detect:
for (size_t ms10 = 0; ms10 < 100; ms10++){
@@ -499,7 +498,7 @@ private:
_ad9522_regs.set_reg(addr, reg);
if (_ad9522_regs.digital_lock_detect) return;
}
- UHD_MSG(error) << "USRP-E100 clock control: lock detection timeout" << std::endl;
+ UHD_LOGGER_ERROR("E100") << "USRP-E100 clock control: lock detection timeout" ;
}
void soft_sync(void){
@@ -521,7 +520,7 @@ private:
;
//write initial register values and latch/update
- BOOST_FOREACH(const range_t &range, ranges){
+ for(const range_t &range: ranges){
for(uint16_t addr = range.first; addr <= range.second; addr++){
this->send_reg(addr);
}
diff --git a/host/lib/usrp/e100/codec_ctrl.cpp b/host/lib/usrp/e100/codec_ctrl.cpp
index 7dce01e46..5bd394fb1 100644
--- a/host/lib/usrp/e100/codec_ctrl.cpp
+++ b/host/lib/usrp/e100/codec_ctrl.cpp
@@ -260,7 +260,7 @@ void e100_codec_ctrl_impl::write_aux_dac(aux_dac_t which, double volts){
**********************************************************************/
void e100_codec_ctrl_impl::send_reg(uint8_t addr){
uint32_t reg = _ad9862_regs.get_write_reg(addr);
- UHD_LOGV(often) << "codec control write reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("E100") << "codec control write reg: " << std::hex << reg ;
_iface->write_spi(
UE_SPI_SS_AD9862,
spi_config_t::EDGE_RISE,
@@ -270,13 +270,13 @@ void e100_codec_ctrl_impl::send_reg(uint8_t addr){
void e100_codec_ctrl_impl::recv_reg(uint8_t addr){
uint32_t reg = _ad9862_regs.get_read_reg(addr);
- UHD_LOGV(often) << "codec control read reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("E100") << "codec control read reg: " << std::hex << reg ;
uint32_t ret = _iface->read_spi(
UE_SPI_SS_AD9862,
spi_config_t::EDGE_RISE,
reg, 16
);
- UHD_LOGV(often) << "codec control read ret: " << std::hex << ret << std::endl;
+ UHD_LOGGER_TRACE("E100") << "codec control read ret: " << std::hex << ret ;
_ad9862_regs.set_reg(addr, uint16_t(ret));
}
diff --git a/host/lib/usrp/e100/e100_ctrl.cpp b/host/lib/usrp/e100/e100_ctrl.cpp
index 4217286f8..41525300c 100644
--- a/host/lib/usrp/e100/e100_ctrl.cpp
+++ b/host/lib/usrp/e100/e100_ctrl.cpp
@@ -19,7 +19,7 @@
#include "e100_regs.hpp"
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <sys/ioctl.h> //ioctl
#include <fcntl.h> //open, close
#include <linux/usrp_e.h> //ioctl structures and constants
@@ -27,7 +27,6 @@
#include <boost/thread/thread.hpp> //sleep
#include <boost/thread/mutex.hpp>
#include <boost/thread/condition_variable.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <fstream>
@@ -219,7 +218,7 @@ public:
void write_uart(const std::string &buf){
const ssize_t ret = ::write(_node_fd, buf.c_str(), buf.size());
- if (size_t(ret) != buf.size()) UHD_LOG << ret;
+ if (size_t(ret) != buf.size()) UHD_LOGGER_DEBUG("E100")<< ret;
}
std::string read_uart(double timeout){
@@ -330,7 +329,7 @@ public:
* Structors
******************************************************************/
e100_ctrl_impl(const std::string &node){
- UHD_MSG(status) << "Opening device node " << node << "..." << std::endl;
+ UHD_LOGGER_INFO("E100") << "Opening device node " << node << "..." ;
//open the device node and check file descriptor
if ((_node_fd = ::open(node.c_str(), O_RDWR)) < 0){
@@ -350,7 +349,7 @@ public:
edge_file << "rising" << std::endl << std::flush;
edge_file.close();
_irq_fd = ::open("/sys/class/gpio/gpio147/value", O_RDONLY);
- if (_irq_fd < 0) UHD_MSG(error) << "Unable to open GPIO for IRQ\n";
+ if (_irq_fd < 0) UHD_LOGGER_ERROR("E100") << "Unable to open GPIO for IRQ\n";
}
~e100_ctrl_impl(void){
diff --git a/host/lib/usrp/e100/e100_impl.cpp b/host/lib/usrp/e100/e100_impl.cpp
index 25c967cfa..ddb21fc35 100644
--- a/host/lib/usrp/e100/e100_impl.cpp
+++ b/host/lib/usrp/e100/e100_impl.cpp
@@ -18,7 +18,7 @@
#include "apply_corrections.hpp"
#include "e100_impl.hpp"
#include "e100_regs.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/static.hpp>
#include <uhd/utils/paths.hpp>
@@ -140,7 +140,7 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
e100_fpga_image = find_image_path(device_addr.get("fpga", default_fpga_file_name));
}
catch(...){
- UHD_MSG(error) << boost::format("Could not find FPGA image. %s\n") % print_utility_error("uhd_images_downloader.py");
+ UHD_LOGGER_ERROR("E100") << boost::format("Could not find FPGA image. %s") % print_utility_error("uhd_images_downloader.py");
throw;
}
e100_load_fpga(e100_fpga_image);
@@ -151,10 +151,10 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
bool dboard_clocks_diff = true;
if (mb_eeprom.get("revision", "0") == "3") dboard_clocks_diff = false;
else if (mb_eeprom.get("revision", "0") == "4") dboard_clocks_diff = true;
- else UHD_MSG(warning)
+ else UHD_LOGGER_WARNING("E100")
<< "Unknown E1XX revision number!\n"
<< "defaulting to differential dboard clocks to be safe.\n"
- << std::endl;
+ ;
const double master_clock_rate = device_addr.cast<double>("master_clock_rate", E100_DEFAULT_CLOCK_RATE);
_aux_spi_iface = e100_ctrl::make_aux_spi_iface();
_clock_ctrl = e100_clock_ctrl::make(_aux_spi_iface, master_clock_rate, dboard_clocks_diff);
@@ -179,7 +179,7 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
//Perform wishbone readback tests, these tests also write the hash
bool test_fail = false;
- UHD_MSG(status) << "Performing control readback test... " << std::flush;
+ UHD_LOGGER_INFO("E100") << "Performing control readback test... ";
size_t hash = time(NULL);
for (size_t i = 0; i < 100; i++){
boost::hash_combine(hash, i);
@@ -187,12 +187,12 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
test_fail = _fifo_ctrl->peek32(REG_RB_CONFIG0) != uint32_t(hash);
if (test_fail) break; //exit loop on any failure
}
- UHD_MSG(status) << ((test_fail)? " fail" : "pass") << std::endl;
+ UHD_LOGGER_INFO("E100") << "Control readback test " << ((test_fail)? "failed" : "passed");
- if (test_fail) UHD_MSG(error) << boost::format(
- "The FPGA is either clocked improperly\n"
- "or the FPGA build is not compatible.\n"
- "Subsequent errors may follow...\n"
+ if (test_fail) UHD_LOGGER_ERROR("E100") << boost::format(
+ "The FPGA is either clocked improperly "
+ "or the FPGA build is not compatible. "
+ "Subsequent errors may follow..."
);
//check that the compatibility is correct
@@ -260,16 +260,16 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
static const fs::path GPSDO_VOLATILE_PATH("/media/ram/e100_internal_gpsdo.cache");
if (not fs::exists(GPSDO_VOLATILE_PATH))
{
- UHD_MSG(status) << "Detecting internal GPSDO.... " << std::flush;
+ UHD_LOGGER_INFO("E100") << "Detecting internal GPSDO.... ";
try{
_gps = gps_ctrl::make(e100_ctrl::make_gps_uart_iface(E100_UART_DEV_NODE));
}
catch(std::exception &e){
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("E100") << "An error occurred making GPSDO control: " << e.what();
}
if (_gps and _gps->gps_detected())
{
- BOOST_FOREACH(const std::string &name, _gps->get_sensors())
+ for(const std::string &name: _gps->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, _gps, name));
@@ -434,12 +434,12 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
//bind frontend corrections to the dboard freq props
const fs_path db_tx_fe_path = mb_path / "dboards" / "A" / "tx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_tx_fe_path)){
+ for(const std::string &name: _tree->list(db_tx_fe_path)){
_tree->access<double>(db_tx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&e100_impl::set_tx_fe_corrections, this, _1));
}
const fs_path db_rx_fe_path = mb_path / "dboards" / "A" / "rx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_rx_fe_path)){
+ for(const std::string &name: _tree->list(db_rx_fe_path)){
_tree->access<double>(db_rx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&e100_impl::set_rx_fe_corrections, this, _1));
}
@@ -460,10 +460,10 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
.add_coerced_subscriber(boost::bind(&e100_clock_ctrl::set_fpga_clock_rate, _clock_ctrl, _1));
//reset cordic rates and their properties to zero
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "freq" / "value").set(0.0);
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "tx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "tx_dsps")){
_tree->access<double>(mb_path / "tx_dsps" / name / "freq" / "value").set(0.0);
}
@@ -475,10 +475,10 @@ e100_impl::e100_impl(const uhd::device_addr_t &device_addr){
//GPS installed: use external ref, time, and init time spec
if (_gps and _gps->gps_detected()){
_time64->enable_gpsdo();
- UHD_MSG(status) << "Setting references to the internal GPSDO" << std::endl;
+ UHD_LOGGER_INFO("E100") << "Setting references to the internal GPSDO";
_tree->access<std::string>(mb_path / "time_source/value").set("gpsdo");
_tree->access<std::string>(mb_path / "clock_source/value").set("gpsdo");
- UHD_MSG(status) << "Initializing time to the internal GPSDO" << std::endl;
+ UHD_LOGGER_INFO("E100") << "Initializing time to the internal GPSDO";
_time64->set_time_next_pps(time_spec_t(time_t(_gps->get_sensor("gps_time").to_int()+1)));
}
diff --git a/host/lib/usrp/e100/e100_mmap_zero_copy.cpp b/host/lib/usrp/e100/e100_mmap_zero_copy.cpp
index 57e4e32d9..37a87a669 100644
--- a/host/lib/usrp/e100/e100_mmap_zero_copy.cpp
+++ b/host/lib/usrp/e100/e100_mmap_zero_copy.cpp
@@ -42,14 +42,14 @@ public:
_mem(mem), _info(info) { /* NOP */ }
void release(void){
- if (fp_verbose) UHD_LOGV(always) << "recv buff: release" << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << "recv buff: release" ;
_info->flags = RB_KERNEL; //release the frame
}
UHD_INLINE bool ready(void){return _info->flags & RB_USER;}
UHD_INLINE sptr get_new(void){
- if (fp_verbose) UHD_LOGV(always) << " make_recv_buff: " << _info->len << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << " make_recv_buff: " << _info->len ;
_info->flags = RB_USER_PROCESS; //claim the frame
return make(this, _mem, _info->len);
}
@@ -69,18 +69,18 @@ public:
_mem(mem), _info(info), _len(len), _fd(fd) { /* NOP */ }
void release(void){
- if (fp_verbose) UHD_LOGV(always) << "send buff: commit " << size() << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << "send buff: commit " << size() ;
_info->len = _len;//size();
_info->flags = RB_USER; //release the frame
if (::write(_fd, NULL, 0) < 0){ //notifies the kernel
- UHD_LOGV(rarely) << UHD_THROW_SITE_INFO("write error") << std::endl;
+ UHD_LOGGER_ERROR("E100") << UHD_THROW_SITE_INFO("write error") ;
}
}
UHD_INLINE bool ready(void){return _info->flags & RB_KERNEL;}
UHD_INLINE sptr get_new(void){
- if (fp_verbose) UHD_LOGV(always) << " make_send_buff: " << _len << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << " make_send_buff: " << _len ;
_info->flags = RB_USER_PROCESS; //claim the frame
return make(this, _mem, _len);
}
@@ -111,15 +111,13 @@ public:
(_rb_size.num_rx_frames + _rb_size.num_tx_frames) * _frame_size;
//print sizes summary
- UHD_LOG
- << "page_size: " << page_size << std::endl
- << "frame_size: " << _frame_size << std::endl
- << "num_pages_rx_flags: " << _rb_size.num_pages_rx_flags << std::endl
- << "num_rx_frames: " << _rb_size.num_rx_frames << std::endl
- << "num_pages_tx_flags: " << _rb_size.num_pages_tx_flags << std::endl
- << "num_tx_frames: " << _rb_size.num_tx_frames << std::endl
- << "map_size: " << _map_size << std::endl
- ;
+ UHD_LOGGER_DEBUG("E100") << "page_size: " << page_size;
+ UHD_LOGGER_DEBUG("E100") << "frame_size: " << _frame_size;
+ UHD_LOGGER_DEBUG("E100") << "num_pages_rx_flags: " << _rb_size.num_pages_rx_flags;
+ UHD_LOGGER_DEBUG("E100") << "num_rx_frames: " << _rb_size.num_rx_frames;
+ UHD_LOGGER_DEBUG("E100") << "num_pages_tx_flags: " << _rb_size.num_pages_tx_flags;
+ UHD_LOGGER_DEBUG("E100") << "num_tx_frames: " << _rb_size.num_tx_frames;
+ UHD_LOGGER_DEBUG("E100") << "map_size: " << _map_size;
//call mmap to get the memory
_mapped_mem = ::mmap(
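The sizes logged above describe the mmap'd ring buffer: per-direction flag pages plus the frame buffers themselves. A layout sketch of that arithmetic (only the tail of the map_size expression and the send_buff_off line are visible in this hunk, so the full ordering below is an assumption for illustration):

    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t page_size = 4096, frame_size = 2048;       // assumed sizes
        const size_t num_pages_rx_flags = 1, num_rx_frames = 100;
        const size_t num_pages_tx_flags = 1, num_tx_frames = 100;

        // Total mapping: all flag pages plus all RX and TX frames.
        const size_t map_size =
            (num_pages_rx_flags + num_pages_tx_flags) * page_size +
            (num_rx_frames + num_tx_frames) * frame_size;

        // Assumed layout: RX info pages, RX frames, TX info pages, TX frames.
        const size_t recv_info_off = 0;
        const size_t recv_buff_off = recv_info_off + num_pages_rx_flags * page_size;
        const size_t send_info_off = recv_buff_off + num_rx_frames * frame_size;
        const size_t send_buff_off = send_info_off + num_pages_tx_flags * page_size;
        std::printf("map_size=%zu send_buff_off=%zu\n", map_size, send_buff_off);
        return 0;
    }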
@@ -134,12 +132,10 @@ public:
size_t send_buff_off = send_info_off + (_rb_size.num_pages_tx_flags * page_size);
//print offset summary
- UHD_LOG
- << "recv_info_off: " << recv_info_off << std::endl
- << "recv_buff_off: " << recv_buff_off << std::endl
- << "send_info_off: " << send_info_off << std::endl
- << "send_buff_off: " << send_buff_off << std::endl
- ;
+ UHD_LOGGER_DEBUG("E100") << "recv_info_off: " << recv_info_off;
+ UHD_LOGGER_DEBUG("E100") << "recv_buff_off: " << recv_buff_off;
+ UHD_LOGGER_DEBUG("E100") << "send_info_off: " << send_info_off;
+ UHD_LOGGER_DEBUG("E100") << "send_buff_off: " << send_buff_off;
//pointers to sections in the mapped memory
ring_buffer_info (*recv_info)[], (*send_info)[];
@@ -170,12 +166,12 @@ public:
}
~e100_mmap_zero_copy_impl(void){
- UHD_LOG << "cleanup: munmap" << std::endl;
+ UHD_LOGGER_DEBUG("E100")<< "cleanup: munmap" ;
::munmap(_mapped_mem, _map_size);
}
managed_recv_buffer::sptr get_recv_buff(double timeout){
- if (fp_verbose) UHD_LOGV(always) << "get_recv_buff: " << _recv_index << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << "get_recv_buff: " << _recv_index ;
e100_mmap_zero_copy_mrb &mrb = *_mrb_pool[_recv_index];
//poll/wait for a ready frame
@@ -185,7 +181,7 @@ public:
pfd.fd = _fd;
pfd.events = POLLIN;
ssize_t poll_ret = ::poll(&pfd, 1, size_t(timeout*1e3/poll_breakout));
- if (fp_verbose) UHD_LOGV(always) << " POLLIN: " << poll_ret << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << " POLLIN: " << poll_ret ;
if (poll_ret > 0) goto found_user_frame; //good poll, continue on
}
return managed_recv_buffer::sptr(); //timed-out for real
@@ -207,7 +203,7 @@ public:
}
managed_send_buffer::sptr get_send_buff(double timeout){
- if (fp_verbose) UHD_LOGV(always) << "get_send_buff: " << _send_index << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << "get_send_buff: " << _send_index ;
e100_mmap_zero_copy_msb &msb = *_msb_pool[_send_index];
//poll/wait for a ready frame
@@ -216,7 +212,7 @@ public:
pfd.fd = _fd;
pfd.events = POLLOUT;
ssize_t poll_ret = ::poll(&pfd, 1, size_t(timeout*1e3));
- if (fp_verbose) UHD_LOGV(always) << " POLLOUT: " << poll_ret << std::endl;
+ if (fp_verbose) UHD_LOGGER_DEBUG("E100") << " POLLOUT: " << poll_ret ;
if (poll_ret <= 0) return managed_send_buffer::sptr();
}
diff --git a/host/lib/usrp/e100/fpga_downloader.cpp b/host/lib/usrp/e100/fpga_downloader.cpp
index 9abde32f7..bfb207f39 100644
--- a/host/lib/usrp/e100/fpga_downloader.cpp
+++ b/host/lib/usrp/e100/fpga_downloader.cpp
@@ -23,18 +23,13 @@
#include <uhd/device.hpp>
#include <uhd/image_loader.hpp>
#include <uhd/types/device_addr.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/static.hpp>
#include "e100_impl.hpp"
#else //special case when this file is externally included
#include <stdexcept>
#include <iostream>
-#define UHD_MSG(type) std::cout
-namespace uhd{
- typedef std::runtime_error os_error;
- typedef std::runtime_error io_error;
-}
#endif
#include <sstream>
@@ -246,7 +241,7 @@ static void send_file_to_fpga(const std::string &file_name, gpio &error, gpio &d
throw uhd::os_error("INIT_B went high, error occured.");
if (!done.get_value())
- UHD_MSG(status) << "Configuration complete." << std::endl;
+ UHD_LOGGER_INFO("E100") << "Configuration complete.";
} while (bitstream.gcount() == BUF_SIZE);
}
@@ -260,20 +255,20 @@ void e100_load_fpga(const std::string &bin_file){
gpio gpio_init_b(INIT_B, IN);
gpio gpio_done (DONE, IN);
- UHD_MSG(status) << "Loading FPGA image: " << bin_file << "... " << std::flush;
+ UHD_LOGGER_INFO("E100") << "Loading FPGA image: " << bin_file << "... ";
// if(std::system("/sbin/rmmod usrp_e") != 0){
-// UHD_MSG(warning) << "USRP-E100 FPGA downloader: could not unload usrp_e module" << std::endl;
+// UHD_LOGGER_WARNING("E100") << "USRP-E100 FPGA downloader: could not unload usrp_e module" ;
// }
prepare_fpga_for_configuration(gpio_prog_b, gpio_init_b);
- UHD_MSG(status) << "done = " << gpio_done.get_value() << std::endl;
+ UHD_LOGGER_INFO("E100") << "done = " << gpio_done.get_value();
send_file_to_fpga(bin_file, gpio_init_b, gpio_done);
// if(std::system("/sbin/modprobe usrp_e") != 0){
-// UHD_MSG(warning) << "USRP-E100 FPGA downloader: could not load usrp_e module" << std::endl;
+// UHD_LOGGER_WARNING("E100") << "USRP-E100 FPGA downloader: could not load usrp_e module" ;
// }
}
diff --git a/host/lib/usrp/e100/io_impl.cpp b/host/lib/usrp/e100/io_impl.cpp
index ebed3614c..acd14f17b 100644
--- a/host/lib/usrp/e100/io_impl.cpp
+++ b/host/lib/usrp/e100/io_impl.cpp
@@ -20,7 +20,7 @@
#include "../../transport/super_recv_packet_handler.hpp"
#include "../../transport/super_send_packet_handler.hpp"
#include "e100_impl.hpp"
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <boost/bind.hpp>
@@ -75,10 +75,10 @@ void e100_impl::update_rates(void){
_tree->access<double>(mb_path / "tick_rate").update();
//and now that the tick rate is set, init the host rates to something
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "rate" / "value").update();
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "tx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "tx_dsps")){
_tree->access<double>(mb_path / "tx_dsps" / name / "rate" / "value").update();
}
}
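The BOOST_FOREACH removals in this and the following files are mechanical: each macro loop becomes a C++11 range-based for over the same container, with the loop body unchanged. A one-line illustration with a placeholder container:

    #include <string>
    #include <vector>

    void update_all(const std::vector<std::string> &names)
    {
        // before: BOOST_FOREACH(const std::string &name, names) { ... }
        for (const std::string &name : names) {
            (void)name; // loop body is untouched by the migration
        }
    }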
diff --git a/host/lib/usrp/e300/e300_common.cpp b/host/lib/usrp/e300/e300_common.cpp
index 216713bc6..61da3a2a2 100644
--- a/host/lib/usrp/e300/e300_common.cpp
+++ b/host/lib/usrp/e300/e300_common.cpp
@@ -15,7 +15,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
#include <uhd/image_loader.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/static.hpp>
@@ -26,6 +26,7 @@
#include "e300_common.hpp"
#include <boost/filesystem.hpp>
+#include <boost/noncopyable.hpp>
#include <fstream>
#include <string>
@@ -39,7 +40,7 @@ void load_fpga_image(const std::string &path)
if (not boost::filesystem::exists("/dev/xdevcfg"))
::system("mknod /dev/xdevcfg c 259 0");
- UHD_MSG(status) << "Loading FPGA image: " << path << "..." << std::flush;
+ UHD_LOGGER_INFO("E300") << "Loading FPGA image: " << path << "...";
std::ifstream fpga_file(path.c_str(), std::ios_base::binary);
UHD_ASSERT_THROW(fpga_file.good());
@@ -57,7 +58,7 @@ void load_fpga_image(const std::string &path)
fpga_file.close();
std::fclose(wfile);
- UHD_MSG(status) << " done" << std::endl;
+ UHD_LOGGER_INFO("E300") << "FPGA image loaded";
}
static bool e300_image_loader(const image_loader::image_loader_args_t &image_loader_args) {
diff --git a/host/lib/usrp/e300/e300_common.hpp b/host/lib/usrp/e300/e300_common.hpp
index d9a0afd9e..c0cb9e2fd 100644
--- a/host/lib/usrp/e300/e300_common.hpp
+++ b/host/lib/usrp/e300/e300_common.hpp
@@ -18,6 +18,8 @@
#ifndef INCLUDED_E300_COMMON_HPP
#define INCLUDED_E300_COMMON_HPP
+#include <string>
+
namespace uhd { namespace usrp { namespace e300 {
namespace common {
diff --git a/host/lib/usrp/e300/e300_defaults.hpp b/host/lib/usrp/e300/e300_defaults.hpp
index 267897e03..cc810c0df 100644
--- a/host/lib/usrp/e300/e300_defaults.hpp
+++ b/host/lib/usrp/e300/e300_defaults.hpp
@@ -60,7 +60,7 @@ public:
}
}
clocking_mode_t get_clocking_mode() {
- return AD9361_XTAL_N_CLK_PATH;
+ return clocking_mode_t::AD9361_XTAL_N_CLK_PATH;
}
digital_interface_mode_t get_digital_interface_mode() {
return AD9361_DDR_FDD_LVCMOS;
diff --git a/host/lib/usrp/e300/e300_fifo_config.cpp b/host/lib/usrp/e300/e300_fifo_config.cpp
index 43d14aa65..89162a048 100644
--- a/host/lib/usrp/e300/e300_fifo_config.cpp
+++ b/host/lib/usrp/e300/e300_fifo_config.cpp
@@ -1,5 +1,5 @@
//
-// Copyright 2013-2014 Ettus Research LLC
+// Copyright 2013-2017 Ettus Research LLC
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -17,8 +17,9 @@
#ifdef E300_NATIVE
-#include <stdint.h>
#include <uhd/config.hpp>
+#include <stdint.h>
+#include <atomic>
// constants coded into the fpga parameters
static const size_t ZF_CONFIG_BASE = 0x40000000;
@@ -81,7 +82,7 @@ static UHD_INLINE size_t ZF_STREAM_OFF(const size_t which)
#include <fcntl.h> //open, close
#include <poll.h> //poll
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <boost/format.hpp>
#include <boost/thread/thread.hpp> //sleep
#include <uhd/types/time_spec.hpp> //timeout
@@ -101,6 +102,10 @@ struct e300_fifo_poll_waiter
//NOP
}
+ /*!
+ * Waits until the file descriptor fd has data to read.
+ * Access to the file descriptor is thread safe.
+ */
void wait(const double timeout)
{
if (timeout == 0) {
@@ -210,9 +215,9 @@ public:
_index(0),
_waiter(waiter)
{
- //UHD_MSG(status) << boost::format("phys 0x%x") % addrs.phys << std::endl;
- //UHD_MSG(status) << boost::format("data 0x%x") % addrs.data << std::endl;
- //UHD_MSG(status) << boost::format("ctrl 0x%x") % addrs.ctrl << std::endl;
+ //UHD_LOGGER_INFO("E300") << boost::format("phys 0x%x") % addrs.phys ;
+ //UHD_LOGGER_INFO("E300") << boost::format("data 0x%x") % addrs.data ;
+ //UHD_LOGGER_INFO("E300") << boost::format("ctrl 0x%x") % addrs.ctrl ;
const uint32_t sig = zf_peek32(_addrs.ctrl + ARBITER_RD_SIG);
UHD_ASSERT_THROW((sig >> 16) == 0xACE0);
@@ -351,7 +356,7 @@ public:
virtual ~e300_fifo_interface_impl(void)
{
delete _waiter;
- UHD_LOG << "cleanup: munmap" << std::endl;
+ UHD_LOGGER_TRACE("E300") << "cleanup: munmap";
::munmap(_buff, _config.ctrl_length + _config.buff_length);
::close(_fd);
}
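The new doc comment for e300_fifo_poll_waiter::wait() above states that it blocks until the file descriptor has data to read. A reduced sketch of that kind of poll()-based readiness wait with a timeout, in the same style as the E100 code earlier in this diff (names are illustrative, not the actual implementation):

    #include <poll.h>

    // returns true when fd became readable within timeout_s seconds
    bool wait_readable(const int fd, const double timeout_s)
    {
        pollfd pfd;
        pfd.fd = fd;
        pfd.events = POLLIN;
        const int ret = ::poll(&pfd, 1, int(timeout_s * 1e3));
        return ret > 0; // 0 means timeout, negative means error
    }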
diff --git a/host/lib/usrp/e300/e300_impl.cpp b/host/lib/usrp/e300/e300_impl.cpp
index 84f50c67b..4b73ceac7 100644
--- a/host/lib/usrp/e300/e300_impl.cpp
+++ b/host/lib/usrp/e300/e300_impl.cpp
@@ -25,7 +25,7 @@
#include "e300_common.hpp"
#include "e300_remote_codec_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/utils/static.hpp>
#include <uhd/utils/paths.hpp>
@@ -75,11 +75,11 @@ static std::vector<std::string> discover_ip_addrs(
try {
udp_bcast_xport = uhd::transport::udp_simple::make_broadcast(addr_hint, port);
} catch(const std::exception &e) {
- UHD_MSG(error) << boost::format("Cannot open UDP transport on %s for discovery\n%s")
- % addr_hint % e.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << boost::format("Cannot open UDP transport on %s for discovery: %s")
+ % addr_hint % e.what();
return addrs;
} catch(...) {
- UHD_MSG(error) << "E300 Network discovery unknown error" << std::endl;
+ UHD_LOGGER_ERROR("E300") << "E300 Network discovery unknown error";
return addrs;
}
@@ -95,10 +95,10 @@ static std::vector<std::string> discover_ip_addrs(
try {
udp_bcast_xport->send(boost::asio::buffer(&req, sizeof(req)));
} catch (const std::exception &ex) {
- UHD_MSG(error) << "E300 Network discovery error " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "E300 Network discovery error " << ex.what();
return addrs;
} catch(...) {
- UHD_MSG(error) << "E300 Network discovery unknown error" << std::endl;
+ UHD_LOGGER_ERROR("E300") << "E300 Network discovery unknown error";
return addrs;
}
@@ -130,7 +130,7 @@ device_addrs_t e300_find(const device_addr_t &multi_dev_hint)
if (hints.size() > 1) {
device_addrs_t found_devices;
std::string err_msg;
- BOOST_FOREACH(const device_addr_t &hint_i, hints)
+ for(const device_addr_t &hint_i: hints)
{
device_addrs_t found_devices_i = e300_find(hint_i);
if(found_devices_i.size() != 1)
@@ -165,7 +165,7 @@ device_addrs_t e300_find(const device_addr_t &multi_dev_hint)
if (not loopback_only) {
// if no address or node has been specified, send a broadcast
if ((not hint.has_key("addr")) and (not hint.has_key("node"))) {
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs())
+ for(const if_addrs_t &if_addrs: get_if_addrs())
{
// avoid the loopback device
if (is_loopback(if_addrs))
@@ -187,7 +187,7 @@ device_addrs_t e300_find(const device_addr_t &multi_dev_hint)
std::vector<std::string> ip_addrs = discover_ip_addrs(
hint["addr"], E300_SERVER_I2C_PORT);
- BOOST_FOREACH(const std::string &ip_addr, ip_addrs)
+ for(const std::string &ip_addr: ip_addrs)
{
device_addr_t new_addr;
new_addr["type"] = "e3x0";
@@ -260,7 +260,7 @@ device_addrs_t e300_find(const device_addr_t &multi_dev_hint)
**********************************************************************/
static device::sptr e300_make(const device_addr_t &device_addr)
{
- UHD_LOG << "e300_make with args " << device_addr.to_pp_string() << std::endl;
+ UHD_LOGGER_DEBUG("E300") << "e300_make with args " << device_addr.to_pp_string();
if(device_addr.has_key("server"))
throw uhd::runtime_error(
str(boost::format("Please run the server executable \"%s\"")
@@ -295,8 +295,8 @@ void get_e3x0_fpga_images(const uhd::device_addr_t &device_addr,
break;
case e300_eeprom_manager::UNKNOWN:
default:
- UHD_MSG(warning) << "Unknown motherboard type, loading e300 image."
- << std::endl;
+ UHD_LOGGER_WARNING("E300")
+ << "Unknown motherboard type, loading e300 image.";
fpga_image = device_addr.cast<std::string>("fpga",
find_image_path(E300_FPGA_FILE_NAME));
idle_image = find_image_path(E3XX_SG1_FPGA_IDLE_FILE_NAME);
@@ -405,14 +405,14 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
_codec_mgr = ad936x_manager::make(_codec_ctrl, fpga::NUM_RADIOS);
#ifdef E300_GPSD
- UHD_MSG(status) << "Detecting internal GPSDO " << std::flush;
+ UHD_LOGGER_INFO("E300") << "Detecting internal GPSDO ";
try {
if (_xport_path == AXI)
_gps = gpsd_iface::make("localhost", 2947);
else
_gps = gpsd_iface::make(device_addr["addr"], 2947);
} catch (std::exception &e) {
- UHD_MSG(error) << "An error occured making GPSDd interface: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "An error occurred making the GPSD interface: " << e.what();
}
if (_gps) {
@@ -426,12 +426,12 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
break;
}
}
- UHD_MSG(status) << (_gps->gps_detected() ? "found" : "not found") << std::endl;
+ UHD_LOGGER_INFO("E300") << "GPSDO " << (_gps->gps_detected() ? "found" : "not found");
}
#endif
// Verify we can talk to the e300 core control registers ...
- UHD_MSG(status) << "Initializing core control..." << std::endl;
+ UHD_LOGGER_INFO("E300") << "Initializing core control...";
this->_register_loopback_self_test(_global_regs);
// Verify fpga compatibility version matches at least for the major
@@ -468,14 +468,14 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
// and do the misc mboard sensors
////////////////////////////////////////////////////////////////////
_tree->create<int>(mb_path / "sensors");
- BOOST_FOREACH(const std::string &name, _sensor_manager->get_sensors())
+ for(const std::string &name: _sensor_manager->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&e300_sensor_manager::get_sensor, _sensor_manager, name));
}
#ifdef E300_GPSD
if (_gps) {
- BOOST_FOREACH(const std::string &name, _gps->get_sensors())
+ for(const std::string &name: _gps->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gpsd_iface::get_sensor, _gps, name));
@@ -511,7 +511,7 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
this->_setup_radio(instance);
//now test each radio module's connection to the codec interface
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
_codec_mgr->loopback_self_test(
boost::bind(
@@ -524,7 +524,7 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
// internal gpios
////////////////////////////////////////////////////////////////////
gpio_atr_3000::sptr fp_gpio = gpio_atr_3000::make(_radio_perifs[0].ctrl, radio::sr_addr(radio::FP_GPIO), radio::RB32_FP_GPIO);
- BOOST_FOREACH(const gpio_attr_map_t::value_type attr, gpio_attr_map)
+ for(const gpio_attr_map_t::value_type attr: gpio_attr_map)
{
_tree->create<uint32_t>(mb_path / "gpio" / "INT0" / attr.second)
.add_coerced_subscriber(boost::bind(&gpio_atr_3000::set_gpio_attr, fp_gpio, attr.first, _1))
@@ -625,11 +625,11 @@ e300_impl::e300_impl(const uhd::device_addr_t &device_addr)
// subdev spec contains full width of selections
subdev_spec_t rx_spec, tx_spec;
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
{
rx_spec.push_back(subdev_spec_pair_t("A", fe));
}
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
{
tx_spec.push_back(subdev_spec_pair_t("A", fe));
}
@@ -692,11 +692,11 @@ void e300_impl::_enforce_tick_rate_limits(
double e300_impl::_set_tick_rate(const double rate)
{
- UHD_MSG(status) << "Asking for clock rate " << rate/1e6 << " MHz\n";
+ UHD_LOGGER_INFO("E300") << "Asking for clock rate " << rate/1e6 << " MHz";
_tick_rate = _codec_ctrl->set_clock_rate(rate);
- UHD_MSG(status) << "Actually got clock rate " << _tick_rate/1e6 << " MHz\n";
+ UHD_LOGGER_INFO("E300") << "Actually got clock rate " << _tick_rate/1e6 << " MHz";
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
perif.time64->set_tick_rate(_tick_rate);
perif.time64->self_test();
@@ -707,7 +707,7 @@ double e300_impl::_set_tick_rate(const double rate)
void e300_impl::_register_loopback_self_test(wb_iface::sptr iface)
{
bool test_fail = false;
- UHD_MSG(status) << "Performing register loopback test... " << std::flush;
+ UHD_LOGGER_INFO("E300") << "Performing register loopback test... ";
size_t hash = size_t(time(NULL));
for (size_t i = 0; i < 100; i++)
{
@@ -716,7 +716,7 @@ void e300_impl::_register_loopback_self_test(wb_iface::sptr iface)
test_fail = iface->peek32(radio::RB32_TEST) != uint32_t(hash);
if (test_fail) break; //exit loop on any failure
}
- UHD_MSG(status) << ((test_fail)? " fail" : "pass") << std::endl;
+ UHD_LOGGER_INFO("E300") << "Register loopback test " << ((test_fail) ? "failed" : "passed");
}
uint32_t e300_impl::_get_version(compat_t which)
@@ -747,19 +747,20 @@ uint32_t e300_impl::_allocate_sid(const sid_config_t &config)
{
const uint32_t stream = (config.dst_prefix | (config.router_dst_there << 2)) & 0xff;
+ const size_t sid_framer = _sid_framer++; //increment for next setup
const uint32_t sid = 0
| (E300_DEVICE_HERE << 24)
- | (_sid_framer << 16)
+ | (sid_framer << 16)
| (config.router_addr_there << 8)
| (stream << 0)
;
- UHD_LOG << std::hex
+ UHD_LOGGER_DEBUG("E300") << std::hex
<< " sid 0x" << sid
- << " framer 0x" << _sid_framer
+ << " framer 0x" << sid_framer
<< " stream 0x" << stream
<< " router_dst_there 0x" << int(config.router_dst_there)
<< " router_addr_there 0x" << int(config.router_addr_there)
- << std::dec << std::endl;
+ << std::dec ;
// Program the E300 to recognize it's own local address.
_global_regs->poke32(global_regs::SR_CORE_XB_LOCAL, config.router_addr_there);
@@ -775,26 +776,23 @@ uint32_t e300_impl::_allocate_sid(const sid_config_t &config)
_global_regs->poke32(XB_ADDR(E300_DEVICE_HERE),
config.router_dst_here);
- UHD_LOG << std::hex
+ UHD_LOGGER_TRACE("E300") << std::hex
<< "done router config for sid 0x" << sid
- << std::dec << std::endl;
-
- //increment for next setup
- _sid_framer++;
+ << std::dec ;
return sid;
}
void e300_impl::_setup_dest_mapping(const uint32_t sid, const size_t which_stream)
{
- UHD_LOG << boost::format("Setting up dest map for 0x%lx to be stream %d")
- % (sid & 0xff) % which_stream << std::endl;
+ UHD_LOGGER_DEBUG("E300") << boost::format("Setting up dest map for 0x%lx to be stream %d")
+ % (sid & 0xff) % which_stream ;
_global_regs->poke32(DST_ADDR(sid & 0xff), which_stream);
}
void e300_impl::_update_time_source(const std::string &source)
{
- UHD_MSG(status) << boost::format("Setting time source to %s") % source << std::endl;
+ UHD_LOGGER_INFO("E300") << boost::format("Setting time source to %s") % source;
if (source == "none" or source == "internal") {
_misc.pps_sel = global_regs::PPS_INT;
#ifdef E300_GPSD
@@ -811,7 +809,7 @@ void e300_impl::_update_time_source(const std::string &source)
void e300_impl::_set_time(const uhd::time_spec_t& t)
{
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
perif.time64->set_time_sync(t);
_misc.time_sync = 1;
_update_gpio_state();
@@ -1037,7 +1035,7 @@ void e300_impl::_setup_radio(const size_t dspno)
// create RF frontend interfacing
////////////////////////////////////////////////////////////////////
static const std::vector<direction_t> dirs = boost::assign::list_of(RX_DIRECTION)(TX_DIRECTION);
- BOOST_FOREACH(direction_t dir, dirs) {
+ for(direction_t dir: dirs) {
const std::string x = (dir == RX_DIRECTION) ? "rx" : "tx";
const std::string key = boost::to_upper_copy(x) + std::string(((dspno == FE0)? "1" : "2"));
const fs_path rf_fe_path
diff --git a/host/lib/usrp/e300/e300_impl.hpp b/host/lib/usrp/e300/e300_impl.hpp
index 50d78fdd4..cc2e39e23 100644
--- a/host/lib/usrp/e300/e300_impl.hpp
+++ b/host/lib/usrp/e300/e300_impl.hpp
@@ -47,6 +47,7 @@
#include "e300_i2c.hpp"
#include "e300_eeprom_manager.hpp"
#include "e300_sensor_manager.hpp"
+#include <atomic>
/* if we don't compile with gpsd support, don't bother */
#ifdef E300_GPSD
@@ -288,7 +289,7 @@ private: // members
uhd::device_addr_t _device_addr;
xport_t _xport_path;
e300_fifo_interface::sptr _fifo_iface;
- size_t _sid_framer;
+ std::atomic<size_t> _sid_framer;
radio_perifs_t _radio_perifs[2];
double _tick_rate;
ad9361_ctrl::sptr _codec_ctrl;
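Changing _sid_framer to std::atomic<size_t> here, together with capturing the post-increment into a local in _allocate_sid() earlier in this diff, guarantees a unique framer index per allocation even when SIDs are requested from several threads. A reduced, standalone sketch of the idiom (names are illustrative):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    static std::atomic<size_t> sid_framer(0);

    uint32_t next_framer_field()
    {
        // post-increment on std::atomic is a single fetch-and-add,
        // so concurrent callers can never observe the same value
        const size_t framer = sid_framer++;
        return uint32_t((framer & 0xff) << 16);
    }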
diff --git a/host/lib/usrp/e300/e300_io_impl.cpp b/host/lib/usrp/e300/e300_io_impl.cpp
index 2514b7f40..4490afa21 100644
--- a/host/lib/usrp/e300/e300_io_impl.cpp
+++ b/host/lib/usrp/e300/e300_io_impl.cpp
@@ -26,7 +26,6 @@
#include <boost/bind.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/log.hpp>
-#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
using namespace uhd;
@@ -43,7 +42,7 @@ static const uint32_t HW_SEQ_NUM_MASK = 0xfff;
void e300_impl::_check_tick_rate_with_current_streamers(const double rate)
{
size_t max_tx_chan_count = 0, max_rx_chan_count = 0;
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
{
boost::shared_ptr<sph::recv_packet_streamer> rx_streamer =
@@ -73,7 +72,7 @@ void e300_impl::_update_tick_rate(const double rate)
{
_check_tick_rate_with_current_streamers(rate);
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
boost::shared_ptr<sph::recv_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::recv_packet_streamer>(perif.rx_streamer.lock());
@@ -81,7 +80,7 @@ void e300_impl::_update_tick_rate(const double rate)
my_streamer->set_tick_rate(rate);
perif.framer->set_tick_rate(_tick_rate);
}
- BOOST_FOREACH(radio_perifs_t &perif, _radio_perifs)
+ for(radio_perifs_t &perif: _radio_perifs)
{
boost::shared_ptr<sph::send_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::send_packet_streamer>(perif.tx_streamer.lock());
@@ -336,7 +335,7 @@ static void handle_tx_async_msgs(boost::shared_ptr<e300_tx_fc_cache_t> fc_cache,
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "Error parsing async message packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "Error parsing async message packet: " << ex.what() ;
return;
}
@@ -483,9 +482,9 @@ rx_streamer::sptr e300_impl::get_rx_stream(const uhd::stream_args_t &args_)
E300_RX_SW_BUFF_FULLNESS);
const size_t fc_handle_window = std::max<size_t>(1, fc_window / E300_RX_FC_REQUEST_FREQ);
- UHD_LOG << "RX Flow Control Window = " << fc_window
+ UHD_LOGGER_DEBUG("E300") << "RX Flow Control Window = " << fc_window
<< ", RX Flow Control Handler Window = "
- << fc_handle_window << std::endl;
+ << fc_handle_window ;
perif.framer->configure_flow_control(fc_window);
boost::shared_ptr<e300_rx_fc_cache_t> fc_cache(new e300_rx_fc_cache_t());
@@ -591,9 +590,9 @@ tx_streamer::sptr e300_impl::get_tx_stream(const uhd::stream_args_t &args_)
const size_t fc_window = data_xports.send->get_num_send_frames();
const size_t fc_handle_window = std::max<size_t>(1, fc_window/E300_TX_FC_RESPONSE_FREQ);
- UHD_LOG << "TX Flow Control Window = " << fc_window
+ UHD_LOGGER_DEBUG("E300") << "TX Flow Control Window = " << fc_window
<< ", TX Flow Control Handler Window = "
- << fc_handle_window << std::endl;
+ << fc_handle_window ;
perif.deframer->configure_flow_control(0/*cycs off*/, fc_handle_window/*pkts*/);
boost::shared_ptr<e300_tx_fc_cache_t> fc_cache(new e300_tx_fc_cache_t());
diff --git a/host/lib/usrp/e300/e300_network.cpp b/host/lib/usrp/e300/e300_network.cpp
index e68f2a54d..f3045eb2d 100644
--- a/host/lib/usrp/e300/e300_network.cpp
+++ b/host/lib/usrp/e300/e300_network.cpp
@@ -31,7 +31,7 @@
#include "e300_common.hpp"
#include "e300_remote_codec_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/paths.hpp>
@@ -88,7 +88,7 @@ static void e300_recv_tunnel(
//step 1 - get the buffer
managed_recv_buffer::sptr buff = recver->get_recv_buff();
if (not buff) continue;
- if (E300_NETWORK_DEBUG) UHD_MSG(status) << name << " got " << buff->size() << std::endl;
+ if (E300_NETWORK_DEBUG) UHD_LOGGER_INFO("E300") << name << " got " << buff->size();
//step 1.5 -- update endpoint
{
@@ -102,13 +102,13 @@ static void e300_recv_tunnel(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_recv_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_recv_tunnel exit " << name << " " << ex.what();
}
catch(...)
{
- UHD_MSG(error) << "e300_recv_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_recv_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_recv_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_recv_tunnel exit " << name;
*running = false;
}
@@ -136,7 +136,7 @@ static void e300_send_tunnel(
while (not wait_for_recv_ready(recver->native(), 100) and *running){}
if (not *running) break;
const size_t num_bytes = recver->receive_from(asio::buffer(buff->cast<void *>(), buff->size()), _rx_endpoint);
- if (E300_NETWORK_DEBUG) UHD_MSG(status) << name << " got " << num_bytes << std::endl;
+ if (E300_NETWORK_DEBUG) UHD_LOGGER_INFO("E300") << name << " got " << num_bytes;
//step 2.5 -- update endpoint
{
@@ -150,13 +150,13 @@ static void e300_send_tunnel(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_send_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_send_tunnel exit " << name << " " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "e300_send_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_send_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_send_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_send_tunnel exit " << name;
*running = false;
}
@@ -252,7 +252,7 @@ static void e300_codec_ctrl_tunnel(
out->bw = _codec_ctrl->set_bw_filter(which_str, in->bw);
break;
default:
- UHD_MSG(status) << "Got unknown request?!" << std::endl;
+ UHD_LOGGER_INFO("E300") << "Got unknown request?!";
//Zero out actions to fail this request on client
out->action = uhd::htonx<uint32_t>(0);
}
@@ -262,13 +262,13 @@ static void e300_codec_ctrl_tunnel(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_ctrl_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_ctrl_tunnel exit " << name << " " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "e300_ctrl_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_ctrl_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_ctrl_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_ctrl_tunnel exit " << name;
*running = false;
}
@@ -309,13 +309,13 @@ static void e300_global_regs_tunnel(
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_gregs_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_gregs_tunnel exit " << name << " " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "e300_gregs_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_gregs_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_gregs_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_gregs_tunnel exit " << name;
*running = false;
}
@@ -353,20 +353,20 @@ static void e300_sensor_tunnel(
in->value = uhd::htonx<uint32_t>(
sensor_manager->get_ref_lock().to_bool() ? 1 : 0);
} else
- UHD_MSG(status) << "Got unknown request?!" << std::endl;
+ UHD_LOGGER_INFO("E300") << "Got unknown request?!";
socket->send_to(asio::buffer(in_buff, sizeof(sensor_transaction_t)), *endpoint);
}
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_sensor_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_sensor_tunnel exit " << name << " " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "e300_sensor_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_sensor_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_sensor_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_sensor_tunnel exit " << name;
*running = false;
}
@@ -419,19 +419,19 @@ static void e300_i2c_tunnel(
}
} else {
- UHD_MSG(error) << "e300_i2c_tunnel could not handle message." << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_i2c_tunnel could not handle message." ;
}
}
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "e300_i2c_tunnel exit " << name << " " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_i2c_tunnel exit " << name << " " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "e300_i2c_tunnel exit " << name << std::endl;
+ UHD_LOGGER_ERROR("E300") << "e300_i2c_tunnel exit " << name ;
}
- UHD_MSG(status) << "e300_i2c_tunnel exit " << name << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300_i2c_tunnel exit " << name;
*running = false;
}
@@ -491,7 +491,7 @@ void network_server_impl::_run_server(
//boost::shared_ptr<asio::ip::udp::acceptor> acceptor(new asio::ip::udp::acceptor(io_service, endpoint));
while (not boost::this_thread::interruption_requested())
{
- UHD_MSG(status) << "e300 run server on port " << port << " for " << what << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300 run server on port " << port << " for " << what;
try
{
//while (not wait_for_recv_ready(acceptor->native(), 100))
@@ -501,7 +501,7 @@ void network_server_impl::_run_server(
boost::shared_ptr<asio::ip::udp::socket> socket;
socket.reset(new asio::ip::udp::socket(io_service, endpoint));
//acceptor->accept(*socket);
- UHD_MSG(status) << "e300 socket accept on port " << port << " for " << what << std::endl;
+ UHD_LOGGER_INFO("E300") << "e300 socket accept on port " << port << " for " << what;
//asio::ip::udp::no_delay option(true);
//socket->set_option(option);
boost::thread_group tg;
@@ -586,8 +586,8 @@ network_server_impl::network_server_impl(const uhd::device_addr_t &device_addr)
break;
case e300_eeprom_manager::UNKNOWN:
default:
- UHD_MSG(warning) << "Unknown motherboard type, loading e300 image."
- << std::endl;
+ UHD_LOGGER_WARNING("E300")
+ << "Unknown motherboard type, loading e300 image.";
fpga_image = find_image_path(E300_FPGA_FILE_NAME);
break;
}
diff --git a/host/lib/usrp/e300/e300_remote_codec_ctrl.cpp b/host/lib/usrp/e300/e300_remote_codec_ctrl.cpp
index b7c46c17d..d970d7221 100644
--- a/host/lib/usrp/e300/e300_remote_codec_ctrl.cpp
+++ b/host/lib/usrp/e300/e300_remote_codec_ctrl.cpp
@@ -242,7 +242,7 @@ public:
//! Write back a filter
void set_filter(const std::string &, const std::string &, const filter_info_base::sptr)
{
- UHD_MSG(warning) << "Attempting to set filter on E300 in network mode." << std::endl;
+ UHD_LOGGER_WARNING("E300") << "Attempting to set filter on E300 in network mode." ;
}
void output_digital_test_tone(UHD_UNUSED(bool enb))
diff --git a/host/lib/usrp/e300/e300_sysfs_hooks.cpp b/host/lib/usrp/e300/e300_sysfs_hooks.cpp
index 7bd1d6d7e..bcfca8b92 100644
--- a/host/lib/usrp/e300/e300_sysfs_hooks.cpp
+++ b/host/lib/usrp/e300/e300_sysfs_hooks.cpp
@@ -32,7 +32,7 @@
#include <boost/format.hpp>
#include <boost/lexical_cast.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
static const std::string E300_AXI_FPGA_SYSFS = "40000000.axi-fpga";
diff --git a/host/lib/usrp/gps_ctrl.cpp b/host/lib/usrp/gps_ctrl.cpp
index 447a13c33..a5d1748fe 100644
--- a/host/lib/usrp/gps_ctrl.cpp
+++ b/host/lib/usrp/gps_ctrl.cpp
@@ -16,25 +16,25 @@
//
#include <uhd/usrp/gps_ctrl.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/sensors.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/assign/list_of.hpp>
#include <stdint.h>
-#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/thread/thread.hpp>
#include <boost/tokenizer.hpp>
#include <boost/format.hpp>
#include <boost/regex.hpp>
#include <boost/thread/mutex.hpp>
+#include <ctime>
+#include <string>
+#include <boost/date_time.hpp>
#include "boost/tuple/tuple.hpp"
-#include "boost/foreach.hpp"
using namespace uhd;
-using namespace boost::gregorian;
using namespace boost::posix_time;
using namespace boost::algorithm;
using namespace boost::this_thread;
@@ -92,7 +92,7 @@ private:
sentences[which].get<2>() = true;
}
} catch(std::exception &e) {
- UHD_LOGV(often) << "get_sentence: " << e.what();
+ UHD_LOGGER_DEBUG("GPS") << "get_sentence: " << e.what();
}
if (not sentence.empty() or now > exit_time)
@@ -159,7 +159,7 @@ private:
if (msg.length() < 6)
{
- UHD_LOGV(regularly) << __FUNCTION__ << ": Short GPSDO string: " << msg << std::endl;
+ UHD_LOGGER_WARNING("GPS") << __FUNCTION__ << ": Short GPSDO string: " << msg ;
continue;
}
@@ -174,14 +174,14 @@ private:
}
else
{
- UHD_LOGV(regularly) << __FUNCTION__ << ": Malformed GPSDO string: " << msg << std::endl;
+ UHD_LOGGER_WARNING("GPS") << __FUNCTION__ << ": Malformed GPSDO string: " << msg ;
}
}
boost::system_time time = boost::get_system_time();
// Update sentences with newly read data
- BOOST_FOREACH(std::string key, keys)
+ for(std::string key: keys)
{
if (not msgs[key].empty())
{
@@ -234,7 +234,7 @@ public:
if(i_heard_some_nmea) {
_gps_type = GPS_TYPE_GENERIC_NMEA;
} else if(i_heard_something_weird) {
- UHD_MSG(error) << "GPS invalid reply \"" << reply << "\", assuming none available" << std::endl;
+ UHD_LOGGER_ERROR("GPS") << "GPS invalid reply \"" << reply << "\", assuming none available";
}
}
@@ -242,17 +242,17 @@ public:
case GPS_TYPE_INTERNAL_GPSDO:
erase_all(reply, "\r");
erase_all(reply, "\n");
- UHD_MSG(status) << "Found an internal GPSDO: " << reply << std::endl;
+ UHD_LOGGER_INFO("GPS") << "Found an internal GPSDO: " << reply;
init_gpsdo();
break;
case GPS_TYPE_GENERIC_NMEA:
- UHD_MSG(status) << "Found a generic NMEA GPS device" << std::endl;
+ UHD_LOGGER_INFO("GPS") << "Found a generic NMEA GPS device";
break;
case GPS_TYPE_NONE:
default:
- UHD_MSG(status) << "No GPSDO found" << std::endl;
+ UHD_LOGGER_INFO("GPS") << "No GPSDO found";
break;
}
@@ -349,20 +349,20 @@ private:
throw uhd::value_error(str(boost::format("Invalid response \"%s\"") % reply));
}
- //just trust me on this one
- gps_time = ptime( date(
- greg_year(boost::lexical_cast<int>(datestr.substr(4, 2)) + 2000),
- greg_month(boost::lexical_cast<int>(datestr.substr(2, 2))),
- greg_day(boost::lexical_cast<int>(datestr.substr(0, 2)))
- ),
- hours( boost::lexical_cast<int>(timestr.substr(0, 2)))
- + minutes(boost::lexical_cast<int>(timestr.substr(2, 2)))
- + seconds(boost::lexical_cast<int>(timestr.substr(4, 2)))
- );
+ struct tm raw_date = {};
+ raw_date.tm_year = std::stoi(datestr.substr(4, 2)) + 2000 - 1900; // years since 1900
+ raw_date.tm_mon = std::stoi(datestr.substr(2, 2)) - 1; // months since january (0-11)
+ raw_date.tm_mday = std::stoi(datestr.substr(0, 2)); // dom (1-31)
+ raw_date.tm_hour = std::stoi(timestr.substr(0, 2));
+ raw_date.tm_min = std::stoi(timestr.substr(2, 2));
+ raw_date.tm_sec = std::stoi(timestr.substr(4, 2));
+ gps_time = boost::posix_time::ptime_from_tm(raw_date);
+
+ UHD_LOG_TRACE("GPS", "GPS time: " + boost::posix_time::to_simple_string(gps_time));
return gps_time;
} catch(std::exception &e) {
- UHD_LOGV(often) << "get_time: " << e.what();
+ UHD_LOGGER_DEBUG("GPS") << "get_time: " << e.what();
error_cnt++;
}
}
@@ -389,7 +389,7 @@ private:
else
return (get_token(reply, 6) != "0");
} catch(std::exception &e) {
- UHD_LOGV(often) << "locked: " << e.what();
+ UHD_LOGGER_DEBUG("GPS") << "locked: " << e.what();
error_cnt++;
}
}
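The replacement above assembles the GPS time by filling a std::tm from the GPRMC date (ddmmyy) and time (hhmmss) substrings and converting it with boost::posix_time::ptime_from_tm(), instead of constructing boost gregorian date and time_duration objects by hand. A condensed sketch of that conversion (same field layout assumed; not the exact class member):

    #include <boost/date_time/posix_time/posix_time.hpp>
    #include <ctime>
    #include <string>

    boost::posix_time::ptime gprmc_to_ptime(
        const std::string &datestr, // "ddmmyy"
        const std::string &timestr) // "hhmmss"
    {
        std::tm raw_date = {};
        raw_date.tm_year = std::stoi(datestr.substr(4, 2)) + 2000 - 1900;
        raw_date.tm_mon  = std::stoi(datestr.substr(2, 2)) - 1;
        raw_date.tm_mday = std::stoi(datestr.substr(0, 2));
        raw_date.tm_hour = std::stoi(timestr.substr(0, 2));
        raw_date.tm_min  = std::stoi(timestr.substr(2, 2));
        raw_date.tm_sec  = std::stoi(timestr.substr(4, 2));
        return boost::posix_time::ptime_from_tm(raw_date);
    }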
diff --git a/host/lib/usrp/gpsd_iface.cpp b/host/lib/usrp/gpsd_iface.cpp
index 98b359135..c5d4a1745 100644
--- a/host/lib/usrp/gpsd_iface.cpp
+++ b/host/lib/usrp/gpsd_iface.cpp
@@ -31,7 +31,7 @@
#include <uhd/exception.hpp>
#include <uhd/usrp/gps_ctrl.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/dict.hpp>
#include "gpsd_iface.hpp"
diff --git a/host/lib/usrp/mboard_eeprom.cpp b/host/lib/usrp/mboard_eeprom.cpp
index 4cf58cd05..ecae5e56d 100644
--- a/host/lib/usrp/mboard_eeprom.cpp
+++ b/host/lib/usrp/mboard_eeprom.cpp
@@ -22,7 +22,6 @@
#include <boost/asio/ip/address_v4.hpp>
#include <boost/assign/list_of.hpp>
#include <boost/lexical_cast.hpp>
-#include <boost/foreach.hpp>
#include <algorithm>
#include <iostream>
#include <cstddef>
@@ -600,7 +599,7 @@ template <typename T> static const byte_vector_t to_bytes(const T &item){
}
#define sizeof_member(struct_name, member_name) \
- sizeof(reinterpret_cast<struct_name*>(NULL)->member_name)
+ sizeof(reinterpret_cast<struct_name*>(0)->member_name)
static void load_e100(mboard_eeprom_t &mb_eeprom, i2c_iface &iface){
const size_t num_bytes = offsetof(e100_eeprom_map, model);
diff --git a/host/lib/usrp/multi_usrp.cpp b/host/lib/usrp/multi_usrp.cpp
index ac0f52255..0910ad59d 100644
--- a/host/lib/usrp/multi_usrp.cpp
+++ b/host/lib/usrp/multi_usrp.cpp
@@ -17,7 +17,7 @@
#include <uhd/property_tree.hpp>
#include <uhd/usrp/multi_usrp.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
#include <uhd/utils/math.hpp>
@@ -30,7 +30,6 @@
#include "legacy_compat.hpp"
#include <boost/assign/list_of.hpp>
#include <boost/thread.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/algorithm/string.hpp>
#include <algorithm>
@@ -66,7 +65,7 @@ static void do_samp_rate_warning_message(
){
static const double max_allowed_error = 1.0; //Sps
if (std::abs(target_rate - actual_rate) > max_allowed_error){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("MULTI_USRP") << boost::format(
"The hardware does not support the requested %s sample rate:\n"
"Target sample rate: %f MSps\n"
"Actual sample rate: %f MSps\n"
@@ -97,7 +96,7 @@ static void do_samp_rate_warning_message(
if(requested_freq_success and target_freq_success and rf_lo_tune_success
and dsp_tune_success) {
- UHD_MSG(status) << boost::format(
+ UHD_LOGGER_INFO("MULTI_USRP") << boost::format(
"Successfully tuned to %f MHz\n\n")
% (actual_freq / 1e6);
} else {
@@ -120,7 +119,7 @@ static void do_samp_rate_warning_message(
results_string += rf_lo_message.str();
- UHD_MSG(status) << results_string;
+ UHD_LOGGER_INFO("MULTI_USRP") << results_string;
return;
}
@@ -174,7 +173,7 @@ static void do_samp_rate_warning_message(
results_string += failure_message.str();
}
- UHD_MSG(warning) << results_string << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << results_string ;
}
}*/
@@ -190,7 +189,7 @@ static meta_range_t make_overall_tune_range(
const double bw
){
meta_range_t range;
- BOOST_FOREACH(const range_t &sub_range, fe_range){
+ for(const range_t &sub_range: fe_range){
range.push_back(range_t(
sub_range.start() + std::max(dsp_range.start(), -bw/2),
sub_range.stop() + std::min(dsp_range.stop(), bw/2),
@@ -262,7 +261,8 @@ static tune_result_t tune_xx_subdev_and_dsp(
* tune_request. This lo_offset is based on the requirements of the FE, and
* does not reflect a user-requested lo_offset, which is handled later. */
double lo_offset = 0.0;
- if (rf_fe_subtree->access<bool>("use_lo_offset").get()){
+ if (rf_fe_subtree->exists("use_lo_offset") and
+ rf_fe_subtree->access<bool>("use_lo_offset").get()){
// If the frontend has lo_offset value and range properties, trust it
// for lo_offset
if (rf_fe_subtree->exists("lo_offset/value")) {
@@ -459,7 +459,7 @@ public:
if (_tree->exists(mb_root(mboard) / "auto_tick_rate")
and _tree->access<bool>(mb_root(mboard) / "auto_tick_rate").get()) {
_tree->access<bool>(mb_root(mboard) / "auto_tick_rate").set(false);
- UHD_MSG(status) << "Setting master clock rate selection to 'manual'." << std::endl;
+ UHD_LOGGER_INFO("MULTI_USRP") << "Setting master clock rate selection to 'manual'.";
}
_tree->access<double>(mb_root(mboard) / "tick_rate").set(rate);
return;
@@ -557,7 +557,7 @@ public:
}
void set_time_unknown_pps(const time_spec_t &time_spec){
- UHD_MSG(status) << " 1) catch time transition at pps edge" << std::endl;
+ UHD_LOGGER_INFO("MULTI_USRP") << " 1) catch time transition at pps edge";
boost::system_time end_time = boost::get_system_time() + boost::posix_time::milliseconds(1100);
time_spec_t time_start_last_pps = get_time_last_pps();
while (time_start_last_pps == get_time_last_pps())
@@ -573,7 +573,7 @@ public:
boost::this_thread::sleep(boost::posix_time::milliseconds(1));
}
- UHD_MSG(status) << " 2) set times next pps (synchronously)" << std::endl;
+ UHD_LOGGER_INFO("MULTI_USRP") << " 2) set times next pps (synchronously)";
set_time_next_pps(time_spec, ALL_MBOARDS);
boost::this_thread::sleep(boost::posix_time::seconds(1));
@@ -582,7 +582,7 @@ public:
time_spec_t time_0 = this->get_time_now(0);
time_spec_t time_i = this->get_time_now(m);
if (time_i < time_0 or (time_i - time_0) > time_spec_t(0.01)){ //10 ms: greater than RTT but not too big
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("MULTI_USRP") << boost::format(
"Detected time deviation between board %d and board 0.\n"
"Board 0 time is %f seconds.\n"
"Board %d time is %f seconds.\n"
@@ -797,7 +797,7 @@ public:
{
throw uhd::index_error(str(boost::format("multi_usrp::get_rx_subdev_spec(%u) failed to make default spec - %s") % mboard % e.what()));
}
- UHD_MSG(status) << "Selecting default RX front end spec: " << spec.to_pp_string() << std::endl;
+ UHD_LOGGER_INFO("MULTI_USRP") << "Selecting default RX front end spec: " << spec.to_pp_string();
}
return spec;
}
@@ -873,7 +873,7 @@ public:
std::vector<std::string> get_rx_lo_names(size_t chan = 0){
std::vector<std::string> lo_names;
if (_tree->exists(rx_rf_fe_root(chan) / "los")) {
- BOOST_FOREACH(const std::string &name, _tree->list(rx_rf_fe_root(chan) / "los")) {
+ for(const std::string &name: _tree->list(rx_rf_fe_root(chan) / "los")) {
lo_names.push_back(name);
}
}
@@ -887,7 +887,7 @@ public:
//Special value ALL_LOS support atomically sets the source for all LOs
_tree->access<std::string>(rx_rf_fe_root(chan) / "los" / ALL_LOS / "source" / "value").set(src);
} else {
- BOOST_FOREACH(const std::string &n, _tree->list(rx_rf_fe_root(chan) / "los")) {
+ for(const std::string &n: _tree->list(rx_rf_fe_root(chan) / "los")) {
this->set_rx_lo_source(src, n, chan);
}
}
@@ -950,7 +950,7 @@ public:
//Special value ALL_LOS support atomically sets the source for all LOs
_tree->access<bool>(rx_rf_fe_root(chan) / "los" / ALL_LOS / "export").set(enabled);
} else {
- BOOST_FOREACH(const std::string &n, _tree->list(rx_rf_fe_root(chan) / "los")) {
+ for(const std::string &n: _tree->list(rx_rf_fe_root(chan) / "los")) {
this->set_rx_lo_export_enabled(enabled, n, chan);
}
}
@@ -1041,7 +1041,7 @@ public:
if (_tree->exists(rx_rf_fe_root(chan) / "gain" / "agc")) {
bool agc = _tree->access<bool>(rx_rf_fe_root(chan) / "gain" / "agc" / "enable").get();
if(agc) {
- UHD_MSG(warning) << "AGC enabled for this channel. Setting will be ignored." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "AGC enabled for this channel. Setting will be ignored." ;
}
}
} else {
@@ -1049,7 +1049,7 @@ public:
if (_tree->exists(rx_rf_fe_root(c) / "gain" / "agc")) {
bool agc = _tree->access<bool>(rx_rf_fe_root(chan) / "gain" / "agc" / "enable").get();
if(agc) {
- UHD_MSG(warning) << "AGC enabled for this channel. Setting will be ignored." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "AGC enabled for this channel. Setting will be ignored." ;
}
}
}
@@ -1079,7 +1079,7 @@ public:
if (_tree->exists(rx_rf_fe_root(chan) / "gain" / "agc" / "enable")) {
_tree->access<bool>(rx_rf_fe_root(chan) / "gain" / "agc" / "enable").set(enable);
} else {
- UHD_MSG(warning) << "AGC is not available on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "AGC is not available on this device." ;
}
return;
}
@@ -1157,7 +1157,11 @@ public:
}
std::vector<std::string> get_rx_sensor_names(size_t chan){
- return _tree->list(rx_rf_fe_root(chan) / "sensors");
+ std::vector<std::string> sensor_names;
+ if (_tree->exists(rx_rf_fe_root(chan) / "sensors")) {
+ sensor_names = _tree->list(rx_rf_fe_root(chan) / "sensors");
+ }
+ return sensor_names;
}
void set_rx_dc_offset(const bool enb, size_t chan){
@@ -1168,7 +1172,7 @@ public:
/*For B2xx devices the dc-offset correction is implemented in the rf front-end*/
_tree->access<bool>(rx_rf_fe_root(chan) / "dc_offset" / "enable").set(enb);
} else {
- UHD_MSG(warning) << "Setting DC offset compensation is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting DC offset compensation is not possible on this device." ;
}
return;
}
@@ -1182,7 +1186,7 @@ public:
if (_tree->exists(rx_fe_root(chan) / "dc_offset" / "value")) {
_tree->access<std::complex<double> >(rx_fe_root(chan) / "dc_offset" / "value").set(offset);
} else {
- UHD_MSG(warning) << "Setting DC offset is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting DC offset is not possible on this device." ;
}
return;
}
@@ -1196,7 +1200,7 @@ public:
if (_tree->exists(rx_rf_fe_root(chan) / "iq_balance" / "enable")) {
_tree->access<bool>(rx_rf_fe_root(chan) / "iq_balance" / "enable").set(enb);
} else {
- UHD_MSG(warning) << "Setting IQ imbalance compensation is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting IQ imbalance compensation is not possible on this device." ;
}
return;
}
@@ -1210,7 +1214,7 @@ public:
if (_tree->exists(rx_fe_root(chan) / "iq_balance" / "value")) {
_tree->access<std::complex<double> >(rx_fe_root(chan) / "iq_balance" / "value").set(offset);
} else {
- UHD_MSG(warning) << "Setting IQ balance is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting IQ balance is not possible on this device." ;
}
return;
}
@@ -1337,7 +1341,7 @@ public:
{
throw uhd::index_error(str(boost::format("multi_usrp::get_tx_subdev_spec(%u) failed to make default spec - %s") % mboard % e.what()));
}
- UHD_MSG(status) << "Selecting default TX front end spec: " << spec.to_pp_string() << std::endl;
+ UHD_LOGGER_INFO("MULTI_USRP") << "Selecting default TX front end spec: " << spec.to_pp_string();
}
return spec;
}
@@ -1497,7 +1501,11 @@ public:
}
std::vector<std::string> get_tx_sensor_names(size_t chan){
- return _tree->list(tx_rf_fe_root(chan) / "sensors");
+ std::vector<std::string> sensor_names;
+ if (_tree->exists(tx_rf_fe_root(chan) / "sensors")) {
+ sensor_names = _tree->list(tx_rf_fe_root(chan) / "sensors");
+ }
+ return sensor_names;
}
void set_tx_dc_offset(const std::complex<double> &offset, size_t chan){
@@ -1505,7 +1513,7 @@ public:
if (_tree->exists(tx_fe_root(chan) / "dc_offset" / "value")) {
_tree->access<std::complex<double> >(tx_fe_root(chan) / "dc_offset" / "value").set(offset);
} else {
- UHD_MSG(warning) << "Setting DC offset is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting DC offset is not possible on this device." ;
}
return;
}
@@ -1519,7 +1527,7 @@ public:
if (_tree->exists(tx_fe_root(chan) / "iq_balance" / "value")) {
_tree->access<std::complex<double> >(tx_fe_root(chan) / "iq_balance" / "value").set(offset);
} else {
- UHD_MSG(warning) << "Setting IQ balance is not possible on this device." << std::endl;
+ UHD_LOGGER_WARNING("MULTI_USRP") << "Setting IQ balance is not possible on this device." ;
}
return;
}
@@ -1536,12 +1544,12 @@ public:
std::vector<std::string> banks;
if (_tree->exists(mb_root(mboard) / "gpio"))
{
- BOOST_FOREACH(const std::string &name, _tree->list(mb_root(mboard) / "gpio"))
+ for(const std::string &name: _tree->list(mb_root(mboard) / "gpio"))
{
banks.push_back(name);
}
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_root(mboard) / "dboards"))
+ for(const std::string &name: _tree->list(mb_root(mboard) / "dboards"))
{
banks.push_back("RX"+name);
banks.push_back("TX"+name);
@@ -1755,8 +1763,12 @@ private:
{
try
{
- const std::string name = _tree->list("/mboards").at(mboard);
- return "/mboards/" + name;
+ const std::string tree_path = "/mboards/" + boost::lexical_cast<std::string>(mboard);
+ if (_tree->exists(tree_path)) {
+ return tree_path;
+ } else {
+ throw uhd::index_error(str(boost::format("multi_usrp::mb_root(%u) - path not found") % mboard));
+ }
}
catch(const std::exception &e)
{
@@ -1779,8 +1791,12 @@ private:
try
{
- const std::string name = _tree->list(mb_root(mcp.mboard) / "rx_dsps").at(mcp.chan);
- return mb_root(mcp.mboard) / "rx_dsps" / name;
+ const std::string tree_path = mb_root(mcp.mboard) / "rx_dsps" / boost::lexical_cast<std::string>(mcp.chan);
+ if (_tree->exists(tree_path)) {
+ return tree_path;
+ } else {
+ throw uhd::index_error(str(boost::format("multi_usrp::rx_dsp_root(%u) - mcp(%u) - path not found") % chan % mcp.chan));
+ }
}
catch(const std::exception &e)
{
@@ -1802,8 +1818,12 @@ private:
}
try
{
- const std::string name = _tree->list(mb_root(mcp.mboard) / "tx_dsps").at(mcp.chan);
- return mb_root(mcp.mboard) / "tx_dsps" / name;
+ const std::string tree_path = mb_root(mcp.mboard) / "tx_dsps" / boost::lexical_cast<std::string>(mcp.chan);
+ if (_tree->exists(tree_path)) {
+ return tree_path;
+ } else {
+ throw uhd::index_error(str(boost::format("multi_usrp::tx_dsp_root(%u) - mcp(%u) - path not found") % chan % mcp.chan));
+ }
}
catch(const std::exception &e)
{
@@ -1877,10 +1897,10 @@ private:
mboard_chan_pair mcp = rx_chan_to_mcp(chan);
const subdev_spec_pair_t spec = get_rx_subdev_spec(mcp.mboard).at(mcp.chan);
gain_group::sptr gg = gain_group::make();
- BOOST_FOREACH(const std::string &name, _tree->list(mb_root(mcp.mboard) / "rx_codecs" / spec.db_name / "gains")){
+ for(const std::string &name: _tree->list(mb_root(mcp.mboard) / "rx_codecs" / spec.db_name / "gains")){
gg->register_fcns("ADC-"+name, make_gain_fcns_from_subtree(_tree->subtree(mb_root(mcp.mboard) / "rx_codecs" / spec.db_name / "gains" / name)), 0 /* low prio */);
}
- BOOST_FOREACH(const std::string &name, _tree->list(rx_rf_fe_root(chan) / "gains")){
+ for(const std::string &name: _tree->list(rx_rf_fe_root(chan) / "gains")){
gg->register_fcns(name, make_gain_fcns_from_subtree(_tree->subtree(rx_rf_fe_root(chan) / "gains" / name)), 1 /* high prio */);
}
return gg;
@@ -1890,10 +1910,10 @@ private:
mboard_chan_pair mcp = tx_chan_to_mcp(chan);
const subdev_spec_pair_t spec = get_tx_subdev_spec(mcp.mboard).at(mcp.chan);
gain_group::sptr gg = gain_group::make();
- BOOST_FOREACH(const std::string &name, _tree->list(mb_root(mcp.mboard) / "tx_codecs" / spec.db_name / "gains")){
+ for(const std::string &name: _tree->list(mb_root(mcp.mboard) / "tx_codecs" / spec.db_name / "gains")){
gg->register_fcns("DAC-"+name, make_gain_fcns_from_subtree(_tree->subtree(mb_root(mcp.mboard) / "tx_codecs" / spec.db_name / "gains" / name)), 1 /* high prio */);
}
- BOOST_FOREACH(const std::string &name, _tree->list(tx_rf_fe_root(chan) / "gains")){
+ for(const std::string &name: _tree->list(tx_rf_fe_root(chan) / "gains")){
gg->register_fcns(name, make_gain_fcns_from_subtree(_tree->subtree(tx_rf_fe_root(chan) / "gains" / name)), 0 /* low prio */);
}
return gg;
@@ -1907,7 +1927,7 @@ private:
size_t bytes_per_sample = convert::get_bytes_per_item(args.otw_format.empty() ? "sc16" : args.otw_format);
double max_link_rate = 0;
double sum_rate = 0;
- BOOST_FOREACH(const size_t chan, args.channels) {
+ for(const size_t chan: args.channels) {
mboard_chan_pair mcp = is_tx ? tx_chan_to_mcp(chan) : rx_chan_to_mcp(chan);
if (_tree->exists(mb_root(mcp.mboard) / "link_max_rate")) {
max_link_rate = std::max(
@@ -1919,10 +1939,10 @@ private:
}
sum_rate /= get_num_mboards();
if (max_link_rate > 0 and (max_link_rate / bytes_per_sample) < sum_rate) {
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("MULTI_USRP") << boost::format(
"The total sum of rates (%f MSps on %u channels) exceeds the maximum capacity of the connection.\n"
"This can cause %s."
- ) % (sum_rate/1e6) % args.channels.size() % (is_tx ? "underruns (U)" : "overflows (O)") << std::endl;
+ ) % (sum_rate/1e6) % args.channels.size() % (is_tx ? "underruns (U)" : "overflows (O)") ;
link_rate_is_ok = false;
}
@@ -1938,6 +1958,6 @@ multi_usrp::~multi_usrp(void){
* The Make Function
**********************************************************************/
multi_usrp::sptr multi_usrp::make(const device_addr_t &dev_addr){
- UHD_LOG << "multi_usrp::make with args " << dev_addr.to_pp_string() << std::endl;
+ UHD_LOGGER_TRACE("MULTI_USRP") << "multi_usrp::make with args " << dev_addr.to_pp_string() ;
return sptr(new multi_usrp_impl(dev_addr));
}
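Several multi_usrp hunks above add the same defensive pattern: check _tree->exists(path) before list()/access(), so a device without the optional subtree (for example, front-end "sensors") yields an empty result instead of throwing. A minimal sketch of the pattern with a hypothetical helper name:

    #include <uhd/property_tree.hpp>
    #include <string>
    #include <vector>

    std::vector<std::string> list_if_present(
        uhd::property_tree::sptr tree, const uhd::fs_path &path)
    {
        std::vector<std::string> names;
        if (tree->exists(path)) { // absent subtree -> empty list, no throw
            names = tree->list(path);
        }
        return names;
    }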
diff --git a/host/lib/usrp/n230/CMakeLists.txt b/host/lib/usrp/n230/CMakeLists.txt
index 9eaccffba..8bed1f26d 100644
--- a/host/lib/usrp/n230/CMakeLists.txt
+++ b/host/lib/usrp/n230/CMakeLists.txt
@@ -33,5 +33,6 @@ IF(ENABLE_N230)
${CMAKE_CURRENT_SOURCE_DIR}/n230_frontend_ctrl.cpp
${CMAKE_CURRENT_SOURCE_DIR}/n230_uart.cpp
${CMAKE_CURRENT_SOURCE_DIR}/n230_image_loader.cpp
+ ${CMAKE_CURRENT_SOURCE_DIR}/n230_fw_ctrl_iface.cpp
)
ENDIF(ENABLE_N230)
diff --git a/host/lib/usrp/n230/n230_clk_pps_ctrl.cpp b/host/lib/usrp/n230/n230_clk_pps_ctrl.cpp
index 2bcfb0394..372325fbe 100644
--- a/host/lib/usrp/n230/n230_clk_pps_ctrl.cpp
+++ b/host/lib/usrp/n230/n230_clk_pps_ctrl.cpp
@@ -17,11 +17,10 @@
#include "n230_clk_pps_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <stdint.h>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <stdexcept>
#include <cmath>
#include <cstdlib>
@@ -56,11 +55,11 @@ public:
double set_tick_rate(const double rate)
{
- UHD_MSG(status) << "Configuring a tick rate of " << rate/1e6 << " MHz... ";
+ UHD_LOGGER_INFO("N230") << "Configuring a tick rate of " << rate/1e6 << " MHz... ";
_tick_rate = _codec_ctrl->set_clock_rate(rate);
- UHD_MSG(status) << "got " << _tick_rate/1e6 << " MHz\n";
+ UHD_LOGGER_INFO("N230") << "got " << _tick_rate/1e6 << " MHz";
- BOOST_FOREACH(time_core_3000::sptr& time_core, _time_cores) {
+ for(time_core_3000::sptr& time_core: _time_cores) {
time_core->set_tick_rate(_tick_rate);
time_core->self_test();
}
diff --git a/host/lib/usrp/n230/n230_eeprom_manager.cpp b/host/lib/usrp/n230/n230_eeprom_manager.cpp
index d9d02c58c..c21eb9ddb 100644
--- a/host/lib/usrp/n230/n230_eeprom_manager.cpp
+++ b/host/lib/usrp/n230/n230_eeprom_manager.cpp
@@ -17,7 +17,7 @@
#include "n230_eeprom.h"
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/mac_addr.hpp>
#include <boost/format.hpp>
diff --git a/host/lib/usrp/n230/n230_frontend_ctrl.cpp b/host/lib/usrp/n230/n230_frontend_ctrl.cpp
index 3d81721ec..80f1232b1 100644
--- a/host/lib/usrp/n230/n230_frontend_ctrl.cpp
+++ b/host/lib/usrp/n230/n230_frontend_ctrl.cpp
@@ -17,7 +17,7 @@
#include "n230_frontend_ctrl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/dict.hpp>
#include <boost/format.hpp>
diff --git a/host/lib/usrp/common/fw_comm_protocol.h b/host/lib/usrp/n230/n230_fw_comm_protocol.h
index 14adb33a9..b7c85f2ba 100644
--- a/host/lib/usrp/common/fw_comm_protocol.h
+++ b/host/lib/usrp/n230/n230_fw_comm_protocol.h
@@ -15,8 +15,8 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#ifndef INCLUDED_FW_COMM_PROTOCOL
-#define INCLUDED_FW_COMM_PROTOCOL
+#ifndef INCLUDED_N230_FW_COMM_PROTOCOL
+#define INCLUDED_N230_FW_COMM_PROTOCOL
#include <stdint.h>
#ifndef __cplusplus
@@ -99,4 +99,4 @@ bool process_fw_comm_protocol_pkt(
#endif //ifdef __cplusplus
-#endif /* INCLUDED_FW_COMM_PROTOCOL */
+#endif /* INCLUDED_N230_FW_COMM_PROTOCOL */
diff --git a/host/lib/usrp/common/usrp3_fw_ctrl_iface.cpp b/host/lib/usrp/n230/n230_fw_ctrl_iface.cpp
index 16ee84140..18c2c4cf8 100644
--- a/host/lib/usrp/common/usrp3_fw_ctrl_iface.cpp
+++ b/host/lib/usrp/n230/n230_fw_ctrl_iface.cpp
@@ -15,34 +15,33 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include "usrp3_fw_ctrl_iface.hpp"
+#include "n230_fw_ctrl_iface.hpp"
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
#include <boost/asio.hpp> //used for htonl and ntohl
-#include <boost/foreach.hpp>
-#include "fw_comm_protocol.h"
+#include "n230_fw_comm_protocol.h"
-namespace uhd { namespace usrp { namespace usrp3 {
+namespace uhd { namespace usrp { namespace n230 {
//----------------------------------------------------------
// Factory method
//----------------------------------------------------------
-uhd::wb_iface::sptr usrp3_fw_ctrl_iface::make(
+uhd::wb_iface::sptr n230_fw_ctrl_iface::make(
uhd::transport::udp_simple::sptr udp_xport,
const uint16_t product_id,
const bool verbose)
{
- return wb_iface::sptr(new usrp3_fw_ctrl_iface(udp_xport, product_id, verbose));
+ return wb_iface::sptr(new n230_fw_ctrl_iface(udp_xport, product_id, verbose));
}
//----------------------------------------------------------
// udp_fw_ctrl_iface
//----------------------------------------------------------
-usrp3_fw_ctrl_iface::usrp3_fw_ctrl_iface(
+n230_fw_ctrl_iface::n230_fw_ctrl_iface(
uhd::transport::udp_simple::sptr udp_xport,
const uint16_t product_id,
const bool verbose) :
@@ -53,18 +52,18 @@ usrp3_fw_ctrl_iface::usrp3_fw_ctrl_iface(
peek32(0);
}
-usrp3_fw_ctrl_iface::~usrp3_fw_ctrl_iface()
+n230_fw_ctrl_iface::~n230_fw_ctrl_iface()
{
flush();
}
-void usrp3_fw_ctrl_iface::flush()
+void n230_fw_ctrl_iface::flush()
{
boost::mutex::scoped_lock lock(_mutex);
_flush();
}
-void usrp3_fw_ctrl_iface::poke32(const wb_addr_type addr, const uint32_t data)
+void n230_fw_ctrl_iface::poke32(const wb_addr_type addr, const uint32_t data)
{
boost::mutex::scoped_lock lock(_mutex);
@@ -75,13 +74,13 @@ void usrp3_fw_ctrl_iface::poke32(const wb_addr_type addr, const uint32_t data)
} catch(const std::exception &ex) {
const std::string error_msg = str(boost::format(
"udp fw poke32 failure #%u\n%s") % i % ex.what());
- if (_verbose) UHD_MSG(warning) << error_msg << std::endl;
+ if (_verbose) UHD_LOGGER_WARNING("N230") << error_msg ;
if (i == NUM_RETRIES) throw uhd::io_error(error_msg);
}
}
}
-uint32_t usrp3_fw_ctrl_iface::peek32(const wb_addr_type addr)
+uint32_t n230_fw_ctrl_iface::peek32(const wb_addr_type addr)
{
boost::mutex::scoped_lock lock(_mutex);
@@ -91,14 +90,14 @@ uint32_t usrp3_fw_ctrl_iface::peek32(const wb_addr_type addr)
} catch(const std::exception &ex) {
const std::string error_msg = str(boost::format(
"udp fw peek32 failure #%u\n%s") % i % ex.what());
- if (_verbose) UHD_MSG(warning) << error_msg << std::endl;
+ if (_verbose) UHD_LOGGER_WARNING("N230") << error_msg ;
if (i == NUM_RETRIES) throw uhd::io_error(error_msg);
}
}
return 0;
}
-void usrp3_fw_ctrl_iface::_poke32(const wb_addr_type addr, const uint32_t data)
+void n230_fw_ctrl_iface::_poke32(const wb_addr_type addr, const uint32_t data)
{
//Load request struct
fw_comm_pkt_t request;
@@ -129,7 +128,7 @@ void usrp3_fw_ctrl_iface::_poke32(const wb_addr_type addr, const uint32_t data)
UHD_ASSERT_THROW(reply.data[0] == request.data[0]);
}
-uint32_t usrp3_fw_ctrl_iface::_peek32(const wb_addr_type addr)
+uint32_t n230_fw_ctrl_iface::_peek32(const wb_addr_type addr)
{
//Load request struct
fw_comm_pkt_t request;
@@ -162,7 +161,7 @@ uint32_t usrp3_fw_ctrl_iface::_peek32(const wb_addr_type addr)
return uhd::ntohx<uint32_t>(reply.data[0]);
}
-void usrp3_fw_ctrl_iface::_flush(void)
+void n230_fw_ctrl_iface::_flush(void)
{
char buff[FW_COMM_PROTOCOL_MTU] = {};
while (_udp_xport->recv(boost::asio::buffer(buff), 0.0)) {
@@ -170,7 +169,7 @@ void usrp3_fw_ctrl_iface::_flush(void)
}
}
-std::vector<std::string> usrp3_fw_ctrl_iface::discover_devices(
+std::vector<std::string> n230_fw_ctrl_iface::discover_devices(
const std::string& addr_hint, const std::string& port,
uint16_t product_id)
{
@@ -183,8 +182,8 @@ std::vector<std::string> usrp3_fw_ctrl_iface::discover_devices(
try {
udp_bcast_xport = uhd::transport::udp_simple::make_broadcast(addr_hint, port);
} catch(const std::exception &e) {
- UHD_MSG(error) << boost::format("Cannot open UDP transport on %s for discovery\n%s")
- % addr_hint % e.what() << std::endl;
+ UHD_LOGGER_ERROR("N230") << boost::format("Cannot open UDP transport on %s for discovery\n%s")
+ % addr_hint % e.what() ;
return addrs;
}
@@ -213,7 +212,7 @@ std::vector<std::string> usrp3_fw_ctrl_iface::discover_devices(
return addrs;
}
-uint32_t usrp3_fw_ctrl_iface::get_iface_id(
+uint32_t n230_fw_ctrl_iface::get_iface_id(
const std::string& addr, const std::string& port,
uint16_t product_id)
{
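The poke32/peek32 hunks above keep the firmware transaction's retry discipline while moving the diagnostics to the new logger: each failed attempt is logged as a warning for the "N230" component, and the final failure is rethrown as uhd::io_error. Below is a minimal standalone sketch of that pattern; try_poke32 is a hypothetical helper (not the actual class method), and the caller-supplied callback stands in for the UDP request/reply exchange.

#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
#include <boost/format.hpp>
#include <cstdint>
#include <functional>
#include <string>

// Hypothetical helper illustrating the retry-and-log pattern of poke32() above.
// 'do_poke' stands in for the UDP request/reply transaction against the firmware.
void try_poke32(const std::function<void(uint32_t, uint32_t)>& do_poke,
                const uint32_t addr, const uint32_t data,
                const size_t num_retries = 3, const bool verbose = true)
{
    for (size_t i = 1; i <= num_retries; i++) {
        try {
            do_poke(addr, data);
            return; // success: no need to retry
        } catch (const std::exception& ex) {
            const std::string error_msg = str(boost::format(
                "udp fw poke32 failure #%u\n%s") % i % ex.what());
            if (verbose) UHD_LOGGER_WARNING("N230") << error_msg;
            if (i == num_retries) throw uhd::io_error(error_msg); // give up on the last attempt
        }
    }
}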
diff --git a/host/lib/usrp/common/usrp3_fw_ctrl_iface.hpp b/host/lib/usrp/n230/n230_fw_ctrl_iface.hpp
index 9dc35ef9e..675c843b3 100644
--- a/host/lib/usrp/common/usrp3_fw_ctrl_iface.hpp
+++ b/host/lib/usrp/n230/n230_fw_ctrl_iface.hpp
@@ -15,24 +15,24 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#ifndef INCLUDED_LIBUHD_USRP_USRP3_UDP_FW_CTRL_IFACE_HPP
-#define INCLUDED_LIBUHD_USRP_USRP3_UDP_FW_CTRL_IFACE_HPP
+#ifndef INCLUDED_LIBUHD_USRP_N230_UDP_FW_CTRL_IFACE_HPP
+#define INCLUDED_LIBUHD_USRP_N230_UDP_FW_CTRL_IFACE_HPP
#include <uhd/types/wb_iface.hpp>
#include <uhd/transport/udp_simple.hpp>
#include <boost/thread/mutex.hpp>
#include <vector>
-namespace uhd { namespace usrp { namespace usrp3 {
+namespace uhd { namespace usrp { namespace n230 {
-class usrp3_fw_ctrl_iface : public uhd::wb_iface
+class n230_fw_ctrl_iface : public uhd::wb_iface
{
public:
- usrp3_fw_ctrl_iface(
+ n230_fw_ctrl_iface(
uhd::transport::udp_simple::sptr udp_xport,
const uint16_t product_id,
const bool verbose);
- virtual ~usrp3_fw_ctrl_iface();
+ virtual ~n230_fw_ctrl_iface();
// -- uhd::wb_iface --
void poke32(const wb_addr_type addr, const uint32_t data);
@@ -69,4 +69,4 @@ private:
}}} //namespace
-#endif //INCLUDED_LIBUHD_USRP_USRP3_USRP3_UDP_FW_CTRL_HPP
+#endif // INCLUDED_LIBUHD_USRP_N230_UDP_FW_CTRL_IFACE_HPP
diff --git a/host/lib/usrp/n230/n230_impl.cpp b/host/lib/usrp/n230/n230_impl.cpp
index 015140fcc..63971fb34 100644
--- a/host/lib/usrp/n230/n230_impl.cpp
+++ b/host/lib/usrp/n230/n230_impl.cpp
@@ -17,7 +17,7 @@
#include "n230_impl.hpp"
-#include "usrp3_fw_ctrl_iface.hpp"
+#include "n230_fw_ctrl_iface.hpp"
#include "validate_subdev_spec.hpp"
#include <uhd/utils/static.hpp>
#include <uhd/transport/if_addrs.hpp>
@@ -25,7 +25,7 @@
#include <uhd/usrp/subdev_spec.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/types/sensors.hpp>
#include <uhd/types/ranges.hpp>
#include <uhd/types/direction.hpp>
@@ -33,7 +33,6 @@
#include <uhd/usrp/dboard_eeprom.hpp>
#include <uhd/usrp/gps_ctrl.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/bind.hpp>
#include <boost/algorithm/string.hpp>
@@ -42,7 +41,6 @@
#include <boost/asio.hpp> //used for htonl and ntohl
#include <boost/make_shared.hpp>
-#include "../common/fw_comm_protocol.h"
#include "n230_defaults.h"
#include "n230_fpga_defs.h"
#include "n230_fw_defs.h"
@@ -71,7 +69,7 @@ uhd::device_addrs_t n230_impl::n230_find(const uhd::device_addr_t &multi_dev_hin
if (hints.size() > 1){
device_addrs_t found_devices;
std::string error_msg;
- BOOST_FOREACH(const device_addr_t &hint_i, hints){
+ for(const device_addr_t &hint_i: hints){
device_addrs_t found_devices_i = n230_find(hint_i);
if (found_devices_i.size() != 1) error_msg += str(boost::format(
"Could not resolve device hint \"%s\" to a single device."
@@ -98,7 +96,7 @@ uhd::device_addrs_t n230_impl::n230_find(const uhd::device_addr_t &multi_dev_hin
//if no address was specified, send a broadcast on each interface
if (not hint.has_key("addr")) {
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs()) {
+ for(const if_addrs_t &if_addrs: get_if_addrs()) {
//avoid the loopback device
if (if_addrs.inet == asio::ip::address_v4::loopback().to_string()) continue;
@@ -116,10 +114,10 @@ uhd::device_addrs_t n230_impl::n230_find(const uhd::device_addr_t &multi_dev_hin
}
std::vector<std::string> discovered_addrs =
- usrp3::usrp3_fw_ctrl_iface::discover_devices(
+ n230_fw_ctrl_iface::discover_devices(
hint["addr"], BOOST_STRINGIZE(N230_FW_COMMS_UDP_PORT), N230_FW_PRODUCT_ID);
- BOOST_FOREACH(const std::string& addr, discovered_addrs)
+ for(const std::string& addr: discovered_addrs)
{
device_addr_t new_addr;
new_addr["type"] = "n230";
@@ -136,10 +134,10 @@ uhd::device_addrs_t n230_impl::n230_find(const uhd::device_addr_t &multi_dev_hin
//connected communication can fail. Retry the following call to allow
//the stack to update
size_t first_conn_retries = 10;
- usrp3::usrp3_fw_ctrl_iface::sptr fw_ctrl;
+ n230_fw_ctrl_iface::sptr fw_ctrl;
while (first_conn_retries > 0) {
try {
- fw_ctrl = usrp3::usrp3_fw_ctrl_iface::make(ctrl_xport, N230_FW_PRODUCT_ID, false /*verbose*/);
+ fw_ctrl = n230_fw_ctrl_iface::make(ctrl_xport, N230_FW_PRODUCT_ID, false /*verbose*/);
break;
} catch (uhd::io_error& ex) {
boost::this_thread::sleep(boost::posix_time::milliseconds(500));
@@ -192,7 +190,7 @@ device::sptr n230_impl::n230_make(const device_addr_t &device_addr)
**********************************************************************/
n230_impl::n230_impl(const uhd::device_addr_t& dev_addr)
{
- UHD_MSG(status) << "N230 initialization sequence..." << std::endl;
+ UHD_LOGGER_INFO("N230") << "N230 initialization sequence...";
_dev_args.parse(dev_addr);
_tree = uhd::property_tree::make();
@@ -210,9 +208,9 @@ n230_impl::n230_impl(const uhd::device_addr_t& dev_addr)
const mboard_eeprom_t& mb_eeprom = _eeprom_mgr->get_mb_eeprom();
bool recover_mb_eeprom = dev_addr.has_key("recover_mb_eeprom");
if (recover_mb_eeprom) {
- UHD_MSG(warning) << "UHD is operating in EEPROM Recovery Mode which disables hardware version "
+ UHD_LOGGER_WARNING("N230") << "UHD is operating in EEPROM Recovery Mode which disables hardware version "
"checks.\nOperating in this mode may cause hardware damage and unstable "
- "radio performance!"<< std::endl;
+ "radio performance!";
}
uint16_t hw_rev = boost::lexical_cast<uint16_t>(mb_eeprom["revision"]);
uint16_t hw_rev_compat = boost::lexical_cast<uint16_t>(mb_eeprom["revision_compat"]);
@@ -234,11 +232,11 @@ n230_impl::n230_impl(const uhd::device_addr_t& dev_addr)
//Debug loopback mode
switch(_dev_args.get_loopback_mode()) {
case n230_device_args_t::LOOPBACK_RADIO:
- UHD_MSG(status) << "DEBUG: Running in TX->RX Radio loopback mode.\n";
+ UHD_LOGGER_INFO("N230") << "DEBUG: Running in TX->RX Radio loopback mode.";
_resource_mgr->get_frontend_ctrl().set_self_test_mode(LOOPBACK_RADIO);
break;
case n230_device_args_t::LOOPBACK_CODEC:
- UHD_MSG(status) << "DEBUG: Running in TX->RX CODEC loopback mode.\n";
+ UHD_LOGGER_INFO("N230") << "DEBUG: Running in TX->RX CODEC loopback mode.";
_resource_mgr->get_frontend_ctrl().set_self_test_mode(LOOPBACK_CODEC);
break;
default:
@@ -397,11 +395,11 @@ void n230_impl::_initialize_property_tree(const fs_path& mb_path)
// Initialize subdev specs
//------------------------------------------------------------------
subdev_spec_t rx_spec, tx_spec;
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "rx_frontends"))
{
rx_spec.push_back(subdev_spec_pair_t("A", fe));
}
- BOOST_FOREACH(const std::string &fe, _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
+ for(const std::string &fe: _tree->list(mb_path / "dboards" / "A" / "tx_frontends"))
{
tx_spec.push_back(subdev_spec_pair_t("A", fe));
}
@@ -437,7 +435,7 @@ void n230_impl::_initialize_property_tree(const fs_path& mb_path)
//------------------------------------------------------------------
if (_resource_mgr->is_gpsdo_present()) {
uhd::gps_ctrl::sptr gps_ctrl = _resource_mgr->get_gps_ctrl();
- BOOST_FOREACH(const std::string &name, gps_ctrl->get_sensors())
+ for(const std::string &name: gps_ctrl->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, gps_ctrl, name));
@@ -499,7 +497,7 @@ void n230_impl::_initialize_radio_properties(const fs_path& mb_path, size_t inst
//RF Frontend Interfacing
static const std::vector<direction_t> data_directions = boost::assign::list_of(RX_DIRECTION)(TX_DIRECTION);
- BOOST_FOREACH(direction_t direction, data_directions) {
+ for(direction_t direction: data_directions) {
const std::string dir_str = (direction == RX_DIRECTION) ? "rx" : "tx";
const std::string key = boost::to_upper_copy(dir_str) + str(boost::format("%u") % (instance + 1));
const fs_path rf_fe_path = mb_path / "dboards" / "A" / (dir_str + "_frontends") / ((instance==0)?"A":"B");
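The BOOST_FOREACH removals in this file are a mechanical rewrite to C++11 range-based for loops. The sketch below shows the before/after shape on a plain std::vector rather than the UHD types used above.

#include <iostream>
#include <string>
#include <vector>

int main()
{
    const std::vector<std::string> frontends{"A", "B"};

    // Old form (required <boost/foreach.hpp>):
    //   BOOST_FOREACH(const std::string &fe, frontends) { ... }
    //
    // New form used throughout this patch: C++11 range-based for.
    for (const std::string& fe : frontends) {
        std::cout << "frontend " << fe << std::endl;
    }
    return 0;
}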
diff --git a/host/lib/usrp/n230/n230_resource_manager.cpp b/host/lib/usrp/n230/n230_resource_manager.cpp
index b96de542a..61a80ae43 100644
--- a/host/lib/usrp/n230/n230_resource_manager.cpp
+++ b/host/lib/usrp/n230/n230_resource_manager.cpp
@@ -17,11 +17,11 @@
#include "n230_resource_manager.hpp"
-#include "usrp3_fw_ctrl_iface.hpp"
+#include "n230_fw_ctrl_iface.hpp"
#include <uhd/transport/if_addrs.hpp>
#include <uhd/transport/udp_zero_copy.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/platform.hpp>
#include <uhd/utils/paths.hpp>
#include <boost/format.hpp>
@@ -58,7 +58,7 @@ public:
}
}
clocking_mode_t get_clocking_mode() {
- return AD9361_XTAL_N_CLK_PATH;
+ return clocking_mode_t::AD9361_XTAL_N_CLK_PATH;
}
digital_interface_mode_t get_digital_interface_mode() {
return AD9361_DDR_FDD_LVDS;
@@ -80,18 +80,18 @@ n230_resource_manager::n230_resource_manager(
_safe_mode(safe_mode),
_last_host_enpoint(0)
{
- if (_safe_mode) UHD_MSG(warning) << "Initializing device in safe mode\n";
- UHD_MSG(status) << "Setup basic communication...\n";
+ if (_safe_mode) UHD_LOGGER_WARNING("N230") << "Initializing device in safe mode\n";
+ UHD_LOGGER_INFO("N230") << "Setup basic communication...";
//Discover ethernet interfaces
bool dual_eth_expected = (ip_addrs.size() > 1);
- BOOST_FOREACH(const std::string& addr, ip_addrs) {
+ for(const std::string& addr: ip_addrs) {
n230_eth_conn_t conn_iface;
conn_iface.ip_addr = addr;
uint32_t iface_id = 0xFFFFFFFF;
try {
- iface_id = usrp3::usrp3_fw_ctrl_iface::get_iface_id(
+ iface_id = n230::n230_fw_ctrl_iface::get_iface_id(
conn_iface.ip_addr, BOOST_STRINGIZE(N230_FW_COMMS_UDP_PORT), N230_FW_PRODUCT_ID);
} catch (uhd::io_error&) {
throw uhd::io_error(str(boost::format(
@@ -118,7 +118,7 @@ n230_resource_manager::n230_resource_manager(
}
//Create firmware communication interface
- _fw_ctrl = usrp3::usrp3_fw_ctrl_iface::make(
+ _fw_ctrl = n230::n230_fw_ctrl_iface::make(
transport::udp_simple::make_connected(
_get_conn(PRI_ETH).ip_addr, BOOST_STRINGIZE(N230_FW_COMMS_UDP_PORT)), N230_FW_PRODUCT_ID);
if (_fw_ctrl.get() == NULL) {
@@ -145,7 +145,7 @@ n230_resource_manager::n230_resource_manager(
}
_check_fpga_compat();
- UHD_MSG(status) << boost::format("Version signatures... Firmware:%s FPGA:%s...\n")
+ UHD_LOGGER_INFO("N230") << boost::format("Version signatures... Firmware:%s FPGA:%s...")
% _fw_version.get_hash_str() % _fpga_version.get_hash_str();
_core_radio_ctrl_reg.initialize(*_core_ctrl, true /*flush*/);
@@ -160,7 +160,7 @@ n230_resource_manager::n230_resource_manager(
}
//Create AD9361 interface
- UHD_MSG(status) << "Initializing CODEC...\n";
+ UHD_LOGGER_INFO("N230") << "Initializing CODEC...";
_codec_ctrl = ad9361_ctrl::make_spi(
boost::make_shared<n230_ad9361_client_t>(), _core_spi_ctrl, fpga::AD9361_SPI_SLAVE_NUM);
if (_codec_ctrl.get() == NULL) {
@@ -210,7 +210,7 @@ n230_resource_manager::n230_resource_manager(
//Create GPSDO interface
if (_core_status_reg.read(fpga::core_status_reg_t::GPSDO_STATUS) != fpga::GPSDO_ST_ABSENT) {
- UHD_MSG(status) << "Detecting GPSDO.... " << std::flush;
+ UHD_LOGGER_INFO("N230") << "Detecting GPSDO.... ";
try {
const sid_t gps_uart_sid = _generate_sid(GPS_UART, _get_conn(PRI_ETH).type);
transport::zero_copy_if::sptr gps_uart_xport =
@@ -221,7 +221,7 @@ n230_resource_manager::n230_resource_manager(
boost::this_thread::sleep(boost::posix_time::seconds(1)); //allow for a little propagation
_gps_ctrl = gps_ctrl::make(_gps_uart);
} catch(std::exception &e) {
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("N230") << "An error occurred making GPSDO control: " << e.what() ;
}
if (not is_gpsdo_present()) {
_core_ctrl->poke32(fpga::sr_addr(fpga::SR_CORE_GPSDO_ST), fpga::GPSDO_ST_ABSENT);
@@ -276,7 +276,7 @@ transport::zero_copy_if::sptr n230_resource_manager::create_transport(
return xport;
}
-bool n230_resource_manager::is_device_claimed(uhd::usrp::usrp3::usrp3_fw_ctrl_iface::sptr fw_ctrl)
+bool n230_resource_manager::is_device_claimed(n230_fw_ctrl_iface::sptr fw_ctrl)
{
boost::mutex::scoped_lock(_claimer_mutex);
@@ -488,7 +488,7 @@ bool n230_resource_manager::_radio_data_loopback_self_test(wb_iface::sptr iface)
const uint32_t rb_rx = uint32_t(rb_word64 & 0xffffffff);
test_fail = word32 != rb_tx or word32 != rb_rx;
if (test_fail){
- UHD_MSG(fastpath) << boost::format("mismatch (exp:%x, got:%x and %x)... ") % word32 % rb_tx % rb_rx;
+ UHD_LOG_ERROR("N230", str(boost::format("mismatch (exp:%x, got:%x and %x)... ") % word32 % rb_tx % rb_rx));
break; //exit loop on any failure
}
}
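The get_clocking_mode() hunk above qualifies the return value with clocking_mode_t::, which is the usual consequence of the underlying type becoming a scoped enum. A hypothetical minimal reproduction follows; this enum is a stand-in, not the actual AD9361 driver definition.

// Stand-in scoped enum; the real definition lives in the AD9361 driver headers.
enum class clocking_mode_t { AD9361_XTAL_N_CLK_PATH, AD9361_XTAL_P_CLK_PATH };

clocking_mode_t get_clocking_mode()
{
    // With an 'enum class', the bare enumerator name no longer compiles;
    // it must be qualified, as in the hunk above.
    return clocking_mode_t::AD9361_XTAL_N_CLK_PATH;
}

int main()
{
    return (get_clocking_mode() == clocking_mode_t::AD9361_XTAL_N_CLK_PATH) ? 0 : 1;
}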
diff --git a/host/lib/usrp/n230/n230_resource_manager.hpp b/host/lib/usrp/n230/n230_resource_manager.hpp
index 180f4f437..6da3b9035 100644
--- a/host/lib/usrp/n230/n230_resource_manager.hpp
+++ b/host/lib/usrp/n230/n230_resource_manager.hpp
@@ -37,7 +37,7 @@
#include <uhd/transport/bounded_buffer.hpp>
#include <uhd/usrp/gps_ctrl.hpp>
-#include "usrp3_fw_ctrl_iface.hpp"
+#include "n230_fw_ctrl_iface.hpp"
#include "n230_clk_pps_ctrl.hpp"
#include "n230_cores.hpp"
#include "n230_fpga_defs.h"
@@ -98,7 +98,7 @@ public: //Methods
n230_resource_manager(const std::vector<std::string> ip_addrs, const bool safe_mode);
virtual ~n230_resource_manager();
- static bool is_device_claimed(uhd::usrp::usrp3::usrp3_fw_ctrl_iface::sptr fw_ctrl);
+ static bool is_device_claimed(n230_fw_ctrl_iface::sptr fw_ctrl);
inline bool is_device_claimed() {
if (_fw_ctrl.get()) {
@@ -276,12 +276,12 @@ private:
ver_info_t _fpga_version;
//Firmware register interface
- uhd::usrp::usrp3::usrp3_fw_ctrl_iface::sptr _fw_ctrl;
+ n230_fw_ctrl_iface::sptr _fw_ctrl;
uhd::task::sptr _claimer_task;
static boost::mutex _claimer_mutex; //All claims and checks in this process are serialized
//Transport
- uint8_t _last_host_enpoint;
+ uint8_t _last_host_enpoint;
//Radio settings interface
radio_ctrl_core_3000::sptr _core_ctrl;
diff --git a/host/lib/usrp/n230/n230_stream_manager.cpp b/host/lib/usrp/n230/n230_stream_manager.cpp
index 0528212d0..95da2752e 100644
--- a/host/lib/usrp/n230/n230_stream_manager.cpp
+++ b/host/lib/usrp/n230/n230_stream_manager.cpp
@@ -24,7 +24,6 @@
#include <boost/bind.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/log.hpp>
-#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
static const double N230_RX_SW_BUFF_FULL_FACTOR = 0.90; //Buffer should ideally be 90% full.
@@ -478,7 +477,7 @@ void n230_stream_manager::_handle_tx_async_msgs(
_cvita_hdr_unpack(packet_buff, if_packet_info);
endian_conv = uhd::ntohx;
} catch(const std::exception &ex) {
- UHD_MSG(error) << "Error parsing async message packet: " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("N230") << "Error parsing async message packet: " << ex.what() ;
return;
}
diff --git a/host/lib/usrp/n230/n230_uart.cpp b/host/lib/usrp/n230/n230_uart.cpp
index 7291a7276..7330dff4a 100644
--- a/host/lib/usrp/n230/n230_uart.cpp
+++ b/host/lib/usrp/n230/n230_uart.cpp
@@ -20,7 +20,7 @@
#include <uhd/transport/bounded_buffer.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/time_spec.hpp>
#include <uhd/exception.hpp>
diff --git a/host/lib/usrp/subdev_spec.cpp b/host/lib/usrp/subdev_spec.cpp
index 6912afec8..b7eb64f87 100644
--- a/host/lib/usrp/subdev_spec.cpp
+++ b/host/lib/usrp/subdev_spec.cpp
@@ -20,7 +20,6 @@
#include <boost/algorithm/string.hpp> //for split
#include <boost/tokenizer.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <sstream>
#include <vector>
@@ -44,8 +43,16 @@ bool usrp::operator==(const subdev_spec_pair_t &lhs, const subdev_spec_pair_t &r
return (lhs.db_name == rhs.db_name) and (lhs.sd_name == rhs.sd_name);
}
+bool subdev_spec_pair_t::operator==(const subdev_spec_pair_t &other){
+ return (other.db_name == db_name) and (other.sd_name == sd_name);
+}
+
+bool subdev_spec_pair_t::operator!=(const subdev_spec_pair_t &other){
+ return (other.db_name != db_name) or (other.sd_name != sd_name);
+}
+
subdev_spec_t::subdev_spec_t(const std::string &markup){
- BOOST_FOREACH(const std::string &pair, pair_tokenizer(markup)){
+ for(const std::string &pair: pair_tokenizer(markup)){
if (pair.empty()) continue;
std::vector<std::string> db_sd; boost::split(db_sd, pair, boost::is_any_of(":"));
switch(db_sd.size()){
@@ -62,7 +69,7 @@ std::string subdev_spec_t::to_pp_string(void) const{
std::stringstream ss;
size_t count = 0;
ss << "Subdevice Specification:" << std::endl;
- BOOST_FOREACH(const subdev_spec_pair_t &pair, *this){
+ for(const subdev_spec_pair_t &pair: *this){
ss << boost::format(
" Channel %d: Daughterboard %s, Subdevice %s"
) % (count++) % pair.db_name % pair.sd_name << std::endl;
@@ -73,7 +80,7 @@ std::string subdev_spec_t::to_pp_string(void) const{
std::string subdev_spec_t::to_string(void) const{
std::string markup;
size_t count = 0;
- BOOST_FOREACH(const subdev_spec_pair_t &pair, *this){
+ for(const subdev_spec_pair_t &pair: *this){
markup += ((count++)? " " : "") + pair.db_name + ":" + pair.sd_name;
}
return markup;
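The member operator== and operator!= defined above let subdev_spec_pair_t instances be compared directly. A small usage sketch, assuming <uhd/usrp/subdev_spec.hpp> and linking against UHD:

#include <uhd/usrp/subdev_spec.hpp>
#include <iostream>

int main()
{
    uhd::usrp::subdev_spec_pair_t a("A", "0");
    uhd::usrp::subdev_spec_pair_t b("B", "0");

    // Exercises the member comparison operators added in this patch.
    std::cout << std::boolalpha
              << (a == a) << " "        // true: same daughterboard and subdevice
              << (a != b) << std::endl; // true: different daughterboard
    return 0;
}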
diff --git a/host/lib/usrp/usrp1/codec_ctrl.cpp b/host/lib/usrp/usrp1/codec_ctrl.cpp
index 4c811d5e2..bb81ec3e7 100644
--- a/host/lib/usrp/usrp1/codec_ctrl.cpp
+++ b/host/lib/usrp/usrp1/codec_ctrl.cpp
@@ -277,9 +277,9 @@ void usrp1_codec_ctrl_impl::send_reg(uint8_t addr)
{
uint32_t reg = _ad9862_regs.get_write_reg(addr);
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("USRP1")
<< "codec control write reg: 0x"
- << std::setw(8) << std::hex << reg << std::endl
+ << std::setw(8) << std::hex << reg
;
_iface->write_spi(_spi_slave,
spi_config_t::EDGE_RISE, reg, 16);
@@ -289,17 +289,17 @@ void usrp1_codec_ctrl_impl::recv_reg(uint8_t addr)
{
uint32_t reg = _ad9862_regs.get_read_reg(addr);
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("USRP1")
<< "codec control read reg: 0x"
- << std::setw(8) << std::hex << reg << std::endl
+ << std::setw(8) << std::hex << reg
;
uint32_t ret = _iface->read_spi(_spi_slave,
spi_config_t::EDGE_RISE, reg, 16);
- UHD_LOGV(often)
+ UHD_LOGGER_TRACE("USRP1")
<< "codec control read ret: 0x"
- << std::setw(8) << std::hex << ret << std::endl
+ << std::setw(8) << std::hex << ret
;
_ad9862_regs.set_reg(addr, uint16_t(ret));
@@ -389,13 +389,13 @@ void usrp1_codec_ctrl_impl::set_duc_freq(double freq, double rate)
double coarse_freq = coarse_tune(codec_rate, freq);
double fine_freq = fine_tune(codec_rate / 4, freq - coarse_freq);
- UHD_LOG
- << "ad9862 tuning result:" << std::endl
- << " requested: " << freq << std::endl
- << " actual: " << coarse_freq + fine_freq << std::endl
- << " coarse freq: " << coarse_freq << std::endl
- << " fine freq: " << fine_freq << std::endl
- << " codec rate: " << codec_rate << std::endl
+ UHD_LOGGER_DEBUG("USRP1")
+ << "ad9862 tuning result:"
+ << " requested: " << freq
+ << " actual: " << coarse_freq + fine_freq
+ << " coarse freq: " << coarse_freq
+ << " fine freq: " << fine_freq
+ << " codec rate: " << codec_rate
;
this->send_reg(20);
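The conversions in this file illustrate the severity mapping used throughout the patch: UHD_LOGV(...) becomes UHD_LOGGER_TRACE, UHD_LOG becomes UHD_LOGGER_DEBUG, and the trailing std::endl insertions are dropped because each statement now produces one complete log entry. A minimal sketch of the streaming API with the component tag used above (assumes the uhd/utils/log.hpp introduced by this patch series):

#include <uhd/utils/log.hpp>
#include <iomanip>
#include <iostream>
#include <cstdint>

// Each statement emits one log entry tagged with a component name;
// no trailing std::endl is required.
void log_reg_write(const uint32_t reg)
{
    UHD_LOGGER_TRACE("USRP1")
        << "codec control write reg: 0x"
        << std::setw(8) << std::hex << reg;

    UHD_LOGGER_DEBUG("USRP1") << "UHD_LOG maps to DEBUG";
    UHD_LOGGER_INFO("USRP1") << "UHD_MSG(status) maps to INFO";
    UHD_LOGGER_WARNING("USRP1") << "UHD_MSG(warning) maps to WARNING";
    UHD_LOGGER_ERROR("USRP1") << "UHD_MSG(error) maps to ERROR";
}

int main()
{
    log_reg_write(0x1234);
    return 0;
}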
diff --git a/host/lib/usrp/usrp1/io_impl.cpp b/host/lib/usrp/usrp1/io_impl.cpp
index 920023dad..7ed1d8671 100644
--- a/host/lib/usrp/usrp1/io_impl.cpp
+++ b/host/lib/usrp/usrp1/io_impl.cpp
@@ -22,7 +22,7 @@
#include "../../transport/super_send_packet_handler.hpp"
#include "usrp1_calc_mux.hpp"
#include "usrp1_impl.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/transport/bounded_buffer.hpp>
@@ -305,19 +305,19 @@ void usrp1_impl::vandal_conquest_loop(void){
if (_tx_enabled and underflow){
async_metadata.time_spec = _soft_time_ctrl->get_time();
_soft_time_ctrl->get_async_queue().push_with_pop_on_full(async_metadata);
- UHD_MSG(fastpath) << "U";
+ UHD_LOG_FASTPATH("U")
}
if (_rx_enabled and overflow){
inline_metadata.time_spec = _soft_time_ctrl->get_time();
_soft_time_ctrl->get_inline_queue().push_with_pop_on_full(inline_metadata);
- UHD_MSG(fastpath) << "O";
+ UHD_LOG_FASTPATH("O")
}
boost::this_thread::sleep(boost::posix_time::milliseconds(50));
}}
catch(const boost::thread_interrupted &){} //normal exit condition
catch(const std::exception &e){
- UHD_MSG(error) << "The vandal caught an unexpected exception " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP1") << "The vandal caught an unexpected exception " << e.what() ;
}
}
@@ -439,7 +439,7 @@ void usrp1_impl::update_rx_subdev_spec(const uhd::usrp::subdev_spec_t &spec){
//set the mux and set the number of rx channels
std::vector<mapping_pair_t> mapping;
- BOOST_FOREACH(const subdev_spec_pair_t &pair, spec){
+ for(const subdev_spec_pair_t &pair: spec){
const std::string conn = _tree->access<std::string>(str(boost::format(
"/mboards/0/dboards/%s/rx_frontends/%s/connection"
) % pair.db_name % pair.sd_name)).get();
@@ -459,7 +459,7 @@ void usrp1_impl::update_tx_subdev_spec(const uhd::usrp::subdev_spec_t &spec){
//set the mux and set the number of tx channels
std::vector<mapping_pair_t> mapping;
- BOOST_FOREACH(const subdev_spec_pair_t &pair, spec){
+ for(const subdev_spec_pair_t &pair: spec){
const std::string conn = _tree->access<std::string>(str(boost::format(
"/mboards/0/dboards/%s/tx_frontends/%s/connection"
) % pair.db_name % pair.sd_name)).get();
@@ -500,11 +500,11 @@ double usrp1_impl::update_rx_samp_rate(size_t dspno, const double samp_rate){
const size_t div = this->has_rx_halfband()? 2 : 1;
const size_t rate = boost::math::iround(_master_clock_rate/this->get_rx_dsp_host_rates().clip(samp_rate, true));
- if (rate < 8 and this->has_rx_halfband()) UHD_MSG(warning) <<
+ if (rate < 8 and this->has_rx_halfband()) UHD_LOGGER_WARNING("USRP1") <<
"USRP1 cannot achieve decimations below 8 when the half-band filter is present.\n"
"The usrp1_fpga_4rx.rbf file is a special FPGA image without RX half-band filters.\n"
"To load this image, set the device address key/value pair: fpga=usrp1_fpga_4rx.rbf\n"
- << std::endl;
+ ;
if (dspno == 0){ //only care if dsp0 is set since its homogeneous
bool s = this->disable_rx();
@@ -548,10 +548,10 @@ double usrp1_impl::update_tx_samp_rate(size_t dspno, const double samp_rate){
void usrp1_impl::update_rates(void){
const fs_path mb_path = "/mboards/0";
this->update_tick_rate(_master_clock_rate);
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "rate" / "value").update();
}
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "tx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "tx_dsps")){
_tree->access<double>(mb_path / "tx_dsps" / name / "rate" / "value").update();
}
}
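The underflow/overflow markers above move from UHD_MSG(fastpath) to UHD_LOG_FASTPATH, which takes the message as a macro argument and is written without a trailing semicolon in this patch. A minimal sketch following that convention; report_stream_event is a hypothetical helper and requires UHD's uhd/utils/log.hpp.

#include <uhd/utils/log.hpp>

// Emit the single-character stream markers the converted code above uses
// ('U' for TX underflow, 'O' for RX overflow). The macro is used without a
// trailing semicolon, matching its use in this patch.
void report_stream_event(const bool underflow, const bool overflow)
{
    if (underflow) {
        UHD_LOG_FASTPATH("U")
    }
    if (overflow) {
        UHD_LOG_FASTPATH("O")
    }
}

int main()
{
    report_stream_event(true, false); // prints "U"
    return 0;
}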
diff --git a/host/lib/usrp/usrp1/usrp1_calc_mux.hpp b/host/lib/usrp/usrp1/usrp1_calc_mux.hpp
index 3577a8042..293d9c37c 100644
--- a/host/lib/usrp/usrp1/usrp1_calc_mux.hpp
+++ b/host/lib/usrp/usrp1/usrp1_calc_mux.hpp
@@ -71,7 +71,7 @@ static uint32_t calc_rx_mux(const std::vector<mapping_pair_t> &mapping){
//calculate the channel flags
int channel_flags = 0;
size_t num_reals = 0, num_quads = 0;
- BOOST_FOREACH(const mapping_pair_t &pair, uhd::reversed(mapping)){
+ for(const mapping_pair_t &pair: uhd::reversed(mapping)){
const std::string name = pair.first, conn = pair.second;
if (conn == "IQ" or conn == "QI") num_quads++;
if (conn == "I" or conn == "Q") num_reals++;
@@ -83,7 +83,7 @@ static uint32_t calc_rx_mux(const std::vector<mapping_pair_t> &mapping){
// for all quadrature sources: Z = 0
// for mixed sources: warning + Z = 0
int Z = (num_quads > 0)? 0 : 1;
- if (num_quads != 0 and num_reals != 0) UHD_MSG(warning) << boost::format(
+ if (num_quads != 0 and num_reals != 0) UHD_LOGGER_WARNING("USRP1") << boost::format(
"Mixing real and quadrature rx subdevices is not supported.\n"
"The Q input to the real source(s) will be non-zero.\n"
);
@@ -132,7 +132,7 @@ static uint32_t calc_tx_mux(const std::vector<mapping_pair_t> &mapping){
//calculate the channel flags
int channel_flags = 0, chan = 0;
uhd::dict<std::string, int> slot_to_chan_count = boost::assign::map_list_of("A", 0)("B", 0);
- BOOST_FOREACH(const mapping_pair_t &pair, mapping){
+ for(const mapping_pair_t &pair: mapping){
const std::string name = pair.first, conn = pair.second;
//combine the channel flags: shift for slot A vs B
diff --git a/host/lib/usrp/usrp1/usrp1_iface.cpp b/host/lib/usrp/usrp1/usrp1_iface.cpp
index b65f8fa2f..9390714ae 100644
--- a/host/lib/usrp/usrp1/usrp1_iface.cpp
+++ b/host/lib/usrp/usrp1/usrp1_iface.cpp
@@ -49,10 +49,10 @@ public:
{
uint32_t swapped = uhd::htonx(value);
- UHD_LOGV(always)
+ UHD_LOGGER_TRACE("USRP1")
<< "poke32("
<< std::dec << std::setw(2) << addr << ", 0x"
- << std::hex << std::setw(8) << value << ")" << std::endl
+ << std::hex << std::setw(8) << value << ")"
;
uint8_t w_index_h = SPI_ENABLE_FPGA & 0xff;
@@ -70,9 +70,9 @@ public:
uint32_t peek32(const uint32_t addr)
{
- UHD_LOGV(always)
+ UHD_LOGGER_TRACE("USRP1")
<< "peek32("
- << std::dec << std::setw(2) << addr << ")" << std::endl
+ << std::dec << std::setw(2) << addr << ")"
;
uint32_t value_out;
@@ -129,12 +129,12 @@ public:
size_t num_bits,
bool readback)
{
- UHD_LOGV(always)
- << "transact_spi: " << std::endl
- << " slave: " << which_slave << std::endl
- << " bits: " << bits << std::endl
- << " num_bits: " << num_bits << std::endl
- << " readback: " << readback << std::endl
+ UHD_LOGGER_TRACE("USRP1")
+ << "transact_spi: "
+ << " slave: " << which_slave
+ << " bits: " << bits
+ << " num_bits: " << num_bits
+ << " readback: " << readback
;
UHD_ASSERT_THROW((num_bits <= 32) && !(num_bits % 8));
size_t num_bytes = num_bits / 8;
diff --git a/host/lib/usrp/usrp1/usrp1_impl.cpp b/host/lib/usrp/usrp1/usrp1_impl.cpp
index 09352c5e0..7c479a447 100644
--- a/host/lib/usrp/usrp1/usrp1_impl.cpp
+++ b/host/lib/usrp/usrp1/usrp1_impl.cpp
@@ -19,7 +19,7 @@
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/transport/usb_control.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/cast.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/static.hpp>
@@ -76,16 +76,16 @@ static device_addrs_t usrp1_find(const device_addr_t &hint)
//find the usrps and load firmware
size_t found = 0;
- BOOST_FOREACH(usb_device_handle::sptr handle, usb_device_handle::get_device_list(vid, pid)) {
+ for(usb_device_handle::sptr handle: usb_device_handle::get_device_list(vid, pid)) {
//extract the firmware path for the USRP1
std::string usrp1_fw_image;
try{
usrp1_fw_image = find_image_path(hint.get("fw", "usrp1_fw.ihx"));
}
catch(...){
- UHD_MSG(warning) << boost::format("Could not locate USRP1 firmware. %s") % print_utility_error("uhd_images_downloader.py");
+ UHD_LOGGER_WARNING("USRP1") << boost::format("Could not locate USRP1 firmware. %s") % print_utility_error("uhd_images_downloader.py");
}
- UHD_LOG << "USRP1 firmware image: " << usrp1_fw_image << std::endl;
+ UHD_LOGGER_DEBUG("USRP1") << "USRP1 firmware image: " << usrp1_fw_image ;
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -104,7 +104,7 @@ static device_addrs_t usrp1_find(const device_addr_t &hint)
//search for the device until found or timeout
while (boost::get_system_time() < timeout_time and usrp1_addrs.empty() and found != 0)
{
- BOOST_FOREACH(usb_device_handle::sptr handle, usb_device_handle::get_device_list(vid, pid))
+ for(usb_device_handle::sptr handle: usb_device_handle::get_device_list(vid, pid))
{
usb_control::sptr control;
try{control = usb_control::make(handle, 0);}
@@ -144,14 +144,14 @@ UHD_STATIC_BLOCK(register_usrp1_device){
* Structors
**********************************************************************/
usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
- UHD_MSG(status) << "Opening a USRP1 device..." << std::endl;
+ UHD_LOGGER_INFO("USRP1") << "Opening a USRP1 device...";
_type = device::USRP;
//extract the FPGA path for the USRP1
std::string usrp1_fpga_image = find_image_path(
device_addr.get("fpga", "usrp1_fpga.rbf")
);
- UHD_LOG << "USRP1 FPGA image: " << usrp1_fpga_image << std::endl;
+ UHD_LOGGER_DEBUG("USRP1") << "USRP1 FPGA image: " << usrp1_fpga_image ;
//try to match the given device address with something on the USB bus
std::vector<usb_device_handle::sptr> device_list =
@@ -159,7 +159,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
//locate the matching handle in the device list
usb_device_handle::sptr handle;
- BOOST_FOREACH(usb_device_handle::sptr dev_handle, device_list) {
+ for(usb_device_handle::sptr dev_handle: device_list) {
if (dev_handle->get_serial() == device_addr["serial"]){
handle = dev_handle;
break;
@@ -190,12 +190,12 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
_iface->poke32(FR_MODE, 0x00000000);
_iface->poke32(FR_DEBUG_EN, 0x00000000);
- UHD_LOG
- << "USRP1 Capabilities" << std::endl
- << " number of duc's: " << get_num_ddcs() << std::endl
- << " number of ddc's: " << get_num_ducs() << std::endl
- << " rx halfband: " << has_rx_halfband() << std::endl
- << " tx halfband: " << has_tx_halfband() << std::endl
+ UHD_LOGGER_DEBUG("USRP1")
+ << "USRP1 Capabilities"
+ << " number of duc's: " << get_num_ddcs()
+ << " number of ddc's: " << get_num_ducs()
+ << " rx halfband: " << has_rx_halfband()
+ << " tx halfband: " << has_tx_halfband()
;
////////////////////////////////////////////////////////////////////
@@ -232,7 +232,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
_master_clock_rate = boost::lexical_cast<double>(device_addr["mcr"]);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Error parsing FPGA clock rate from device address: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP1") << "Error parsing FPGA clock rate from device address: " << e.what() ;
}
}
else if (not mb_eeprom["mcr"].empty()){
@@ -240,10 +240,10 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
_master_clock_rate = boost::lexical_cast<double>(mb_eeprom["mcr"]);
}
catch(const std::exception &e){
- UHD_MSG(error) << "Error parsing FPGA clock rate from EEPROM: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP1") << "Error parsing FPGA clock rate from EEPROM: " << e.what() ;
}
}
- UHD_MSG(status) << boost::format("Using FPGA clock rate of %fMHz...") % (_master_clock_rate/1e6) << std::endl;
+ UHD_LOGGER_INFO("USRP1") << boost::format("Using FPGA clock rate of %fMHz...") % (_master_clock_rate/1e6) ;
_tree->create<double>(mb_path / "tick_rate")
.add_coerced_subscriber(boost::bind(&usrp1_impl::update_tick_rate, this, _1))
.set(_master_clock_rate);
@@ -251,7 +251,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
////////////////////////////////////////////////////////////////////
// create codec control objects
////////////////////////////////////////////////////////////////////
- BOOST_FOREACH(const std::string &db, _dbc.keys()){
+ for(const std::string &db: _dbc.keys()){
_dbc[db].codec = usrp1_codec_ctrl::make(_iface, (db == "A")? SPI_ENABLE_CODEC_A : SPI_ENABLE_CODEC_B);
const fs_path rx_codec_path = mb_path / "rx_codecs" / db;
const fs_path tx_codec_path = mb_path / "tx_codecs" / db;
@@ -284,7 +284,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
.set(subdev_spec_t())
.add_coerced_subscriber(boost::bind(&usrp1_impl::update_tx_subdev_spec, this, _1));
- BOOST_FOREACH(const std::string &db, _dbc.keys()){
+ for(const std::string &db: _dbc.keys()){
const fs_path rx_fe_path = mb_path / "rx_frontends" / db;
_tree->create<std::complex<double> >(rx_fe_path / "dc_offset" / "value")
.set_coercer(boost::bind(&usrp1_impl::set_rx_dc_offset, this, db, _1))
@@ -349,7 +349,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
////////////////////////////////////////////////////////////////////
// create dboard control objects
////////////////////////////////////////////////////////////////////
- BOOST_FOREACH(const std::string &db, _dbc.keys()){
+ for(const std::string &db: _dbc.keys()){
//read the dboard eeprom to extract the dboard ids
dboard_eeprom_t rx_db_eeprom, tx_db_eeprom, gdb_eeprom;
@@ -400,7 +400,7 @@ usrp1_impl::usrp1_impl(const device_addr_t &device_addr){
this->update_rates();
//reset cordic rates and their properties to zero
- BOOST_FOREACH(const std::string &name, _tree->list(mb_path / "rx_dsps")){
+ for(const std::string &name: _tree->list(mb_path / "rx_dsps")){
_tree->access<double>(mb_path / "rx_dsps" / name / "freq" / "value").set(0.0);
}
diff --git a/host/lib/usrp/usrp1/usrp1_impl.hpp b/host/lib/usrp/usrp1/usrp1_impl.hpp
index 18c5c8bd3..1aa255f8d 100644
--- a/host/lib/usrp/usrp1/usrp1_impl.hpp
+++ b/host/lib/usrp/usrp1/usrp1_impl.hpp
@@ -31,7 +31,6 @@
#include <uhd/usrp/dboard_eeprom.hpp>
#include <uhd/usrp/dboard_manager.hpp>
#include <uhd/transport/usb_zero_copy.hpp>
-#include <boost/foreach.hpp>
#include <boost/weak_ptr.hpp>
#include <complex>
@@ -158,7 +157,7 @@ private:
void enable_tx(bool enb){
_tx_enabled = enb;
_fx2_ctrl->usrp_tx_enable(enb);
- BOOST_FOREACH(const std::string &key, _dbc.keys())
+ for(const std::string &key: _dbc.keys())
{
_dbc[key].codec->enable_tx_digital(enb);
}
diff --git a/host/lib/usrp/usrp2/codec_ctrl.cpp b/host/lib/usrp/usrp2/codec_ctrl.cpp
index a0e456708..ec8bcf919 100644
--- a/host/lib/usrp/usrp2/codec_ctrl.cpp
+++ b/host/lib/usrp/usrp2/codec_ctrl.cpp
@@ -23,7 +23,6 @@
#include <uhd/utils/safe_call.hpp>
#include <uhd/exception.hpp>
#include <stdint.h>
-#include <boost/foreach.hpp>
using namespace uhd;
@@ -198,7 +197,7 @@ private:
void send_ad9777_reg(uint8_t addr){
uint16_t reg = _ad9777_regs.get_write_reg(addr);
- UHD_LOGV(always) << "send_ad9777_reg: " << std::hex << reg << std::endl;
+ UHD_LOGGER_TRACE("USRP2") << "send_ad9777_reg: " << std::hex << reg;
_spiface->write_spi(
SPI_SS_AD9777, spi_config_t::EDGE_RISE,
reg, 16
diff --git a/host/lib/usrp/usrp2/dboard_iface.cpp b/host/lib/usrp/usrp2/dboard_iface.cpp
index 1dafe5721..49ad38eaf 100644
--- a/host/lib/usrp/usrp2/dboard_iface.cpp
+++ b/host/lib/usrp/usrp2/dboard_iface.cpp
@@ -134,7 +134,7 @@ usrp2_dboard_iface::usrp2_dboard_iface(
//reset the aux dacs
_dac_regs[UNIT_RX] = ad5623_regs_t();
_dac_regs[UNIT_TX] = ad5623_regs_t();
- BOOST_FOREACH(unit_t unit, _dac_regs.keys()){
+ for(unit_t unit: _dac_regs.keys()){
_dac_regs[unit].data = 1;
_dac_regs[unit].addr = ad5623_regs_t::ADDR_ALL;
_dac_regs[unit].cmd = ad5623_regs_t::CMD_RESET;
diff --git a/host/lib/usrp/usrp2/io_impl.cpp b/host/lib/usrp/usrp2/io_impl.cpp
index 224519944..992d70835 100644
--- a/host/lib/usrp/usrp2/io_impl.cpp
+++ b/host/lib/usrp/usrp2/io_impl.cpp
@@ -23,7 +23,7 @@
#include "usrp2_regs.hpp"
#include "fw_common.h"
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/utils/tasks.hpp>
#include <uhd/exception.hpp>
#include <uhd/utils/byteswap.hpp>
@@ -209,7 +209,7 @@ void usrp2_impl::io_impl::recv_pirate_loop(
fc_mon.update_fc_condition(uhd::ntohx(fc_word32));
continue;
}
- //else UHD_MSG(often) << "metadata.event_code " << metadata.event_code << std::endl;
+ //else UHD_LOGGER_DEBUG("USRP2") << "metadata.event_code " << metadata.event_code;
async_msg_fifo.push_with_pop_on_full(metadata);
standard_async_msg_prints(metadata);
@@ -218,7 +218,7 @@ void usrp2_impl::io_impl::recv_pirate_loop(
//TODO unknown received packet, may want to print error...
}
}catch(const std::exception &e){
- UHD_MSG(error) << "Error in recv pirate loop: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP2") << "Error in recv pirate loop: " << e.what() ;
}
}
}
@@ -231,7 +231,7 @@ void usrp2_impl::io_init(void){
_io_impl = UHD_PIMPL_MAKE(io_impl, ());
//init first so we dont have an access race
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
//init the tx xport and flow control monitor
_io_impl->tx_xports.push_back(_mbc[mb].tx_dsp_xport);
_io_impl->fc_mons.push_back(flow_control_monitor::sptr(new flow_control_monitor(
@@ -241,14 +241,14 @@ void usrp2_impl::io_init(void){
}
//allocate streamer weak ptrs containers
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
_mbc[mb].rx_streamers.resize(_mbc[mb].rx_dsps.size());
_mbc[mb].tx_streamers.resize(1/*known to be 1 dsp*/);
}
//create a new pirate thread for each zc if (yarr!!)
size_t index = 0;
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
//spawn a new pirate to plunder the recv booty
_io_impl->pirate_tasks.push_back(task::make(boost::bind(
&usrp2_impl::io_impl::recv_pirate_loop, _io_impl.get(),
@@ -261,7 +261,7 @@ void usrp2_impl::update_tick_rate(const double rate){
_io_impl->tick_rate = rate; //shadow for async msg
//update the tick rate on all existing streamers -> thread safe
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
for (size_t i = 0; i < _mbc[mb].rx_streamers.size(); i++){
boost::shared_ptr<sph::recv_packet_streamer> my_streamer =
boost::dynamic_pointer_cast<sph::recv_packet_streamer>(_mbc[mb].rx_streamers[i].lock());
@@ -298,15 +298,15 @@ void usrp2_impl::update_tx_samp_rate(const std::string &mb, const size_t dsp, co
}
void usrp2_impl::update_rates(void){
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
fs_path root = "/mboards/" + mb;
_tree->access<double>(root / "tick_rate").update();
//and now that the tick rate is set, init the host rates to something
- BOOST_FOREACH(const std::string &name, _tree->list(root / "rx_dsps")){
+ for(const std::string &name: _tree->list(root / "rx_dsps")){
_tree->access<double>(root / "rx_dsps" / name / "rate" / "value").update();
}
- BOOST_FOREACH(const std::string &name, _tree->list(root / "tx_dsps")){
+ for(const std::string &name: _tree->list(root / "tx_dsps")){
_tree->access<double>(root / "tx_dsps" / name / "rate" / "value").update();
}
}
@@ -330,7 +330,7 @@ void usrp2_impl::update_rx_subdev_spec(const std::string &which_mb, const subdev
//compute the new occupancy and resize
_mbc[which_mb].rx_chan_occ = spec.size();
size_t nchan = 0;
- BOOST_FOREACH(const std::string &mb, _mbc.keys()) nchan += _mbc[mb].rx_chan_occ;
+ for(const std::string &mb: _mbc.keys()) nchan += _mbc[mb].rx_chan_occ;
}
void usrp2_impl::update_tx_subdev_spec(const std::string &which_mb, const subdev_spec_t &spec){
@@ -346,7 +346,7 @@ void usrp2_impl::update_tx_subdev_spec(const std::string &which_mb, const subdev
//compute the new occupancy and resize
_mbc[which_mb].tx_chan_occ = spec.size();
size_t nchan = 0;
- BOOST_FOREACH(const std::string &mb, _mbc.keys()) nchan += _mbc[mb].tx_chan_occ;
+ for(const std::string &mb: _mbc.keys()) nchan += _mbc[mb].tx_chan_occ;
}
/***********************************************************************
@@ -375,10 +375,10 @@ void usrp2_impl::program_stream_dest(
//user has provided an alternative address and port for destination
if (args.args.has_key("addr") and args.args.has_key("port")){
- UHD_MSG(status) << boost::format(
- "Programming streaming destination for custom address.\n"
- "IPv4 Address: %s, UDP Port: %s\n"
- ) % args.args["addr"] % args.args["port"] << std::endl;
+ UHD_LOGGER_INFO("USRP2") << boost::format(
+ "Programming streaming destination for custom address. "
+ "IPv4 Address: %s, UDP Port: %s"
+ ) % args.args["addr"] % args.args["port"];
asio::io_service io_service;
asio::ip::udp::resolver resolver(io_service);
@@ -388,7 +388,7 @@ void usrp2_impl::program_stream_dest(
stream_ctrl.udp_port = uhd::htonx(uint32_t(endpoint.port()));
for (size_t i = 0; i < 3; i++){
- UHD_MSG(status) << "ARP attempt " << i << std::endl;
+ UHD_LOGGER_INFO("USRP2") << "ARP attempt " << i;
managed_send_buffer::sptr send_buff = xport->get_send_buff();
std::memcpy(send_buff->cast<void *>(), &stream_ctrl, sizeof(stream_ctrl));
send_buff->commit(sizeof(stream_ctrl));
@@ -398,7 +398,7 @@ void usrp2_impl::program_stream_dest(
if (recv_buff and recv_buff->size() >= sizeof(uint32_t)){
const uint32_t result = uhd::ntohx(recv_buff->cast<const uint32_t *>()[0]);
if (result == 0){
- UHD_MSG(status) << "Success! " << std::endl;
+ UHD_LOGGER_INFO("USRP2") << "Success! ";
return;
}
}
@@ -454,7 +454,7 @@ rx_streamer::sptr usrp2_impl::get_rx_stream(const uhd::stream_args_t &args_){
for (size_t chan_i = 0; chan_i < args.channels.size(); chan_i++){
const size_t chan = args.channels[chan_i];
size_t num_chan_so_far = 0;
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
num_chan_so_far += _mbc[mb].rx_chan_occ;
if (chan < num_chan_so_far){
const size_t dsp = chan + _mbc[mb].rx_chan_occ - num_chan_so_far;
@@ -524,7 +524,7 @@ tx_streamer::sptr usrp2_impl::get_tx_stream(const uhd::stream_args_t &args_){
const size_t chan = args.channels[chan_i];
size_t num_chan_so_far = 0;
size_t abs = 0;
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
num_chan_so_far += _mbc[mb].tx_chan_occ;
if (chan < num_chan_so_far){
const size_t dsp = chan + _mbc[mb].tx_chan_occ - num_chan_so_far;
diff --git a/host/lib/usrp/usrp2/n200_image_loader.cpp b/host/lib/usrp/usrp2/n200_image_loader.cpp
index c68484600..01a8faa5e 100644
--- a/host/lib/usrp/usrp2/n200_image_loader.cpp
+++ b/host/lib/usrp/usrp2/n200_image_loader.cpp
@@ -225,7 +225,7 @@ static uhd::device_addr_t n200_find(const image_loader::image_loader_args_t &ima
* this query. If the user supplied specific arguments that
* led to a USRP2, throw an error.
*/
- BOOST_FOREACH(const uhd::device_addr_t &dev, found){
+ for(const uhd::device_addr_t &dev: found){
rev_xport = udp_simple::make_connected(
dev.get("addr"),
BOOST_STRINGIZE(N200_UDP_FW_UPDATE_PORT)
@@ -257,7 +257,7 @@ static uhd::device_addr_t n200_find(const image_loader::image_loader_args_t &ima
std::string err_msg = "Could not resolve given args to a single N-Series device.\n"
"Applicable devices:\n";
- BOOST_FOREACH(const uhd::device_addr_t &dev, n200_found){
+ for(const uhd::device_addr_t &dev: n200_found){
err_msg += str(boost::format("* %s (addr=%s)\n")
% dev.get("hw_rev")
% dev.get("addr"));
diff --git a/host/lib/usrp/usrp2/usrp2_fifo_ctrl.cpp b/host/lib/usrp/usrp2/usrp2_fifo_ctrl.cpp
index 9cd3afc6c..cdd26bbc4 100644
--- a/host/lib/usrp/usrp2/usrp2_fifo_ctrl.cpp
+++ b/host/lib/usrp/usrp2/usrp2_fifo_ctrl.cpp
@@ -17,7 +17,7 @@
#include "usrp2_regs.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/transport/vrt_if_packet.hpp>
#include "usrp2_fifo_ctrl.hpp"
diff --git a/host/lib/usrp/usrp2/usrp2_iface.cpp b/host/lib/usrp/usrp2/usrp2_iface.cpp
index 021f0e3e5..ce547bad0 100644
--- a/host/lib/usrp/usrp2/usrp2_iface.cpp
+++ b/host/lib/usrp/usrp2/usrp2_iface.cpp
@@ -20,14 +20,13 @@
#include "fw_common.h"
#include "usrp2_iface.hpp"
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/types/dict.hpp>
#include <boost/thread.hpp>
-#include <boost/foreach.hpp>
#include <boost/asio.hpp> //used for htonl and ntohl
#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
@@ -269,10 +268,10 @@ public:
return ctrl_send_and_recv_internal(out_data, lo, hi, CTRL_RECV_TIMEOUT/CTRL_RECV_RETRIES);
}
catch(const timeout_error &e){
- UHD_MSG(error)
+ UHD_LOGGER_ERROR("USRP2")
<< "Control packet attempt " << i
<< ", sequence number " << _ctrl_seq_num
- << ":\n" << e.what() << std::endl;
+ << ":\n" << e.what() ;
}
}
throw uhd::runtime_error("link dead: timeout waiting for control packet ACK");
diff --git a/host/lib/usrp/usrp2/usrp2_impl.cpp b/host/lib/usrp/usrp2/usrp2_impl.cpp
index ee2434fab..78a9acb72 100644
--- a/host/lib/usrp/usrp2/usrp2_impl.cpp
+++ b/host/lib/usrp/usrp2/usrp2_impl.cpp
@@ -19,7 +19,7 @@
#include "fw_common.h"
#include "apply_corrections.hpp"
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/exception.hpp>
#include <uhd/transport/if_addrs.hpp>
#include <uhd/transport/udp_zero_copy.hpp>
@@ -29,7 +29,6 @@
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/safe_call.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/bind.hpp>
#include <boost/assign/list_of.hpp>
@@ -54,7 +53,7 @@ device_addrs_t usrp2_find(const device_addr_t &hint_){
if (hints.size() > 1){
device_addrs_t found_devices;
std::string error_msg;
- BOOST_FOREACH(const device_addr_t &hint_i, hints){
+ for(const device_addr_t &hint_i: hints){
device_addrs_t found_devices_i = usrp2_find(hint_i);
if (found_devices_i.size() != 1) error_msg += str(boost::format(
"Could not resolve device hint \"%s\" to a single device."
@@ -81,7 +80,7 @@ device_addrs_t usrp2_find(const device_addr_t &hint_){
//if no address was specified, send a broadcast on each interface
if (not hint.has_key("addr")){
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs()){
+ for(const if_addrs_t &if_addrs: get_if_addrs()){
//avoid the loopback device
if (if_addrs.inet == asio::ip::address_v4::loopback().to_string()) continue;
@@ -106,7 +105,7 @@ device_addrs_t usrp2_find(const device_addr_t &hint_){
udp_transport = udp_simple::make_broadcast(hint["addr"], BOOST_STRINGIZE(USRP2_UDP_CTRL_PORT));
}
catch(const std::exception &e){
- UHD_MSG(error) << boost::format("Cannot open UDP transport on %s\n%s") % hint["addr"] % e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP2") << boost::format("Cannot open UDP transport on %s\n%s") % hint["addr"] % e.what() ;
return usrp2_addrs; //dont throw, but return empty address so caller can insert
}
@@ -120,11 +119,11 @@ device_addrs_t usrp2_find(const device_addr_t &hint_){
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "USRP2 Network discovery error " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP2") << "USRP2 Network discovery error " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "USRP2 Network discovery unknown error " << std::endl;
+ UHD_LOGGER_ERROR("USRP2") << "USRP2 Network discovery unknown error " ;
}
//loop and receive until the timeout
@@ -284,7 +283,7 @@ static zero_copy_if::sptr make_xport(
//only copy hints that contain the filter word
device_addr_t filtered_hints;
- BOOST_FOREACH(const std::string &key, hints.keys()){
+ for(const std::string &key: hints.keys()){
if (key.find(filter) == std::string::npos) continue;
filtered_hints[key] = hints[key];
}
@@ -319,7 +318,7 @@ static zero_copy_if::sptr make_xport(
usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
device_addr(_device_addr)
{
- UHD_MSG(status) << "Opening a USRP2/N-Series device..." << std::endl;
+ UHD_LOGGER_INFO("USRP2") << "Opening a USRP2/N-Series device...";
//setup the dsp transport hints (default to a large recv buff)
if (not device_addr.has_key("recv_buff_size")){
@@ -356,8 +355,8 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
device_addr["recv_frame_size"] = boost::lexical_cast<std::string>(mtu.recv_mtu);
device_addr["send_frame_size"] = boost::lexical_cast<std::string>(mtu.send_mtu);
- UHD_MSG(status) << boost::format("Current recv frame size: %d bytes") % mtu.recv_mtu << std::endl;
- UHD_MSG(status) << boost::format("Current send frame size: %d bytes") % mtu.send_mtu << std::endl;
+ UHD_LOGGER_INFO("USRP2") << boost::format("Current recv frame size: %d bytes") % mtu.recv_mtu;
+ UHD_LOGGER_INFO("USRP2") << boost::format("Current send frame size: %d bytes") % mtu.send_mtu;
}
catch(const uhd::not_implemented_error &){
//just ignore this error, makes older fw work...
@@ -412,7 +411,7 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
// handle case where the MB EEPROM is not programmed
if (fpga_major == USRP2_FPGA_COMPAT_NUM or fpga_major == N200_FPGA_COMPAT_NUM)
{
- UHD_MSG(warning) << "Unable to identify device - assuming USRP2/N-Series device" << std::endl;
+ UHD_LOGGER_WARNING("USRP2") << "Unable to identify device - assuming USRP2/N-Series device" ;
expected_fpga_compat_num = fpga_major;
}
}
@@ -433,19 +432,19 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
////////////////////////////////////////////////////////////////
// construct transports for RX and TX DSPs
////////////////////////////////////////////////////////////////
- UHD_LOG << "Making transport for RX DSP0..." << std::endl;
+ UHD_LOGGER_TRACE("USRP2") << "Making transport for RX DSP0..." ;
_mbc[mb].rx_dsp_xports.push_back(make_xport(
addr, BOOST_STRINGIZE(USRP2_UDP_RX_DSP0_PORT), device_args_i, "recv"
));
- UHD_LOG << "Making transport for RX DSP1..." << std::endl;
+ UHD_LOGGER_TRACE("USRP2") << "Making transport for RX DSP1..." ;
_mbc[mb].rx_dsp_xports.push_back(make_xport(
addr, BOOST_STRINGIZE(USRP2_UDP_RX_DSP1_PORT), device_args_i, "recv"
));
- UHD_LOG << "Making transport for TX DSP0..." << std::endl;
+ UHD_LOGGER_TRACE("USRP2") << "Making transport for TX DSP0..." ;
_mbc[mb].tx_dsp_xport = make_xport(
addr, BOOST_STRINGIZE(USRP2_UDP_TX_DSP0_PORT), device_args_i, "send"
);
- UHD_LOG << "Making transport for Control..." << std::endl;
+ UHD_LOGGER_TRACE("USRP2") << "Making transport for Control..." ;
_mbc[mb].fifo_ctrl_xport = make_xport(
addr, BOOST_STRINGIZE(USRP2_UDP_FIFO_CRTL_PORT), device_addr_t(), ""
);
@@ -536,18 +535,18 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
//otherwise if not disabled, look for the internal GPSDO
if (_mbc[mb].iface->peekfw(U2_FW_REG_HAS_GPSDO) != dont_look_for_gpsdo)
{
- UHD_MSG(status) << "Detecting internal GPSDO.... " << std::flush;
+ UHD_LOGGER_INFO("USRP2") << "Detecting internal GPSDO.... ";
try{
_mbc[mb].gps = gps_ctrl::make(udp_simple::make_uart(udp_simple::make_connected(
addr, BOOST_STRINGIZE(USRP2_UDP_UART_GPS_PORT)
)));
}
catch(std::exception &e){
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("USRP2") << "An error occurred making GPSDO control: " << e.what() ;
}
if (_mbc[mb].gps and _mbc[mb].gps->gps_detected())
{
- BOOST_FOREACH(const std::string &name, _mbc[mb].gps->get_sensors())
+ for(const std::string &name: _mbc[mb].gps->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, _mbc[mb].gps, name));
@@ -743,12 +742,12 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
//bind frontend corrections to the dboard freq props
const fs_path db_tx_fe_path = mb_path / "dboards" / "A" / "tx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_tx_fe_path)){
+ for(const std::string &name: _tree->list(db_tx_fe_path)){
_tree->access<double>(db_tx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&usrp2_impl::set_tx_fe_corrections, this, mb, _1));
}
const fs_path db_rx_fe_path = mb_path / "dboards" / "A" / "rx_frontends";
- BOOST_FOREACH(const std::string &name, _tree->list(db_rx_fe_path)){
+ for(const std::string &name: _tree->list(db_rx_fe_path)){
_tree->access<double>(db_rx_fe_path / name / "freq" / "value")
.add_coerced_subscriber(boost::bind(&usrp2_impl::set_rx_fe_corrections, this, mb, _1));
}
@@ -759,14 +758,14 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
//do some post-init tasks
this->update_rates();
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
fs_path root = "/mboards/" + mb;
//reset cordic rates and their properties to zero
- BOOST_FOREACH(const std::string &name, _tree->list(root / "rx_dsps")){
+ for(const std::string &name: _tree->list(root / "rx_dsps")){
_tree->access<double>(root / "rx_dsps" / name / "freq" / "value").set(0.0);
}
- BOOST_FOREACH(const std::string &name, _tree->list(root / "tx_dsps")){
+ for(const std::string &name: _tree->list(root / "tx_dsps")){
_tree->access<double>(root / "tx_dsps" / name / "freq" / "value").set(0.0);
}
@@ -778,7 +777,7 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
//GPS installed: use external ref, time, and init time spec
if (_mbc[mb].gps and _mbc[mb].gps->gps_detected()){
_mbc[mb].time64->enable_gpsdo();
- UHD_MSG(status) << "Setting references to the internal GPSDO" << std::endl;
+ UHD_LOGGER_INFO("USRP2") << "Setting references to the internal GPSDO" ;
_tree->access<std::string>(root / "time_source/value").set("gpsdo");
_tree->access<std::string>(root / "clock_source/value").set("gpsdo");
}
@@ -787,7 +786,7 @@ usrp2_impl::usrp2_impl(const device_addr_t &_device_addr) :
}
usrp2_impl::~usrp2_impl(void){UHD_SAFE_CALL(
- BOOST_FOREACH(const std::string &mb, _mbc.keys()){
+ for(const std::string &mb: _mbc.keys()){
_mbc[mb].tx_dsp->set_updates(0, 0);
}
)}
diff --git a/host/lib/usrp/usrp_c.cpp b/host/lib/usrp/usrp_c.cpp
index e97be2abe..3d50bd8be 100644
--- a/host/lib/usrp/usrp_c.cpp
+++ b/host/lib/usrp/usrp_c.cpp
@@ -23,7 +23,6 @@
#include <uhd/error.h>
#include <uhd/usrp/usrp.h>
-#include <boost/foreach.hpp>
#include <boost/thread/mutex.hpp>
#include <string.h>
@@ -259,7 +258,7 @@ uhd_error uhd_usrp_find(
uhd::device_addrs_t devs = uhd::device::find(std::string(args), uhd::device::USRP);
(*strings_out)->string_vector_cpp.clear();
- BOOST_FOREACH(const uhd::device_addr_t &dev, devs){
+ for(const uhd::device_addr_t &dev: devs){
(*strings_out)->string_vector_cpp.push_back(dev.to_string());
}
)
diff --git a/host/lib/usrp/x300/x300_adc_ctrl.cpp b/host/lib/usrp/x300/x300_adc_ctrl.cpp
index fed2ffaf7..d9d0cb168 100644
--- a/host/lib/usrp/x300/x300_adc_ctrl.cpp
+++ b/host/lib/usrp/x300/x300_adc_ctrl.cpp
@@ -21,7 +21,6 @@
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/exception.hpp>
-#include <boost/foreach.hpp>
using namespace uhd;
diff --git a/host/lib/usrp/x300/x300_adc_dac_utils.cpp b/host/lib/usrp/x300/x300_adc_dac_utils.cpp
index a6815286e..05228a309 100644
--- a/host/lib/usrp/x300/x300_adc_dac_utils.cpp
+++ b/host/lib/usrp/x300/x300_adc_dac_utils.cpp
@@ -100,7 +100,7 @@ void x300_impl::self_test_adcs(mboard_members_t& mb, uint32_t ramp_time_ms) {
void x300_impl::extended_adc_test(mboard_members_t& mb, double duration_s)
{
static const size_t SECS_PER_ITER = 5;
- UHD_MSG(status) << boost::format("Running Extended ADC Self-Test (Duration=%.0fs, %ds/iteration)...\n")
+ UHD_LOGGER_INFO("X300") << boost::format("Running Extended ADC Self-Test (Duration=%.0fs, %ds/iteration)...")
% duration_s % SECS_PER_ITER;
size_t num_iters = static_cast<size_t>(ceil(duration_s/SECS_PER_ITER));
@@ -112,18 +112,18 @@ void x300_impl::extended_adc_test(mboard_members_t& mb, double duration_s)
time_strm.imbue(std::locale(std::locale::classic(), facet));
time_strm << boost::posix_time::second_clock::local_time();
//Run self-test
- UHD_MSG(status) << boost::format("-- [%s] Iteration %06d... ") % time_strm.str() % (iter+1);
+ UHD_LOGGER_INFO("X300") << boost::format("Running ADC Test Iteration %06d... ") % (iter+1);
try {
self_test_adcs(mb, SECS_PER_ITER*1000);
- UHD_MSG(status) << "passed" << std::endl;
+ UHD_LOGGER_INFO("X300") << boost::format("ADC Test Iteration %06d passed") % (iter+1);
} catch(std::exception &e) {
num_failures++;
- UHD_MSG(status) << e.what() << std::endl;
+ UHD_LOGGER_ERROR("X300") << e.what();
}
}
if (num_failures == 0) {
- UHD_MSG(status) << "Extended ADC Self-Test PASSED\n";
+ UHD_LOGGER_INFO("X300") << "Extended ADC Self-Test PASSED";
} else {
throw uhd::runtime_error(
(boost::format("Extended ADC Self-Test FAILED!!! (%d/%d failures)\n") % num_failures % num_iters).str());
@@ -137,7 +137,7 @@ void x300_impl::extended_adc_test(mboard_members_t& mb, double duration_s)
void x300_impl::self_cal_adc_capture_delay(mboard_members_t& mb, const size_t radio_i, bool print_status)
{
radio_perifs_t& perif = mb.radio_perifs[radio_i];
- if (print_status) UHD_MSG(status) << "Running ADC capture delay self-cal..." << std::flush;
+ if (print_status) UHD_LOGGER_INFO("X300") << "Running ADC capture delay self-cal...";
static const uint32_t NUM_DELAY_STEPS = 32; //The IDELAYE2 element has 32 steps
static const uint32_t NUM_RETRIES = 2; //Retry self-cal if it fails in warmup situations
@@ -200,7 +200,7 @@ void x300_impl::self_cal_adc_capture_delay(mboard_members_t& mb, const size_t ra
}
}
}
- //UHD_MSG(status) << (boost::format("CapTap=%d, Error=%d\n") % dly_tap % err_code);
+ //UHD_LOGGER_INFO("X300") << (boost::format("CapTap=%d, Error=%d") % dly_tap % err_code);
}
//Retry the self-cal if it fails
@@ -230,13 +230,13 @@ void x300_impl::self_cal_adc_capture_delay(mboard_members_t& mb, const size_t ra
if (print_status) {
double tap_delay = (1.0e12 / mb.clock->get_master_clock_rate()) / (2*32); //in ps
- UHD_MSG(status) << boost::format(" done (Tap=%d, Window=%d, TapDelay=%.3fps, Iter=%d)\n") % ideal_tap % (win_stop-win_start) % tap_delay % iter;
+ UHD_LOGGER_INFO("X300") << boost::format(" ADC capture delay self-cal done (Tap=%d, Window=%d, TapDelay=%.3fps, Iter=%d)") % ideal_tap % (win_stop-win_start) % tap_delay % iter;
}
}
double x300_impl::self_cal_adc_xfer_delay(mboard_members_t& mb, bool apply_delay)
{
- UHD_MSG(status) << "Running ADC transfer delay self-cal: " << std::flush;
+ UHD_LOGGER_INFO("X300") << "Running ADC transfer delay self-cal: ";
//Effective resolution of the self-cal.
static const size_t NUM_DELAY_STEPS = 100;
@@ -246,7 +246,6 @@ double x300_impl::self_cal_adc_xfer_delay(mboard_members_t& mb, bool apply_delay
double delay_range = 2 * master_clk_period;
double delay_incr = delay_range / NUM_DELAY_STEPS;
- UHD_MSG(status) << "Measuring..." << std::flush;
double cached_clk_delay = mb.clock->get_clock_delay(X300_CLOCK_WHICH_ADC0);
double fpga_clk_delay = mb.clock->get_clock_delay(X300_CLOCK_WHICH_FPGA);
@@ -292,7 +291,7 @@ double x300_impl::self_cal_adc_xfer_delay(mboard_members_t& mb, bool apply_delay
err_code += 100; //Increment error code by 100 to indicate no lock
}
}
- //UHD_MSG(status) << (boost::format("XferDelay=%fns, Error=%d\n") % delay % err_code);
+ //UHD_LOGGER_INFO("X300") << (boost::format("XferDelay=%fns, Error=%d") % delay % err_code);
results.push_back(std::pair<double,bool>(delay, err_code==0));
}
@@ -344,7 +343,6 @@ double x300_impl::self_cal_adc_xfer_delay(mboard_members_t& mb, bool apply_delay
}
if (apply_delay) {
- UHD_MSG(status) << "Validating..." << std::flush;
//Apply delay
win_center = mb.clock->set_clock_delay(X300_CLOCK_WHICH_ADC0, win_center); //Sets ADC0 and ADC1
wait_for_clk_locked(mb, fw_regmap_t::clk_status_reg_t::LMK_LOCK, 0.1);
@@ -360,7 +358,7 @@ double x300_impl::self_cal_adc_xfer_delay(mboard_members_t& mb, bool apply_delay
mb.radio_perifs[r].adc->set_test_word("normal", "normal");
mb.radio_perifs[r].regmap->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 0);
}
- UHD_MSG(status) << (boost::format(" done (FPGA->ADC=%.3fns%s, Window=%.3fns)\n") %
+ UHD_LOGGER_INFO("X300") << (boost::format("ADC transfer delay self-cal done (FPGA->ADC=%.3fns%s, Window=%.3fns)") %
(win_center-fpga_clk_delay) % (cycle_slip?" +cyc":"") % win_length);
return win_center;
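The hunks above swap the old stream-style UHD_MSG(status)/UHD_MSG(warning) calls for component-scoped logger macros. A minimal usage sketch, assuming a UHD build that ships <uhd/utils/log.hpp> with the UHD_LOGGER_* macros seen above; the helper function and message text are invented for illustration:

    #include <uhd/utils/log.hpp>
    #include <boost/format.hpp>

    // Hypothetical helper: logs one self-test iteration the same way the
    // patched X300 code does. Note there is no trailing std::endl/std::flush;
    // the logger finishes the entry when the statement ends.
    static void log_iteration(const size_t iter, const bool passed)
    {
        UHD_LOGGER_INFO("X300") << boost::format("Iteration %06d...") % iter;
        if (passed) {
            UHD_LOGGER_INFO("X300") << boost::format("Iteration %06d passed") % iter;
        } else {
            UHD_LOGGER_ERROR("X300") << boost::format("Iteration %06d failed") % iter;
        }
    }

    int main()
    {
        log_iteration(1, true);   // requires linking against libuhd
        return 0;
    }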
diff --git a/host/lib/usrp/x300/x300_clock_ctrl.cpp b/host/lib/usrp/x300/x300_clock_ctrl.cpp
index b8b100ceb..7307bcc66 100644
--- a/host/lib/usrp/x300/x300_clock_ctrl.cpp
+++ b/host/lib/usrp/x300/x300_clock_ctrl.cpp
@@ -303,9 +303,8 @@ public:
//be close to what the client requested.
}
- UHD_LOGV(often)
- << boost::format("x300_clock_ctrl::set_clock_delay: Which=%d, Requested=%f, Digital Taps=%d, Half Shift=%d, Analog Delay=%d (%s), Coerced Delay=%fns"
- ) % which % delay_ns % ddly_value % (half_shift_en?"ON":"OFF") % ((int)adly_value) % (adly_en?"ON":"OFF") % coerced_delay << std::endl;
+ UHD_LOG_DEBUG("X300", boost::format("x300_clock_ctrl::set_clock_delay: Which=%d, Requested=%f, Digital Taps=%d, Half Shift=%d, Analog Delay=%d (%s), Coerced Delay=%fns"
+ ) % which % delay_ns % ddly_value % (half_shift_en?"ON":"OFF") % ((int)adly_value) % (adly_en?"ON":"OFF") % coerced_delay)
//Apply settings
switch (which)
diff --git a/host/lib/usrp/x300/x300_dac_ctrl.cpp b/host/lib/usrp/x300/x300_dac_ctrl.cpp
index 162eeb143..51b93662c 100644
--- a/host/lib/usrp/x300/x300_dac_ctrl.cpp
+++ b/host/lib/usrp/x300/x300_dac_ctrl.cpp
@@ -18,11 +18,10 @@
#include "x300_dac_ctrl.hpp"
#include "x300_regs.hpp"
#include <uhd/types/time_spec.hpp>
-#include <uhd/utils/msg.hpp>
#include <uhd/utils/log.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/exception.hpp>
-#include <boost/foreach.hpp>
+#include <boost/format.hpp>
#include <boost/thread/thread.hpp> //sleep
#define X300_DAC_FRONTEND_SYNC_FAILURE_FATAL
@@ -243,7 +242,7 @@ public:
if (failure_is_fatal) {
throw uhd::runtime_error(msg);
} else {
- UHD_MSG(warning) << msg;
+ UHD_LOGGER_WARNING("X300") << msg;
}
}
}
diff --git a/host/lib/usrp/x300/x300_dboard_iface.cpp b/host/lib/usrp/x300/x300_dboard_iface.cpp
index 092c888b0..e492fe2b3 100644
--- a/host/lib/usrp/x300/x300_dboard_iface.cpp
+++ b/host/lib/usrp/x300/x300_dboard_iface.cpp
@@ -34,7 +34,7 @@ x300_dboard_iface::x300_dboard_iface(const x300_dboard_iface_config_t &config):
//reset the aux dacs
_dac_regs[UNIT_RX] = ad5623_regs_t();
_dac_regs[UNIT_TX] = ad5623_regs_t();
- BOOST_FOREACH(unit_t unit, _dac_regs.keys())
+ for(unit_t unit: _dac_regs.keys())
{
_dac_regs[unit].data = 1;
_dac_regs[unit].addr = ad5623_regs_t::ADDR_ALL;
diff --git a/host/lib/usrp/x300/x300_fw_ctrl.cpp b/host/lib/usrp/x300/x300_fw_ctrl.cpp
index d149dadf3..38b7eb139 100644
--- a/host/lib/usrp/x300/x300_fw_ctrl.cpp
+++ b/host/lib/usrp/x300/x300_fw_ctrl.cpp
@@ -19,7 +19,7 @@
#include "x300_fw_common.h"
#include <uhd/transport/udp_simple.hpp>
#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
#include <boost/thread/mutex.hpp>
@@ -28,6 +28,7 @@
#include "x300_regs.hpp"
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/thread/thread.hpp>
+#include <boost/lexical_cast.hpp>
using namespace uhd;
using namespace uhd::niusrprio;
@@ -60,8 +61,8 @@ public:
catch(const uhd::io_error &ex)
{
std::string error_msg = str(boost::format(
- "x300 fw communication failure #%u\n%s") % i % ex.what());
- if (errors) UHD_MSG(error) << error_msg << std::endl;
+ "%s: x300 fw communication failure #%u\n%s") % __loc_info() % i % ex.what());
+ if (errors) UHD_LOGGER_ERROR("X300") << error_msg ;
if (i == num_retries) throw uhd::io_error(error_msg);
}
}
@@ -80,8 +81,8 @@ public:
catch(const uhd::io_error &ex)
{
std::string error_msg = str(boost::format(
- "x300 fw communication failure #%u\n%s") % i % ex.what());
- if (errors) UHD_MSG(error) << error_msg << std::endl;
+ "%s: x300 fw communication failure #%u\n%s") % __loc_info() % i % ex.what());
+ if (errors) UHD_LOGGER_ERROR("X300") << error_msg ;
if (i == num_retries) throw uhd::io_error(error_msg);
}
}
@@ -94,6 +95,7 @@ protected:
virtual void __poke32(const wb_addr_type addr, const uint32_t data) = 0;
virtual uint32_t __peek32(const wb_addr_type addr) = 0;
virtual void __flush() = 0;
+ virtual std::string __loc_info() = 0;
boost::mutex reg_access;
};
@@ -182,6 +184,11 @@ protected:
while (udp->recv(boost::asio::buffer(buff), 0.0)){} //flush
}
+ virtual std::string __loc_info(void)
+ {
+ return udp->get_send_addr();
+ }
+
private:
uhd::transport::udp_simple::sptr udp;
size_t seq;
@@ -290,6 +297,11 @@ protected:
__peek32(0);
}
+ virtual std::string __loc_info(void)
+ {
+ return boost::lexical_cast<std::string>(_drv_proxy->get_interface_num());
+ }
+
private:
niriok_proxy::sptr _drv_proxy;
static const uint32_t READ_TIMEOUT_IN_MS = 100;
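The new __loc_info() hook above lets the shared retry loop prefix each communication-failure message with the transport's location (the UDP send address for Ethernet, the interface number for PCIe). A stripped-down sketch of the same virtual-hook pattern; the class and member names here are hypothetical, not the UHD ones:

    #include <iostream>
    #include <sstream>
    #include <string>

    // Hypothetical mirror of the pattern in x300_fw_ctrl.cpp: the base class
    // builds the error text, the subclass supplies the "where am I" string.
    class ctrl_base
    {
    public:
        virtual ~ctrl_base() {}
        std::string describe_failure(const unsigned attempt)
        {
            std::ostringstream os;
            os << loc_info() << ": fw communication failure #" << attempt;
            return os.str();
        }
    protected:
        virtual std::string loc_info() = 0;
    };

    class udp_ctrl : public ctrl_base
    {
    protected:
        std::string loc_info() { return "192.168.10.2"; } // stand-in for udp->get_send_addr()
    };

    int main()
    {
        udp_ctrl ctrl;
        std::cout << ctrl.describe_failure(1) << std::endl;
        return 0;
    }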
diff --git a/host/lib/usrp/x300/x300_fw_uart.cpp b/host/lib/usrp/x300/x300_fw_uart.cpp
index a2cbcc908..83a564997 100644
--- a/host/lib/usrp/x300/x300_fw_uart.cpp
+++ b/host/lib/usrp/x300/x300_fw_uart.cpp
@@ -18,11 +18,10 @@
#include "x300_impl.hpp"
#include <uhd/types/wb_iface.hpp>
#include "x300_regs.hpp"
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/types/serial.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/thread/thread.hpp>
using namespace uhd;
@@ -65,7 +64,7 @@ struct x300_uart_iface : uart_iface
void write_uart(const std::string &buff)
{
boost::mutex::scoped_lock(_write_mutex);
- BOOST_FOREACH(const char ch, buff)
+ for(const char ch: buff)
{
this->putchar(ch);
}
diff --git a/host/lib/usrp/x300/x300_image_loader.cpp b/host/lib/usrp/x300/x300_image_loader.cpp
index f08b21f9b..e8c2a1329 100644
--- a/host/lib/usrp/x300/x300_image_loader.cpp
+++ b/host/lib/usrp/x300/x300_image_loader.cpp
@@ -167,7 +167,7 @@ static void x300_setup_session(x300_session_t &session,
std::string err_msg = "Could not resolve given args to a single X-Series device.\n"
"Applicable devices:\n";
- BOOST_FOREACH(const uhd::device_addr_t &dev, devs){
+ for(const uhd::device_addr_t &dev: devs){
std::string identifier = dev.has_key("addr") ? "addr"
: "resource";
diff --git a/host/lib/usrp/x300/x300_impl.cpp b/host/lib/usrp/x300/x300_impl.cpp
index 71cb7f341..4601b2789 100644
--- a/host/lib/usrp/x300/x300_impl.cpp
+++ b/host/lib/usrp/x300/x300_impl.cpp
@@ -23,12 +23,11 @@
#include <boost/algorithm/string.hpp>
#include <boost/asio.hpp>
#include <uhd/utils/static.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/safe_call.hpp>
#include <uhd/usrp/subdev_spec.hpp>
#include <uhd/transport/if_addrs.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/make_shared.hpp>
#include <boost/functional/hash.hpp>
@@ -183,7 +182,7 @@ static device_addrs_t x300_find_pcie(const device_addr_t &hint, bool explicit_qu
nirio_status status = niusrprio_session::enumerate(rpc_port_name, dev_info_vtr);
if (explicit_query) nirio_status_to_exception(status, "x300_find_pcie: Error enumerating NI-RIO devices.");
- BOOST_FOREACH(niusrprio_session::device_info &dev_info, dev_info_vtr)
+ for(niusrprio_session::device_info &dev_info: dev_info_vtr)
{
device_addr_t new_addr;
new_addr["type"] = "x300";
@@ -278,7 +277,7 @@ device_addrs_t x300_find(const device_addr_t &hint_)
{
device_addrs_t found_devices;
std::string error_msg;
- BOOST_FOREACH(const device_addr_t &hint_i, hints)
+ for(const device_addr_t &hint_i: hints)
{
device_addrs_t found_devices_i = x300_find(hint_i);
if (found_devices_i.size() != 1) error_msg += str(boost::format(
@@ -310,11 +309,11 @@ device_addrs_t x300_find(const device_addr_t &hint_)
}
catch(const std::exception &ex)
{
- UHD_MSG(error) << "X300 Network discovery error " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("X300") << "X300 Network discovery error " << ex.what() ;
}
catch(...)
{
- UHD_MSG(error) << "X300 Network discovery unknown error " << std::endl;
+ UHD_LOGGER_ERROR("X300") << "X300 Network discovery unknown error " ;
}
return reply_addrs;
}
@@ -322,7 +321,7 @@ device_addrs_t x300_find(const device_addr_t &hint_)
if (!hint.has_key("resource"))
{
//otherwise, no address was specified, send a broadcast on each interface
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs())
+ for(const if_addrs_t &if_addrs: get_if_addrs())
{
//avoid the loopback device
if (if_addrs.inet == asio::ip::address_v4::loopback().to_string()) continue;
@@ -358,7 +357,7 @@ UHD_STATIC_BLOCK(register_x300_device)
static void x300_load_fw(wb_iface::sptr fw_reg_ctrl, const std::string &file_name)
{
- UHD_MSG(status) << "Loading firmware " << file_name << std::flush;
+ UHD_LOGGER_INFO("X300") << "Loading firmware " << file_name;
//load file into memory
std::ifstream fw_file(file_name.c_str());
@@ -373,28 +372,52 @@ static void x300_load_fw(wb_iface::sptr fw_reg_ctrl, const std::string &file_nam
//@TODO: FIXME: Since x300_ctrl_iface acks each write and traps exceptions, the first try for the last word
// written will print an error because it triggers a FW reload and fails to reply.
fw_reg_ctrl->poke32(SR_ADDR(BOOT_LDR_BASE, BL_DATA), uhd::byteswap(fw_file_buff[i/sizeof(uint32_t)]));
- if ((i & 0x1fff) == 0) UHD_MSG(status) << "." << std::flush;
}
//Wait for firmware to reboot. 3s is an upper bound

boost::this_thread::sleep(boost::posix_time::milliseconds(3000));
- UHD_MSG(status) << " done!" << std::endl;
+ UHD_LOGGER_INFO("X300") << "Firmware loaded!" ;
}
-x300_impl::x300_impl(const uhd::device_addr_t &dev_addr)
+x300_impl::x300_impl(const uhd::device_addr_t &dev_addr)
: device3_impl()
, _sid_framer(0)
{
- UHD_MSG(status) << "X300 initialization sequence..." << std::endl;
+ UHD_LOGGER_INFO("X300") << "X300 initialization sequence...";
_ignore_cal_file = dev_addr.has_key("ignore-cal-file");
_tree->create<std::string>("/name").set("X-Series Device");
const device_addrs_t device_args = separate_device_addr(dev_addr);
_mb.resize(device_args.size());
- for (size_t i = 0; i < device_args.size(); i++)
+
+ // Serialize the initialization process
+ if (dev_addr.has_key("serialize_init") or device_args.size() == 1) {
+ for (size_t i = 0; i < device_args.size(); i++)
+ {
+ this->setup_mb(i, device_args[i]);
+ }
+ return;
+ }
+
+
+ // Initialize groups of USRPs in parallel
+ size_t total_usrps = device_args.size();
+ size_t num_usrps = 0;
+ while (num_usrps < total_usrps)
{
- this->setup_mb(i, device_args[i]);
+ size_t init_usrps = std::min(total_usrps - num_usrps, X300_MAX_INIT_THREADS);
+ boost::thread_group setup_threads;
+ for (size_t i = 0; i < init_usrps; i++)
+ {
+ size_t index = num_usrps + i;
+ setup_threads.create_thread(
+ boost::bind(&x300_impl::setup_mb, this, index, device_args[index])
+ );
+ }
+ setup_threads.join_all();
+ num_usrps += init_usrps;
}
+
}
void x300_impl::mboard_members_t::discover_eth(
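The constructor change above initializes motherboards in batches of at most X300_MAX_INIT_THREADS using a boost::thread_group, falling back to serial setup when serialize_init is given or only one device is present. A self-contained sketch of that batching pattern; setup_one() is a placeholder for setup_mb(), and the device count is made up:

    #include <boost/bind.hpp>
    #include <boost/thread.hpp>
    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Placeholder for x300_impl::setup_mb(): pretend to bring up one board.
    static void setup_one(const size_t index)
    {
        std::printf("initializing motherboard %zu\n", index);
    }

    int main()
    {
        const size_t max_threads = 10;   // mirrors X300_MAX_INIT_THREADS
        const size_t total_usrps = 23;   // invented device count
        size_t num_usrps = 0;
        while (num_usrps < total_usrps) {
            const size_t batch = std::min(total_usrps - num_usrps, max_threads);
            boost::thread_group setup_threads;
            for (size_t i = 0; i < batch; i++) {
                setup_threads.create_thread(boost::bind(&setup_one, num_usrps + i));
            }
            setup_threads.join_all();    // finish this batch before starting the next
            num_usrps += batch;
        }
        return 0;
    }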
@@ -412,15 +435,15 @@ void x300_impl::mboard_members_t::discover_eth(
// Show a warning if there exists duplicate addresses in the mboard eeprom
if (std::find(mb_eeprom_addrs.begin(), mb_eeprom_addrs.end(), mb_eeprom[key]) != mb_eeprom_addrs.end()) {
- UHD_MSG(warning) << str(boost::format(
+ UHD_LOGGER_WARNING("X300") << str(boost::format(
"Duplicate IP address %s found in mboard EEPROM. "
"Device may not function properly.\nView and reprogram the values "
- "using the usrp_burn_mb_eeprom utility.\n") % mb_eeprom[key]);
+ "using the usrp_burn_mb_eeprom utility.") % mb_eeprom[key]);
}
mb_eeprom_addrs.push_back(mb_eeprom[key]);
}
- BOOST_FOREACH(const std::string& addr, ip_addrs) {
+ for(const std::string& addr: ip_addrs) {
x300_eth_conn_t conn_iface;
conn_iface.addr = addr;
conn_iface.type = X300_IFACE_NONE;
@@ -442,7 +465,7 @@ void x300_impl::mboard_members_t::discover_eth(
// Check default IP addresses if we couldn't
// determine the IP from the mboard eeprom
if (conn_iface.type == X300_IFACE_NONE) {
- UHD_MSG(warning) << str(boost::format(
+ UHD_LOGGER_WARNING("X300") << str(boost::format(
"Address %s not found in mboard EEPROM. Address may be wrong or "
"the EEPROM may be corrupt.\n Attempting to continue with default "
"IP addresses.\n") % conn_iface.addr
@@ -505,6 +528,14 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
mboard_members_t &mb = _mb[mb_i];
mb.initialization_done = false;
+ const std::string thread_id(
+ boost::lexical_cast<std::string>(boost::this_thread::get_id())
+ );
+ const std::string thread_msg(
+ "Thread ID " + thread_id + " for motherboard "
+ + boost::lexical_cast<std::string>(mb_i)
+ );
+
std::vector<std::string> eth_addrs;
// Not choosing eth0 based on resource might cause user issues
std::string eth0_addr = dev_addr.has_key("resource") ? dev_addr["resource"] : dev_addr["addr"];
@@ -538,7 +569,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
if (dev_addr.has_key("niusrpriorpc_port")) {
rpc_port_name = dev_addr["niusrpriorpc_port"];
}
- UHD_MSG(status) << boost::format("Connecting to niusrpriorpc at localhost:%s...\n") % rpc_port_name;
+ UHD_LOGGER_INFO("X300") << boost::format("Connecting to niusrpriorpc at localhost:%s...") % rpc_port_name;
//Instantiate the correct lvbitx object
nifpga_lvbitx::sptr lvbitx;
@@ -555,7 +586,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
drivers have loaded successfully.");
}
//Load the lvbitx onto the device
- UHD_MSG(status) << boost::format("Using LVBITX bitfile %s...\n") % lvbitx->get_bitfile_path();
+ UHD_LOGGER_INFO("X300") << boost::format("Using LVBITX bitfile %s...") % lvbitx->get_bitfile_path();
mb.rio_fpga_interface.reset(new niusrprio_session(dev_addr["resource"], rpc_port_name));
nirio_status_chain(mb.rio_fpga_interface->open(lvbitx, dev_addr.has_key("download-fpga")), status);
nirio_status_to_exception(status, "x300_impl: Could not initialize RIO session.");
@@ -569,7 +600,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
_tree->create<double>(mb_path / "link_max_rate").set(X300_MAX_RATE_PCIE);
}
- BOOST_FOREACH(const std::string &key, dev_addr.keys())
+ for(const std::string &key: dev_addr.keys())
{
if (key.find("recv") != std::string::npos) mb.recv_args[key] = dev_addr[key];
if (key.find("send") != std::string::npos) mb.send_args[key] = dev_addr[key];
@@ -628,33 +659,33 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
);
}
} catch(std::exception &e) {
- UHD_MSG(error) << e.what() << std::endl;
+ UHD_LOGGER_ERROR("X300") << e.what() ;
}
if ((mb.recv_args.has_key("recv_frame_size"))
&& (req_max_frame_size.recv_frame_size > _max_frame_sizes.recv_frame_size)) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("X300")
<< boost::format("You requested a receive frame size of (%lu) but your NIC's max frame size is (%lu).")
% req_max_frame_size.recv_frame_size
% _max_frame_sizes.recv_frame_size
- << std::endl
+
<< boost::format("Please verify your NIC's MTU setting using '%s' or set the recv_frame_size argument appropriately.")
- % mtu_tool << std::endl
+ % mtu_tool
<< "UHD will use the auto-detected max frame size for this connection."
- << std::endl;
+ ;
}
if ((mb.recv_args.has_key("send_frame_size"))
&& (req_max_frame_size.send_frame_size > _max_frame_sizes.send_frame_size)) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("X300")
<< boost::format("You requested a send frame size of (%lu) but your NIC's max frame size is (%lu).")
% req_max_frame_size.send_frame_size
% _max_frame_sizes.send_frame_size
- << std::endl
+
<< boost::format("Please verify your NIC's MTU setting using '%s' or set the send_frame_size argument appropriately.")
- % mtu_tool << std::endl
+ % mtu_tool
<< "UHD will use the auto-detected max frame size for this connection."
- << std::endl;
+ ;
}
_tree->create<size_t>(mb_path / "mtu/recv").set(_max_frame_sizes.recv_frame_size);
@@ -663,7 +694,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
}
//create basic communication
- UHD_MSG(status) << "Setup basic communication..." << std::endl;
+ UHD_LOGGER_INFO("X300") << "Setup basic communication...";
if (mb.xport_path == "nirio") {
boost::mutex::scoped_lock(pcie_zpu_iface_registry_mutex);
if (get_pcie_zpu_iface_registry().has_key(mb.get_pri_eth().addr)) {
@@ -723,11 +754,10 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
const uint32_t nbor_addr = mb.zpu_ctrl->peek32(SR_ADDR(routes_addr, i*2+1));
if (node_addr != 0 and nbor_addr != 0)
{
- UHD_MSG(status) << boost::format("%u: %s -> %s")
+ UHD_LOGGER_INFO("X300") << boost::format("%u: %s -> %s")
% i
% asio::ip::address_v4(node_addr).to_string()
- % asio::ip::address_v4(nbor_addr).to_string()
- << std::endl;
+ % asio::ip::address_v4(nbor_addr).to_string();
}
}
*/
@@ -735,11 +765,11 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
////////////////////////////////////////////////////////////////////
// setup the mboard eeprom
////////////////////////////////////////////////////////////////////
- UHD_MSG(status) << "Loading values from EEPROM..." << std::endl;
+ UHD_LOGGER_INFO("X300") << "Loading values from EEPROM...";
x300_mb_eeprom_iface::sptr eeprom16 = x300_mb_eeprom_iface::make(mb.zpu_ctrl, mb.zpu_i2c);
if (dev_addr.has_key("blank_eeprom"))
{
- UHD_MSG(warning) << "Obliterating the motherboard EEPROM..." << std::endl;
+ UHD_LOGGER_WARNING("X300") << "Obliterating the motherboard EEPROM..." ;
eeprom16->write_eeprom(0x50, 0, byte_vector_t(256, 0xff));
}
const mboard_eeprom_t mb_eeprom(*eeprom16, "X300");
@@ -749,9 +779,9 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
bool recover_mb_eeprom = dev_addr.has_key("recover_mb_eeprom");
if (recover_mb_eeprom) {
- UHD_MSG(warning) << "UHD is operating in EEPROM Recovery Mode which disables hardware version "
+ UHD_LOGGER_WARNING("X300") << "UHD is operating in EEPROM Recovery Mode which disables hardware version "
"checks.\nOperating in this mode may cause hardware damage and unstable "
- "radio performance!"<< std::endl;
+ "radio performance!";
}
////////////////////////////////////////////////////////////////////
@@ -831,7 +861,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
////////////////////////////////////////////////////////////////////
// create clock control objects
////////////////////////////////////////////////////////////////////
- UHD_MSG(status) << "Setup RF frontend clocking..." << std::endl;
+ UHD_LOGGER_INFO("X300") << "Setup RF frontend clocking...";
//Initialize clock control registers. NOTE: This does not configure the LMK yet.
mb.clock = x300_clock_ctrl::make(mb.zpu_spi,
@@ -853,8 +883,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
.set_publisher(boost::bind(&x300_clock_ctrl::get_master_clock_rate, mb.clock))
;
- UHD_MSG(status) << "Radio 1x clock:" << (mb.clock->get_master_clock_rate()/1e6)
- << std::endl;
+ UHD_LOGGER_INFO("X300") << "Radio 1x clock:" << (mb.clock->get_master_clock_rate()/1e6);
////////////////////////////////////////////////////////////////////
// Create the GPSDO control
@@ -864,18 +893,18 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
//otherwise if not disabled, look for the internal GPSDO
if (mb.zpu_ctrl->peek32(SR_ADDR(X300_FW_SHMEM_BASE, X300_FW_SHMEM_GPSDO_STATUS)) != dont_look_for_gpsdo)
{
- UHD_MSG(status) << "Detecting internal GPSDO.... " << std::flush;
+ UHD_LOGGER_INFO("X300") << "Detecting internal GPSDO.... ";
try
{
mb.gps = gps_ctrl::make(x300_make_uart_iface(mb.zpu_ctrl));
}
catch(std::exception &e)
{
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("X300") << "An error occurred making GPSDO control: " << e.what() ;
}
if (mb.gps and mb.gps->gps_detected())
{
- BOOST_FOREACH(const std::string &name, mb.gps->get_sensors())
+ for(const std::string &name: mb.gps->get_sensors())
{
_tree->create<sensor_value_t>(mb_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, mb.gps, name));
@@ -888,14 +917,6 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
}
////////////////////////////////////////////////////////////////////
- //clear router?
- ////////////////////////////////////////////////////////////////////
- for (size_t i = 0; i < 512; i++) {
- mb.zpu_ctrl->poke32(SR_ADDR(SETXB_BASE, i), 0);
- }
-
-
- ////////////////////////////////////////////////////////////////////
// setup time sources and properties
////////////////////////////////////////////////////////////////////
_tree->create<std::string>(mb_path / "time_source" / "value")
@@ -952,8 +973,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
n_rfnoc_blocks,
X300_XB_DST_PCI + 1, /* base port */
uhd::sid_t(X300_SRC_ADDR0, 0, X300_DST_ADDR + mb_i, 0),
- dev_addr,
- mb.if_pkt_is_big_endian ? ENDIANNESS_BIG : ENDIANNESS_LITTLE
+ dev_addr
);
//////////////// RFNOC /////////////////
@@ -963,11 +983,11 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
find_blocks<rfnoc::x300_radio_ctrl_impl>(radio_blockid_hint);
if (not radio_ids.empty()) {
if (radio_ids.size() > 2) {
- UHD_MSG(warning) << "Too many Radio Blocks found. Using only the first two." << std::endl;
+ UHD_LOGGER_WARNING("X300") << "Too many Radio Blocks found. Using only the first two." ;
radio_ids.resize(2);
}
- BOOST_FOREACH(const rfnoc::block_id_t &id, radio_ids) {
+ for(const rfnoc::block_id_t &id: radio_ids) {
rfnoc::x300_radio_ctrl_impl::sptr radio(get_block_ctrl<rfnoc::x300_radio_ctrl_impl>(id));
mb.radios.push_back(radio);
radio->setup_radio(
@@ -1005,7 +1025,7 @@ void x300_impl::setup_mb(const size_t mb_i, const uhd::device_addr_t &dev_addr)
}
} else {
- UHD_MSG(status) << "No Radio Block found. Assuming radio-less operation." << std::endl;
+ UHD_LOGGER_INFO("X300") << "No Radio Block found. Assuming radio-less operation.";
} /* end of radio block(s) initialization */
mb.initialization_done = true;
@@ -1015,7 +1035,7 @@ x300_impl::~x300_impl(void)
{
try
{
- BOOST_FOREACH(mboard_members_t &mb, _mb)
+ for(mboard_members_t &mb: _mb)
{
//kill the claimer task and unclaim the device
mb.claimer_task.reset();
@@ -1051,8 +1071,8 @@ uint32_t x300_impl::mboard_members_t::allocate_pcie_dma_chan(const uhd::sid_t &t
if (_dma_chan_pool.count(raw_sid) == 0) {
_dma_chan_pool[raw_sid] = _dma_chan_pool.size() + FIRST_DATA_CHANNEL;
- UHD_LOG << "[X300] Assigning PCIe DMA channel " << _dma_chan_pool[raw_sid]
- << " to SID " << tx_sid.to_pp_string_hex() << std::endl;
+ UHD_LOGGER_DEBUG("X300") << "[X300] Assigning PCIe DMA channel " << _dma_chan_pool[raw_sid]
+ << " to SID " << tx_sid.to_pp_string_hex() ;
}
if (_dma_chan_pool.size() + FIRST_DATA_CHANNEL > X300_PCIE_MAX_CHANNELS) {
@@ -1095,6 +1115,7 @@ uhd::both_xports_t x300_impl::make_transport(
zero_copy_xport_params default_buff_args;
both_xports_t xports;
+ xports.endianness = mb.if_pkt_is_big_endian ? ENDIANNESS_BIG : ENDIANNESS_LITTLE;
if (mb.xport_path == "nirio") {
xports.send_sid = this->allocate_sid(mb, address, X300_SRC_ADDR0, X300_XB_DST_PCI);
xports.recv_sid = xports.send_sid.reversed();
@@ -1214,23 +1235,23 @@ uhd::both_xports_t x300_impl::make_transport(
/* Print a warning if the system's max available frame size is less than the most optimal
* frame size for this type of connection. */
if (_max_frame_sizes.send_frame_size < eth_data_rec_frame_size) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("X300")
<< boost::format("For this connection, UHD recommends a send frame size of at least %lu for best\nperformance, but your system's MTU will only allow %lu.")
% eth_data_rec_frame_size
% _max_frame_sizes.send_frame_size
- << std::endl
+
<< "This will negatively impact your maximum achievable sample rate."
- << std::endl;
+ ;
}
if (_max_frame_sizes.recv_frame_size < eth_data_rec_frame_size) {
- UHD_MSG(warning)
+ UHD_LOGGER_WARNING("X300")
<< boost::format("For this connection, UHD recommends a receive frame size of at least %lu for best\nperformance, but your system's MTU will only allow %lu.")
% eth_data_rec_frame_size
% _max_frame_sizes.recv_frame_size
- << std::endl
+
<< "This will negatively impact your maximum achievable sample rate."
- << std::endl;
+ ;
}
size_t system_max_send_frame_size = (size_t) _max_frame_sizes.send_frame_size;
@@ -1288,8 +1309,8 @@ uhd::both_xports_t x300_impl::make_transport(
//send a mini packet with SID into the ZPU
//ZPU will reprogram the ethernet framer
- UHD_LOG << "programming packet for new xport on "
- << interface_addr << " sid " << xports.send_sid << std::endl;
+ UHD_LOGGER_DEBUG("X300") << "programming packet for new xport on "
+ << interface_addr << " sid " << xports.send_sid ;
//YES, get a __send__ buffer from the __recv__ socket
//-- this is the only way to program the framer for recv:
managed_send_buffer::sptr buff = xports.recv->get_send_buff();
@@ -1299,7 +1320,7 @@ uhd::both_xports_t x300_impl::make_transport(
buff.reset();
//reprogram the ethernet dispatcher's udp port (should be safe to always set)
- UHD_LOG << "reprogram the ethernet dispatcher's udp port" << std::endl;
+ UHD_LOGGER_DEBUG("X300") << "reprogram the ethernet dispatcher's udp port" ;
mb.zpu_ctrl->poke32(SR_ADDR(SET0_BASE, (ZPU_SR_ETHINT0+8+3)), X300_VITA_UDP_PORT);
mb.zpu_ctrl->poke32(SR_ADDR(SET0_BASE, (ZPU_SR_ETHINT1+8+3)), X300_VITA_UDP_PORT);
@@ -1319,7 +1340,7 @@ uhd::sid_t x300_impl::allocate_sid(
) {
uhd::sid_t sid = address;
sid.set_src_addr(src_addr);
- sid.set_src_endpoint(_sid_framer);
+ sid.set_src_endpoint(_sid_framer++); //increment for next setup
// TODO Move all of this to setup_mb()
// Program the X300 to recognise its own local address.
@@ -1331,10 +1352,7 @@ uhd::sid_t x300_impl::allocate_sid(
// This type of packet does not match the XB_LOCAL address and is looked up in the lower half of the CAM
mb.zpu_ctrl->poke32(SR_ADDR(SETXB_BASE, 0 + src_addr), src_dst);
- UHD_LOG << "done router config for sid " << sid << std::endl;
-
- //increment for next setup
- _sid_framer++;
+ UHD_LOGGER_TRACE("X300") << "done router config for sid " << sid ;
return sid;
}
@@ -1388,8 +1406,8 @@ void x300_impl::update_clock_source(mboard_members_t &mb, const std::string &sou
throw uhd::runtime_error((boost::format("Reference Clock PLL failed to lock to %s source.") % source).str());
} else {
//TODO: Re-enable this warning when we figure out a reliable lock time
- //UHD_MSG(warning) << "Reference clock failed to lock to " + source + " during device initialization. " <<
- // "Check for the lock before operation or ignore this warning if using another clock source." << std::endl;
+ //UHD_LOGGER_WARNING("X300") << "Reference clock failed to lock to " + source + " during device initialization. " <<
+ // "Check for the lock before operation or ignore this warning if using another clock source." ;
}
}
}
@@ -1414,7 +1432,7 @@ void x300_impl::update_clock_source(mboard_members_t &mb, const std::string &sou
}
// Reset ADCs and DACs
- BOOST_FOREACH(rfnoc::x300_radio_ctrl_impl::sptr r, mb.radios) {
+ for(rfnoc::x300_radio_ctrl_impl::sptr r: mb.radios) {
r->reset_codec();
}
}
@@ -1446,7 +1464,7 @@ void x300_impl::update_time_source(mboard_members_t &mb, const std::string &sour
void x300_impl::sync_times(mboard_members_t &mb, const uhd::time_spec_t& t)
{
std::vector<rfnoc::block_id_t> radio_ids = find_blocks<rfnoc::x300_radio_ctrl_impl>("Radio");
- BOOST_FOREACH(const rfnoc::block_id_t &id, radio_ids) {
+ for(const rfnoc::block_id_t &id: radio_ids) {
get_block_ctrl<rfnoc::x300_radio_ctrl_impl>(id)->set_time_sync(t);
}
@@ -1607,7 +1625,7 @@ x300_impl::frame_size_t x300_impl::determine_max_frame_size(const std::string &a
size_t min_send_frame_size = sizeof(x300_mtu_t);
size_t max_send_frame_size = std::min(user_frame_size.send_frame_size, X300_10GE_DATA_FRAME_MAX_SIZE) & size_t(~3);
- UHD_MSG(status) << "Determining maximum frame size... ";
+ UHD_LOGGER_INFO("X300") << "Determining maximum frame size... ";
while (min_recv_frame_size < max_recv_frame_size)
{
size_t test_frame_size = (max_recv_frame_size/2 + min_recv_frame_size/2 + 3) & ~3;
@@ -1656,7 +1674,7 @@ x300_impl::frame_size_t x300_impl::determine_max_frame_size(const std::string &a
// of the recv and send frame sizes.
frame_size.recv_frame_size = std::min(min_recv_frame_size, min_send_frame_size);
frame_size.send_frame_size = std::min(min_recv_frame_size, min_send_frame_size);
- UHD_MSG(status) << frame_size.send_frame_size << " bytes." << std::endl;
+ UHD_LOGGER_INFO("X300") << "Maximum frame size: " << frame_size.send_frame_size << " bytes.";
return frame_size;
}
@@ -1833,7 +1851,7 @@ x300_impl::x300_mboard_t x300_impl::get_mb_type_from_eeprom(const uhd::usrp::mbo
case X310_2955R_PCIE_SSID_ADC_18:
mb_type = USRP_X310_MB; break;
default:
- UHD_MSG(warning) << "X300 unknown product code in EEPROM: " << product_num << std::endl;
+ UHD_LOGGER_WARNING("X300") << "X300 unknown product code in EEPROM: " << product_num ;
mb_type = UNKNOWN; break;
}
}
diff --git a/host/lib/usrp/x300/x300_impl.hpp b/host/lib/usrp/x300/x300_impl.hpp
index 982369396..7186e5f4f 100644
--- a/host/lib/usrp/x300/x300_impl.hpp
+++ b/host/lib/usrp/x300/x300_impl.hpp
@@ -39,6 +39,7 @@
#include <uhd/rfnoc/block_ctrl.hpp>
///////////// RFNOC /////////////////////
#include <boost/dynamic_bitset.hpp>
+#include <atomic>
static const std::string X300_FW_FILE_NAME = "usrp_x300_fw.bin";
static const std::string X300_DEFAULT_CLOCK_SOURCE = "internal";
@@ -77,6 +78,9 @@ static const size_t X300_ETH_MSG_NUM_FRAMES = 64;
static const size_t X300_ETH_DATA_NUM_FRAMES = 32;
static const double X300_DEFAULT_SYSREF_RATE = 10e6;
+// Limit the number of initialization threads
+static const size_t X300_MAX_INIT_THREADS = 10;
+
static const size_t X300_MAX_RATE_PCIE = 800000000; // bytes/s
static const size_t X300_MAX_RATE_10GIGE = (size_t)( // bytes/s
10e9 / 8 * // wire speed multiplied by percentage of packets that is sample data
@@ -220,7 +224,7 @@ private:
//task for periodically reclaiming the device from others
void claimer_loop(uhd::wb_iface::sptr);
- size_t _sid_framer;
+ std::atomic<size_t> _sid_framer;
uhd::sid_t allocate_sid(
mboard_members_t &mb,
@@ -291,9 +295,6 @@ private:
/// More IO stuff
uhd::device_addr_t get_tx_hints(size_t mb_index);
uhd::device_addr_t get_rx_hints(size_t mb_index);
- uhd::endianness_t get_transport_endianness(size_t mb_index) {
- return _mb[mb_index].if_pkt_is_big_endian ? uhd::ENDIANNESS_BIG : uhd::ENDIANNESS_LITTLE;
- };
void post_streamer_hooks(uhd::direction_t dir);
};
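Turning _sid_framer into a std::atomic<size_t> (above) makes the post-increment in allocate_sid() safe when several setup_mb() threads run concurrently, so each transport still receives a unique source endpoint. A minimal sketch of that idea with generic names; the counter and the thread workload are invented:

    #include <atomic>
    #include <cstdio>
    #include <thread>
    #include <vector>

    static std::atomic<size_t> g_endpoint_counter(0);   // stands in for _sid_framer

    static size_t allocate_endpoint()
    {
        // Atomic post-increment: each caller gets a distinct value even when
        // several initialization threads run at the same time.
        return g_endpoint_counter++;
    }

    int main()
    {
        std::vector<std::thread> threads;
        for (int t = 0; t < 4; t++) {
            threads.emplace_back([] {
                for (int i = 0; i < 3; i++) {
                    std::printf("got endpoint %zu\n", allocate_endpoint());
                }
            });
        }
        for (auto &th : threads) th.join();
        return 0;
    }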
diff --git a/host/lib/usrp/x300/x300_io_impl.cpp b/host/lib/usrp/x300/x300_io_impl.cpp
index 1584cee24..eeff4091f 100644
--- a/host/lib/usrp/x300/x300_io_impl.cpp
+++ b/host/lib/usrp/x300/x300_io_impl.cpp
@@ -25,8 +25,6 @@
#include <boost/bind.hpp>
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
-#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
using namespace uhd;
@@ -73,7 +71,7 @@ void x300_impl::post_streamer_hooks(direction_t dir)
// Loop through all tx streamers. Find all radios connected to one
// streamer. Sync those.
- BOOST_FOREACH(const boost::weak_ptr<uhd::tx_streamer> &streamer_w, _tx_streamers.vals()) {
+ for(const boost::weak_ptr<uhd::tx_streamer> &streamer_w: _tx_streamers.vals()) {
const boost::shared_ptr<sph::send_packet_streamer> streamer =
boost::dynamic_pointer_cast<sph::send_packet_streamer>(streamer_w.lock());
if (not streamer) {
@@ -83,7 +81,7 @@ void x300_impl::post_streamer_hooks(direction_t dir)
std::vector<rfnoc::x300_radio_ctrl_impl::sptr> radio_ctrl_blks =
streamer->get_terminator()->find_downstream_node<rfnoc::x300_radio_ctrl_impl>();
try {
- //UHD_MSG(status) << "[X300] syncing " << radio_ctrl_blks.size() << " radios " << std::endl;
+ //UHD_LOGGER_INFO("X300") << "[X300] syncing " << radio_ctrl_blks.size() << " radios " ;
rfnoc::x300_radio_ctrl_impl::synchronize_dacs(radio_ctrl_blks);
}
catch(const uhd::io_error &ex) {
diff --git a/host/lib/usrp/x300/x300_mb_eeprom.cpp b/host/lib/usrp/x300/x300_mb_eeprom.cpp
index e39b36af8..084685991 100644
--- a/host/lib/usrp/x300/x300_mb_eeprom.cpp
+++ b/host/lib/usrp/x300/x300_mb_eeprom.cpp
@@ -33,7 +33,7 @@
#include "x300_impl.hpp"
#include <uhd/exception.hpp>
#include <uhd/utils/platform.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/byteswap.hpp>
#include <boost/thread.hpp>
diff --git a/host/lib/usrp/x300/x300_radio_ctrl_impl.cpp b/host/lib/usrp/x300/x300_radio_ctrl_impl.cpp
index a3bc2e691..1a37cbdd1 100644
--- a/host/lib/usrp/x300/x300_radio_ctrl_impl.cpp
+++ b/host/lib/usrp/x300/x300_radio_ctrl_impl.cpp
@@ -22,7 +22,7 @@
#include "gpio_atr_3000.hpp"
#include "apply_corrections.hpp"
#include <uhd/usrp/dboard_eeprom.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/usrp/dboard_iface.hpp>
#include <uhd/rfnoc/node_ctrl_base.hpp>
#include <uhd/transport/chdr.hpp>
@@ -30,6 +30,7 @@
#include <boost/algorithm/string.hpp>
#include <boost/make_shared.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
+#include <boost/assign/list_of.hpp>
using namespace uhd;
using namespace uhd::usrp;
@@ -44,7 +45,7 @@ static const size_t IO_MASTER_RADIO = 0;
UHD_RFNOC_RADIO_BLOCK_CONSTRUCTOR(x300_radio_ctrl)
, _ignore_cal_file(false)
{
- UHD_RFNOC_BLOCK_TRACE() << "x300_radio_ctrl_impl::ctor() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "x300_radio_ctrl_impl::ctor() " ;
////////////////////////////////////////////////////////////////////
// Set up basic info
@@ -82,7 +83,7 @@ UHD_RFNOC_RADIO_BLOCK_CONSTRUCTOR(x300_radio_ctrl)
if (_radio_type==PRIMARY) {
_fp_gpio = gpio_atr::gpio_atr_3000::make(ctrl, regs::sr_addr(regs::FP_GPIO), regs::RB_FP_GPIO);
- BOOST_FOREACH(const gpio_atr::gpio_attr_map_t::value_type attr, gpio_atr::gpio_attr_map) {
+ for(const gpio_atr::gpio_attr_map_t::value_type attr: gpio_atr::gpio_attr_map) {
_tree->create<uint32_t>(fs_path("gpio") / "FP0" / attr.second)
.set(0)
.add_coerced_subscriber(boost::bind(&gpio_atr::gpio_atr_3000::set_gpio_attr, _fp_gpio, attr.first, _1));
@@ -149,7 +150,7 @@ x300_radio_ctrl_impl::~x300_radio_ctrl_impl()
_tree->remove(_root_path / "rx_fe_corrections");
_tree->remove(_root_path / "tx_fe_corrections");
if (_radio_type==PRIMARY) {
- BOOST_FOREACH(const gpio_atr::gpio_attr_map_t::value_type attr, gpio_atr::gpio_attr_map) {
+ for(const gpio_atr::gpio_attr_map_t::value_type attr: gpio_atr::gpio_attr_map) {
_tree->remove(fs_path("gpio") / "FP0" / attr.second);
}
_tree->remove(fs_path("gpio") / "FP0" / "READBACK");
@@ -171,7 +172,7 @@ double x300_radio_ctrl_impl::set_rate(double rate)
{
const double actual_rate = get_rate();
if (not uhd::math::frequencies_are_equal(rate, actual_rate)) {
- UHD_MSG(warning) << "[X300 Radio] Requesting invalid sampling rate from device: " << rate/1e6 << " MHz. Actual rate is: " << actual_rate/1e6 << " MHz." << std::endl;
+ UHD_LOGGER_WARNING("X300 RADIO") << "Requesting invalid sampling rate from device: " << rate/1e6 << " MHz. Actual rate is: " << actual_rate/1e6 << " MHz." ;
}
// On X3x0, tick rate can't actually be changed at runtime
return actual_rate;
@@ -228,6 +229,20 @@ double x300_radio_ctrl_impl::get_rx_frequency(const size_t chan)
).get();
}
+double x300_radio_ctrl_impl::set_rx_bandwidth(const double bandwidth, const size_t chan)
+{
+ return _tree->access<double>(
+ fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name / "bandwidth" / "value")
+ ).set(bandwidth).get();
+}
+
+double x300_radio_ctrl_impl::get_rx_bandwidth(const size_t chan)
+{
+ return _tree->access<double>(
+ fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name / "bandwidth" / "value")
+ ).get();
+}
+
double x300_radio_ctrl_impl::set_tx_gain(const double gain, const size_t chan)
{
//TODO: This is extremely hacky!
@@ -238,7 +253,7 @@ double x300_radio_ctrl_impl::set_tx_gain(const double gain, const size_t chan)
radio_ctrl_impl::set_tx_gain(actual_gain, chan);
return gain;
} else {
- UHD_MSG(warning) << "set_tx_gain: could not apply gain for this daughterboard.";
+ UHD_LOGGER_WARNING("X300 RADIO") << "set_tx_gain: could not apply gain for this daughterboard.";
radio_ctrl_impl::set_tx_gain(0.0, chan);
return 0.0;
}
@@ -254,19 +269,209 @@ double x300_radio_ctrl_impl::set_rx_gain(const double gain, const size_t chan)
radio_ctrl_impl::set_rx_gain(actual_gain, chan);
return gain;
} else {
- UHD_MSG(warning) << "set_rx_gain: could not apply gain for this daughterboard.";
+ UHD_LOGGER_WARNING("X300 RADIO") << "set_rx_gain: could not apply gain for this daughterboard.";
radio_ctrl_impl::set_rx_gain(0.0, chan);
return 0.0;
}
}
+std::vector<std::string> x300_radio_ctrl_impl::get_rx_lo_names(const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ std::vector<std::string> lo_names;
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ for(const std::string &name: _tree->list(rx_fe_fe_root / "los")) {
+ lo_names.push_back(name);
+ }
+ }
+ return lo_names;
+}
+
+std::vector<std::string> x300_radio_ctrl_impl::get_rx_lo_sources(const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ if (_tree->exists(rx_fe_fe_root / "los" / ALL_LOS)) {
+ //The special value ALL_LOS returns the source options for all LOs at once
+ return _tree->access< std::vector<std::string> >(rx_fe_fe_root / "los" / ALL_LOS / "source" / "options").get();
+ } else {
+ return std::vector<std::string>();
+ }
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ return _tree->access< std::vector<std::string> >(rx_fe_fe_root / "los" / name / "source" / "options").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ // If the daughterboard doesn't expose its LO(s) then it can only be internal
+ return std::vector<std::string> (1, "internal");
+ }
+}
+
+void x300_radio_ctrl_impl::set_rx_lo_source(const std::string &src, const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ if (_tree->exists(rx_fe_fe_root / "los" / ALL_LOS)) {
+ //The special value ALL_LOS atomically sets the source for all LOs
+ _tree->access<std::string>(rx_fe_fe_root / "los" / ALL_LOS / "source" / "value").set(src);
+ } else {
+ for(const std::string &n: _tree->list(rx_fe_fe_root / "los")) {
+ this->set_rx_lo_source(src, n, chan);
+ }
+ }
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ _tree->access<std::string>(rx_fe_fe_root / "los" / name / "source" / "value").set(src);
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ throw uhd::runtime_error("This device does not support manual configuration of LOs");
+ }
+}
+
+const std::string x300_radio_ctrl_impl::get_rx_lo_source(const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ //The special value ALL_LOS atomically reads the source for all LOs
+ return _tree->access<std::string>(rx_fe_fe_root / "los" / ALL_LOS / "source" / "value").get();
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ return _tree->access<std::string>(rx_fe_fe_root / "los" / name / "source" / "value").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ // If the daughterboard doesn't expose its LO(s) then it can only be internal
+ return "internal";
+ }
+}
+
+void x300_radio_ctrl_impl::set_rx_lo_export_enabled(bool enabled, const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ if (_tree->exists(rx_fe_fe_root / "los" / ALL_LOS)) {
+ //The special value ALL_LOS atomically sets the export flag for all LOs
+ _tree->access<bool>(rx_fe_fe_root / "los" / ALL_LOS / "export").set(enabled);
+ } else {
+ for(const std::string &n: _tree->list(rx_fe_fe_root / "los")) {
+ this->set_rx_lo_export_enabled(enabled, n, chan);
+ }
+ }
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ _tree->access<bool>(rx_fe_fe_root / "los" / name / "export").set(enabled);
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ throw uhd::runtime_error("This device does not support manual configuration of LOs");
+ }
+}
+
+bool x300_radio_ctrl_impl::get_rx_lo_export_enabled(const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ //The special value ALL_LOS atomically reads the export flag for all LOs
+ return _tree->access<bool>(rx_fe_fe_root / "los" / ALL_LOS / "export").get();
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ return _tree->access<bool>(rx_fe_fe_root / "los" / name / "export").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ // If the daughterboard doesn't expose its LO(s), assume it cannot export
+ return false;
+ }
+}
+
+double x300_radio_ctrl_impl::set_rx_lo_freq(double freq, const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ throw uhd::runtime_error("LO frequency must be set for each stage individually");
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ _tree->access<double>(rx_fe_fe_root / "los" / name / "freq" / "value").set(freq);
+ return _tree->access<double>(rx_fe_fe_root / "los" / name / "freq" / "value").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ throw uhd::runtime_error("This device does not support manual configuration of LOs");
+ }
+}
+
+double x300_radio_ctrl_impl::get_rx_lo_freq(const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ throw uhd::runtime_error("LO frequency must be retrieved for each stage individually");
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ return _tree->access<double>(rx_fe_fe_root / "los" / name / "freq" / "value").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ // Return actual RF frequency if the daughterboard doesn't expose its LO(s)
+ return _tree->access<double>(rx_fe_fe_root / "freq" / "value").get();
+ }
+}
+
+freq_range_t x300_radio_ctrl_impl::get_rx_lo_freq_range(const std::string &name, const size_t chan)
+{
+ fs_path rx_fe_fe_root = fs_path("dboards" / _radio_slot / "rx_frontends" / _rx_fe_map.at(chan).db_fe_name);
+
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ if (name == ALL_LOS) {
+ throw uhd::runtime_error("LO frequency range must be retrieved for each stage individually");
+ } else {
+ if (_tree->exists(rx_fe_fe_root / "los")) {
+ return _tree->access<freq_range_t>(rx_fe_fe_root / "los" / name / "freq" / "range").get();
+ } else {
+ throw uhd::runtime_error("Could not find LO stage " + name);
+ }
+ }
+ } else {
+ // Return the actual RF range if the daughterboard doesn't expose its LO(s)
+ return _tree->access<meta_range_t>(rx_fe_fe_root / "freq" / "range").get();
+ }
+}
+
template <typename map_type>
static size_t _get_chan_from_map(std::map<size_t, map_type> map, const std::string &fe)
{
- // TODO replace with 'auto' when possible
- typedef typename std::map<size_t, map_type>::iterator chan_iterator;
- for (chan_iterator it = map.begin(); it != map.end(); ++it) {
+ for (auto it = map.begin(); it != map.end(); ++it) {
if (it->second.db_fe_name == fe) {
return it->first;
}
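The LO-control methods added above share one convention: probe for a "los" subtree under the frontend path, operate on its "source"/"export"/"freq" nodes when it exists, and otherwise report the LO as internal (or refuse manual configuration). A condensed sketch of that probe-then-access pattern, assuming uhd::property_tree::make() can build a stand-alone tree; the path below is invented, only the node layout follows the hunk:

    #include <uhd/property_tree.hpp>
    #include <iostream>
    #include <string>

    int main()
    {
        // Throwaway tree so the lookup logic can run outside a device context
        uhd::property_tree::sptr tree = uhd::property_tree::make();
        const uhd::fs_path fe_root("/dboards/A/rx_frontends/0");   // invented path
        tree->create<std::string>(fe_root / "los" / "lo1" / "source" / "value").set("internal");

        const std::string lo_name = "lo1";
        std::string source;
        if (tree->exists(fe_root / "los")) {
            // Daughterboard exposes its LOs: read the requested stage
            source = tree->access<std::string>(fe_root / "los" / lo_name / "source" / "value").get();
        } else {
            // No "los" subtree: the LO is not user-configurable, report internal
            source = "internal";
        }
        std::cout << "LO source: " << source << std::endl;
        return 0;
    }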
@@ -318,6 +523,68 @@ double x300_radio_ctrl_impl::get_output_samp_rate(size_t chan)
return _rx_fe_map.at(chan).core->get_output_rate();
}
+std::vector<std::string> x300_radio_ctrl_impl::get_gpio_banks() const
+{
+ std::vector<std::string> banks = boost::assign::list_of("RX")("TX");
+ // These pairs are the same, but RXA/TXA are from pre-rfnoc era and are kept for backward compat:
+ banks.push_back("RX"+_radio_slot);
+ banks.push_back("TX"+_radio_slot);
+ if (_fp_gpio) {
+ banks.push_back("FP0");
+ }
+ return banks;
+}
+
+void x300_radio_ctrl_impl::set_gpio_attr(
+ const std::string &bank,
+ const std::string &attr,
+ const uint32_t value,
+ const uint32_t mask
+) {
+ if (bank == "FP0" and _fp_gpio) {
+ const uint32_t current = _tree->access<uint32_t>(fs_path("gpio") / bank / attr).get();
+ const uint32_t new_value = (current & ~mask) | (value & mask);
+ _tree->access<uint32_t>(fs_path("gpio") / bank / attr).set(new_value);
+ return;
+ }
+ if (bank.size() > 2 and bank[1] == 'X')
+ {
+ const std::string name = bank.substr(2);
+ const dboard_iface::unit_t unit = (bank[0] == 'R')? dboard_iface::UNIT_RX : dboard_iface::UNIT_TX;
+ dboard_iface::sptr iface = _tree->access<dboard_iface::sptr>(fs_path("dboards") / name / "iface").get();
+ if (attr == "CTRL") iface->set_pin_ctrl(unit, uint16_t(value), uint16_t(mask));
+ if (attr == "DDR") iface->set_gpio_ddr(unit, uint16_t(value), uint16_t(mask));
+ if (attr == "OUT") iface->set_gpio_out(unit, uint16_t(value), uint16_t(mask));
+ if (attr == "ATR_0X") iface->set_atr_reg(unit, gpio_atr::ATR_REG_IDLE, uint16_t(value), uint16_t(mask));
+ if (attr == "ATR_RX") iface->set_atr_reg(unit, gpio_atr::ATR_REG_RX_ONLY, uint16_t(value), uint16_t(mask));
+ if (attr == "ATR_TX") iface->set_atr_reg(unit, gpio_atr::ATR_REG_TX_ONLY, uint16_t(value), uint16_t(mask));
+ if (attr == "ATR_XX") iface->set_atr_reg(unit, gpio_atr::ATR_REG_FULL_DUPLEX, uint16_t(value), uint16_t(mask));
+ }
+}
+
+uint32_t x300_radio_ctrl_impl::get_gpio_attr(
+ const std::string &bank,
+ const std::string &attr
+) {
+ if (bank == "FP0" and _fp_gpio) {
+ return uint32_t(_tree->access<uint64_t>(fs_path("gpio") / bank / attr).get());
+ }
+ if (bank.size() > 2 and bank[1] == 'X') {
+ const std::string name = bank.substr(2);
+ const dboard_iface::unit_t unit = (bank[0] == 'R')? dboard_iface::UNIT_RX : dboard_iface::UNIT_TX;
+ dboard_iface::sptr iface = _tree->access<dboard_iface::sptr>(fs_path("dboards") / name / "iface").get();
+ if (attr == "CTRL") return iface->get_pin_ctrl(unit);
+ if (attr == "DDR") return iface->get_gpio_ddr(unit);
+ if (attr == "OUT") return iface->get_gpio_out(unit);
+ if (attr == "ATR_0X") return iface->get_atr_reg(unit, gpio_atr::ATR_REG_IDLE);
+ if (attr == "ATR_RX") return iface->get_atr_reg(unit, gpio_atr::ATR_REG_RX_ONLY);
+ if (attr == "ATR_TX") return iface->get_atr_reg(unit, gpio_atr::ATR_REG_TX_ONLY);
+ if (attr == "ATR_XX") return iface->get_atr_reg(unit, gpio_atr::ATR_REG_FULL_DUPLEX);
+ if (attr == "READBACK") return iface->read_gpio(unit);
+ }
+ return 0;
+}
+
/****************************************************************************
* Radio control and setup
***************************************************************************/
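The FP0 branch of set_gpio_attr() above updates only the requested bits with a masked read-modify-write, (current & ~mask) | (value & mask). A tiny stand-alone sketch of that bit manipulation; the register value below is invented:

    #include <cstdint>
    #include <cstdio>

    // Masked update as in set_gpio_attr(): keep the bits outside `mask`,
    // replace the bits inside `mask` with the corresponding bits of `value`.
    static uint32_t masked_write(uint32_t current, uint32_t value, uint32_t mask)
    {
        return (current & ~mask) | (value & mask);
    }

    int main()
    {
        uint32_t reg = 0xF0F0F0F0u;                        // pretend GPIO OUT register
        reg = masked_write(reg, /*value=*/0x0000FFFFu, /*mask=*/0x000000FFu);
        std::printf("reg = 0x%08X\n", reg);                // prints 0xF0F0F0FF
        return 0;
    }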
@@ -380,7 +647,7 @@ void x300_radio_ctrl_impl::setup_radio(
);
size_t rx_chan = 0, tx_chan = 0;
- BOOST_FOREACH(const std::string& fe, _db_manager->get_rx_frontends()) {
+ for(const std::string& fe: _db_manager->get_rx_frontends()) {
if (rx_chan >= _get_num_radios()) {
break;
}
@@ -393,7 +660,7 @@ void x300_radio_ctrl_impl::setup_radio(
_rx_fe_map[rx_chan].core->set_fe_connection(usrp::fe_connection_t(conn, if_freq));
rx_chan++;
}
- BOOST_FOREACH(const std::string& fe, _db_manager->get_tx_frontends()) {
+ for(const std::string& fe: _db_manager->get_tx_frontends()) {
if (tx_chan >= _get_num_radios()) {
break;
}
@@ -555,31 +822,26 @@ void x300_radio_ctrl_impl::self_test_adc(uint32_t ramp_time_ms)
void x300_radio_ctrl_impl::extended_adc_test(const std::vector<x300_radio_ctrl_impl::sptr>& radios, double duration_s)
{
static const size_t SECS_PER_ITER = 5;
- UHD_MSG(status) << boost::format("Running Extended ADC Self-Test (Duration=%.0fs, %ds/iteration)...\n")
+ UHD_LOGGER_INFO("X300 RADIO") << boost::format("Running Extended ADC Self-Test (Duration=%.0fs, %ds/iteration)...")
% duration_s % SECS_PER_ITER;
size_t num_iters = static_cast<size_t>(ceil(duration_s/SECS_PER_ITER));
size_t num_failures = 0;
for (size_t iter = 0; iter < num_iters; iter++) {
- //Print date and time
- boost::posix_time::time_facet *facet = new boost::posix_time::time_facet("%d-%b-%Y %H:%M:%S");
- std::ostringstream time_strm;
- time_strm.imbue(std::locale(std::locale::classic(), facet));
- time_strm << boost::posix_time::second_clock::local_time();
//Run self-test
- UHD_MSG(status) << boost::format("-- [%s] Iteration %06d... ") % time_strm.str() % (iter+1);
+ UHD_LOGGER_INFO("X300 RADIO") << boost::format("Extended ADC Self-Test Iteration %06d... ") % (iter+1);
try {
for (size_t i = 0; i < radios.size(); i++) {
radios[i]->self_test_adc((SECS_PER_ITER*1000)/radios.size());
}
- UHD_MSG(status) << "passed" << std::endl;
+ UHD_LOGGER_INFO("X300 RADIO") << boost::format("Extended ADC Self-Test Iteration %06d passed ") % (iter+1);
} catch(std::exception &e) {
num_failures++;
- UHD_MSG(status) << e.what() << std::endl;
+ UHD_LOGGER_ERROR("X300 RADIO") << e.what();
}
}
if (num_failures == 0) {
- UHD_MSG(status) << "Extended ADC Self-Test PASSED\n";
+ UHD_LOGGER_INFO("X300 RADIO") << "Extended ADC Self-Test PASSED";
} else {
throw uhd::runtime_error(
(boost::format("Extended ADC Self-Test FAILED!!! (%d/%d failures)\n") % num_failures % num_iters).str());
@@ -647,7 +909,7 @@ double x300_radio_ctrl_impl::self_cal_adc_xfer_delay(
boost::function<void(double)> wait_for_clk_locked,
bool apply_delay)
{
- UHD_MSG(status) << "Running ADC transfer delay self-cal: " << std::flush;
+ UHD_LOGGER_INFO("X300 RADIO") << "Running ADC transfer delay self-cal: ";
//Effective resolution of the self-cal.
static const size_t NUM_DELAY_STEPS = 100;
@@ -657,7 +919,6 @@ double x300_radio_ctrl_impl::self_cal_adc_xfer_delay(
double delay_range = 2 * master_clk_period;
double delay_incr = delay_range / NUM_DELAY_STEPS;
- UHD_MSG(status) << "Measuring..." << std::flush;
double cached_clk_delay = clock->get_clock_delay(X300_CLOCK_WHICH_ADC0);
double fpga_clk_delay = clock->get_clock_delay(X300_CLOCK_WHICH_FPGA);
@@ -703,7 +964,7 @@ double x300_radio_ctrl_impl::self_cal_adc_xfer_delay(
err_code += 100; //Increment error code by 100 to indicate no lock
}
}
- //UHD_MSG(status) << (boost::format("XferDelay=%fns, Error=%d\n") % delay % err_code);
+ //UHD_LOGGER_INFO("X300 RADIO") << (boost::format("XferDelay=%fns, Error=%d") % delay % err_code);
results.push_back(std::pair<double,bool>(delay, err_code==0));
}
@@ -755,7 +1016,6 @@ double x300_radio_ctrl_impl::self_cal_adc_xfer_delay(
}
if (apply_delay) {
- UHD_MSG(status) << "Validating..." << std::flush;
//Apply delay
win_center = clock->set_clock_delay(X300_CLOCK_WHICH_ADC0, win_center); //Sets ADC0 and ADC1
wait_for_clk_locked(0.1);
@@ -773,7 +1033,7 @@ double x300_radio_ctrl_impl::self_cal_adc_xfer_delay(
radios[r]->_adc->set_test_word("normal", "normal");
radios[r]->_regs->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 0);
}
- UHD_MSG(status) << (boost::format(" done (FPGA->ADC=%.3fns%s, Window=%.3fns)\n") %
+ UHD_LOGGER_INFO("X300 RADIO") << (boost::format("ADC transfer delay self-cal done (FPGA->ADC=%.3fns%s, Window=%.3fns)") %
(win_center-fpga_clk_delay) % (cycle_slip?" +cyc":"") % win_length);
return win_center;
@@ -797,7 +1057,7 @@ void x300_radio_ctrl_impl::_update_atr_leds(const std::string &rx_ant, const siz
void x300_radio_ctrl_impl::_self_cal_adc_capture_delay(bool print_status)
{
- if (print_status) UHD_MSG(status) << "Running ADC capture delay self-cal..." << std::flush;
+ if (print_status) UHD_LOGGER_INFO("X300 RADIO") << "Running ADC capture delay self-cal...";
static const uint32_t NUM_DELAY_STEPS = 32; //The IDELAYE2 element has 32 steps
static const uint32_t NUM_RETRIES = 2; //Retry self-cal if it fails in warmup situations
@@ -821,8 +1081,8 @@ void x300_radio_ctrl_impl::_self_cal_adc_capture_delay(bool print_status)
//and count deviations from the expected value
_regs->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 0);
_regs->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 1);
- //10ms @ 200MHz = 2 million samples
- boost::this_thread::sleep(boost::posix_time::milliseconds(10));
+ //5ms @ 200MHz = 1 million samples
+ boost::this_thread::sleep(boost::posix_time::milliseconds(5));
if (_regs->misc_ins_reg.read(radio_regmap_t::misc_ins_reg_t::ADC_CHECKER0_I_LOCKED)) {
err_code += _regs->misc_ins_reg.get(radio_regmap_t::misc_ins_reg_t::ADC_CHECKER0_I_ERROR);
} else {
@@ -836,8 +1096,8 @@ void x300_radio_ctrl_impl::_self_cal_adc_capture_delay(bool print_status)
//and count deviations from the expected value
_regs->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 0);
_regs->misc_outs_reg.write(radio_regmap_t::misc_outs_reg_t::ADC_CHECKER_ENABLED, 1);
- //10ms @ 200MHz = 2 million samples
- boost::this_thread::sleep(boost::posix_time::milliseconds(10));
+ //5ms @ 200MHz = 1 million samples
+ boost::this_thread::sleep(boost::posix_time::milliseconds(5));
if (_regs->misc_ins_reg.read(radio_regmap_t::misc_ins_reg_t::ADC_CHECKER0_Q_LOCKED)) {
err_code += _regs->misc_ins_reg.get(radio_regmap_t::misc_ins_reg_t::ADC_CHECKER0_Q_ERROR);
} else {
@@ -860,7 +1120,7 @@ void x300_radio_ctrl_impl::_self_cal_adc_capture_delay(bool print_status)
}
}
}
- //UHD_MSG(status) << (boost::format("CapTap=%d, Error=%d\n") % dly_tap % err_code);
+ //UHD_LOGGER_INFO("X300 RADIO") << (boost::format("CapTap=%d, Error=%d") % dly_tap % err_code);
}
//Retry the self-cal if it fails
@@ -890,7 +1150,7 @@ void x300_radio_ctrl_impl::_self_cal_adc_capture_delay(bool print_status)
if (print_status) {
double tap_delay = (1.0e12 / _radio_clk_rate) / (2*32); //in ps
- UHD_MSG(status) << boost::format(" done (Tap=%d, Window=%d, TapDelay=%.3fps, Iter=%d)\n") % ideal_tap % (win_stop-win_start) % tap_delay % iter;
+ UHD_LOGGER_INFO("X300 RADIO") << boost::format("ADC capture delay self-cal done (Tap=%d, Window=%d, TapDelay=%.3fps, Iter=%d)") % ideal_tap % (win_stop-win_start) % tap_delay % iter;
}
}
@@ -924,7 +1184,7 @@ void x300_radio_ctrl_impl::_set_command_time(const time_spec_t &spec, const size
***************************************************************************/
bool x300_radio_ctrl_impl::check_radio_config()
{
- UHD_RFNOC_BLOCK_TRACE() << "x300_radio_ctrl_impl::check_radio_config() " << std::endl;
+ UHD_RFNOC_BLOCK_TRACE() << "x300_radio_ctrl_impl::check_radio_config() " ;
const fs_path rx_fe_path = fs_path("dboards" / _radio_slot / "rx_frontends");
for (size_t chan = 0; chan < _get_num_radios(); chan++) {
if (_tree->exists(rx_fe_path / _rx_fe_map.at(chan).db_fe_name / "enabled")) {
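
Most of this file's changes follow a single pattern: the old message API is replaced by the component-tagged logging API, and the trailing std::flush/std::endl go away because the logger terminates each record itself. A representative before/after, taken from the capture-delay hunk above:

    // before (UHD_MSG, removed in this changeset):
    UHD_MSG(status) << "Running ADC capture delay self-cal..." << std::flush;
    // after (severity-specific macro plus a component string):
    UHD_LOGGER_INFO("X300 RADIO") << "Running ADC capture delay self-cal...";
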
diff --git a/host/lib/usrp/x300/x300_radio_ctrl_impl.hpp b/host/lib/usrp/x300/x300_radio_ctrl_impl.hpp
index 417c88f9e..9e3f298d8 100644
--- a/host/lib/usrp/x300/x300_radio_ctrl_impl.hpp
+++ b/host/lib/usrp/x300/x300_radio_ctrl_impl.hpp
@@ -56,15 +56,34 @@ public:
double set_tx_frequency(const double freq, const size_t chan);
double set_rx_frequency(const double freq, const size_t chan);
+ double set_rx_bandwidth(const double bandwidth, const size_t chan);
double get_tx_frequency(const size_t chan);
double get_rx_frequency(const size_t chan);
+ double get_rx_bandwidth(const size_t chan);
double set_tx_gain(const double gain, const size_t chan);
double set_rx_gain(const double gain, const size_t chan);
+ std::vector<std::string> get_rx_lo_names(const size_t chan);
+ std::vector<std::string> get_rx_lo_sources(const std::string &name, const size_t chan);
+ freq_range_t get_rx_lo_freq_range(const std::string &name, const size_t chan);
+
+ void set_rx_lo_source(const std::string &src, const std::string &name, const size_t chan);
+ const std::string get_rx_lo_source(const std::string &name, const size_t chan);
+
+ void set_rx_lo_export_enabled(bool enabled, const std::string &name, const size_t chan);
+ bool get_rx_lo_export_enabled(const std::string &name, const size_t chan);
+
+ double set_rx_lo_freq(double freq, const std::string &name, const size_t chan);
+ double get_rx_lo_freq(const std::string &name, const size_t chan);
+
size_t get_chan_from_dboard_fe(const std::string &fe, const direction_t dir);
std::string get_dboard_fe_from_chan(const size_t chan, const direction_t dir);
+ std::vector<std::string> get_gpio_banks() const;
+ void set_gpio_attr(const std::string &bank, const std::string &attr, const uint32_t value, const uint32_t mask);
+ uint32_t get_gpio_attr(const std::string &bank, const std::string &attr);
+
double get_output_samp_rate(size_t port);
/************************************************************************
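
The per-channel LO declarations added above (names, sources, export control, frequency) can be driven straight from a radio block handle. A minimal sketch, not part of this changeset: the helper name, channel index, 1 GHz target, and the "internal" source string are illustrative placeholders, and 'radio' is assumed to come from an existing RFNoC graph.

    #include <uhd/rfnoc/radio_ctrl.hpp>
    #include <uhd/types/ranges.hpp>
    #include <string>

    // Hypothetical helper: iterate the channel's LOs and tune each one.
    void tune_all_rx_los(uhd::rfnoc::radio_ctrl::sptr radio, const size_t chan)
    {
        for (const std::string &lo_name : radio->get_rx_lo_names(chan)) {
            const uhd::freq_range_t lo_range = radio->get_rx_lo_freq_range(lo_name, chan);
            radio->set_rx_lo_source("internal", lo_name, chan); // source names are device-dependent
            radio->set_rx_lo_export_enabled(false, lo_name, chan);
            radio->set_rx_lo_freq(lo_range.clip(1e9), lo_name, chan); // 1 GHz is a placeholder target
        }
    }
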
diff --git a/host/lib/usrp_clock/multi_usrp_clock.cpp b/host/lib/usrp_clock/multi_usrp_clock.cpp
index 71521190e..bcc0b57d6 100644
--- a/host/lib/usrp_clock/multi_usrp_clock.cpp
+++ b/host/lib/usrp_clock/multi_usrp_clock.cpp
@@ -19,11 +19,10 @@
#include <uhd/usrp_clock/multi_usrp_clock.hpp>
#include <uhd/usrp_clock/octoclock_eeprom.hpp>
-#include <uhd/utils/msg.hpp>
+
#include <uhd/exception.hpp>
#include <uhd/utils/log.hpp>
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
using namespace uhd;
@@ -92,6 +91,6 @@ multi_usrp_clock::~multi_usrp_clock(void){
* Multi USRP Clock factory function
**********************************************************************/
multi_usrp_clock::sptr multi_usrp_clock::make(const device_addr_t &dev_addr){
- UHD_LOG << "multi_usrp_clock::make with args " << dev_addr.to_pp_string() << std::endl;
+ UHD_LOGGER_TRACE("OCTOCLOCK") << "multi_usrp_clock::make with args " << dev_addr.to_pp_string() ;
return sptr(new multi_usrp_clock_impl(dev_addr));
}
diff --git a/host/lib/usrp_clock/octoclock/octoclock_eeprom.cpp b/host/lib/usrp_clock/octoclock/octoclock_eeprom.cpp
index b0d24deec..d18f94278 100644
--- a/host/lib/usrp_clock/octoclock/octoclock_eeprom.cpp
+++ b/host/lib/usrp_clock/octoclock/octoclock_eeprom.cpp
@@ -25,7 +25,6 @@
#include <boost/assign/list_of.hpp>
#include <boost/asio.hpp>
#include <boost/lexical_cast.hpp>
-#include <boost/foreach.hpp>
#include <iostream>
diff --git a/host/lib/usrp_clock/octoclock/octoclock_image_loader.cpp b/host/lib/usrp_clock/octoclock/octoclock_image_loader.cpp
index d3502113e..f317106a3 100644
--- a/host/lib/usrp_clock/octoclock/octoclock_image_loader.cpp
+++ b/host/lib/usrp_clock/octoclock/octoclock_image_loader.cpp
@@ -140,7 +140,7 @@ static void octoclock_setup_session(octoclock_session_t &session,
std::string err_msg = "Could not resolve given args to a single OctoClock device.\n"
"Applicable devices:\n";
- BOOST_FOREACH(const uhd::device_addr_t &dev, devs){
+ for(const uhd::device_addr_t &dev: devs){
std::string name = (dev["type"] == "octoclock") ? str(boost::format("OctoClock r%d")
% dev.get("revision","4"))
: "OctoClock Bootloader";
diff --git a/host/lib/usrp_clock/octoclock/octoclock_impl.cpp b/host/lib/usrp_clock/octoclock/octoclock_impl.cpp
index 4da9db19f..4f2f44add 100644
--- a/host/lib/usrp_clock/octoclock/octoclock_impl.cpp
+++ b/host/lib/usrp_clock/octoclock/octoclock_impl.cpp
@@ -21,7 +21,6 @@
#include <boost/assign.hpp>
#include <stdint.h>
#include <boost/filesystem.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/thread.hpp>
@@ -34,7 +33,7 @@
#include <uhd/usrp_clock/octoclock_eeprom.hpp>
#include <uhd/utils/byteswap.hpp>
#include <uhd/utils/paths.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/static.hpp>
@@ -57,7 +56,7 @@ device_addrs_t octoclock_find(const device_addr_t &hint){
if (hints.size() > 1){
device_addrs_t found_devices;
std::string error_msg;
- BOOST_FOREACH(const device_addr_t &hint_i, hints){
+ for(const device_addr_t &hint_i: hints){
device_addrs_t found_devices_i = octoclock_find(hint_i);
if (found_devices_i.size() != 1) error_msg += str(boost::format(
"Could not resolve device hint \"%s\" to a single device."
@@ -84,7 +83,7 @@ device_addrs_t octoclock_find(const device_addr_t &hint){
//If no address was specified, send a broadcast on each interface
if (not _hint.has_key("addr")){
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs()){
+ for(const if_addrs_t &if_addrs: get_if_addrs()){
//avoid the loopback device
if (if_addrs.inet == asio::ip::address_v4::loopback().to_string()) continue;
@@ -118,10 +117,10 @@ device_addrs_t octoclock_find(const device_addr_t &hint){
udp_transport->send(boost::asio::buffer(&pkt_out, sizeof(pkt_out)));
}
catch(const std::exception &ex){
- UHD_MSG(error) << "OctoClock network discovery error - " << ex.what() << std::endl;
+ UHD_LOGGER_ERROR("OCTOCLOCK") << "OctoClock network discovery error - " << ex.what() ;
}
catch(...){
- UHD_MSG(error) << "OctoClock network discovery unknown error" << std::endl;
+ UHD_LOGGER_ERROR("OCTOCLOCK") << "OctoClock network discovery unknown error" ;
}
uint8_t octoclock_data[udp_simple::mtu];
@@ -187,7 +186,7 @@ UHD_STATIC_BLOCK(register_octoclock_device){
* Structors
**********************************************************************/
octoclock_impl::octoclock_impl(const device_addr_t &_device_addr){
- UHD_MSG(status) << "Opening an OctoClock device..." << std::endl;
+ UHD_LOGGER_INFO("OCTOCLOCK") << "Opening an OctoClock device...";
_type = device::CLOCK;
device_addrs_t device_args = separate_device_addr(_device_addr);
// To avoid replicating sequence numbers between sessions
@@ -265,9 +264,9 @@ octoclock_impl::octoclock_impl(const device_addr_t &_device_addr){
std::string asterisk = (device_args.size() > 1) ? " * " : "";
if(device_args.size() > 1){
- UHD_MSG(status) << std::endl << "Checking status of " << addr << ":" << std::endl;
+ UHD_LOGGER_INFO("OCTOCLOCK") << "Checking status of " << addr;
}
- UHD_MSG(status) << boost::format("%sDetecting internal GPSDO...") % asterisk << std::flush;
+ UHD_LOGGER_INFO("OCTOCLOCK") << boost::format("%sDetecting internal GPSDO...") % asterisk;
_get_state(oc);
if(_oc_dict[oc].state.gps_detected){
@@ -275,7 +274,7 @@ octoclock_impl::octoclock_impl(const device_addr_t &_device_addr){
_oc_dict[oc].gps = gps_ctrl::make(octoclock_make_uart_iface(_oc_dict[oc].gpsdo_xport, _proto_ver));
if(_oc_dict[oc].gps and _oc_dict[oc].gps->gps_detected()){
- BOOST_FOREACH(const std::string &name, _oc_dict[oc].gps->get_sensors()){
+ for(const std::string &name: _oc_dict[oc].gps->get_sensors()){
_tree->create<sensor_value_t>(oc_path / "sensors" / name)
.set_publisher(boost::bind(&gps_ctrl::get_sensor, _oc_dict[oc].gps, name));
}
@@ -283,26 +282,23 @@ octoclock_impl::octoclock_impl(const device_addr_t &_device_addr){
else{
//If GPSDO communication failed, set gps_detected to false
_oc_dict[oc].state.gps_detected = 0;
- UHD_MSG(warning) << "Device reports that it has a GPSDO, but we cannot communicate with it." << std::endl;
- std::cout << std::endl;
+ UHD_LOGGER_WARNING("OCTOCLOCK") << "Device reports that it has a GPSDO, but we cannot communicate with it.";
}
}
catch(std::exception &e){
- UHD_MSG(error) << "An error occurred making GPSDO control: " << e.what() << std::endl;
+ UHD_LOGGER_ERROR("OCTOCLOCK") << "An error occurred making GPSDO control: " << e.what();
}
}
- else UHD_MSG(status) << "No GPSDO found" << std::endl;
- UHD_MSG(status) << boost::format("%sDetecting external reference...%s") % asterisk
- % _ext_ref_detected(oc).value
- << std::endl;
- UHD_MSG(status) << boost::format("%sDetecting switch position...%s") % asterisk
- % _switch_pos(oc).value
- << std::endl;
+ else UHD_LOGGER_INFO("OCTOCLOCK") << "No GPSDO found";
+ UHD_LOGGER_INFO("OCTOCLOCK") << boost::format("%sDetecting external reference...%s") % asterisk
+ % _ext_ref_detected(oc).value;
+ UHD_LOGGER_INFO("OCTOCLOCK") << boost::format("%sDetecting switch position...%s") % asterisk
+ % _switch_pos(oc).value;
std::string ref = _which_ref(oc).value;
- if(ref == "none") UHD_MSG(status) << boost::format("%sDevice is not using any reference") % asterisk << std::endl;
- else UHD_MSG(status) << boost::format("%sDevice is using %s reference") % asterisk
+ if(ref == "none") UHD_LOGGER_INFO("OCTOCLOCK") << boost::format("%sDevice is not using any reference") % asterisk;
+ else UHD_LOGGER_INFO("OCTOCLOCK") << boost::format("%sDevice is using %s reference") % asterisk
% _which_ref(oc).value
- << std::endl;
+ ;
}
}
@@ -324,7 +320,7 @@ void octoclock_impl::_set_eeprom(const std::string &oc, const octoclock_eeprom_t
* what it currently has in the EEPROM, so store the relevant values
* from the user's input and send that instead.
*/
- BOOST_FOREACH(const std::string &key, oc_eeprom.keys()){
+ for(const std::string &key: oc_eeprom.keys()){
if(_oc_dict[oc].eeprom.has_key(key)) _oc_dict[oc].eeprom[key] = oc_eeprom[key];
}
_oc_dict[oc].eeprom.commit();
diff --git a/host/lib/usrp_clock/usrp_clock_c.cpp b/host/lib/usrp_clock/usrp_clock_c.cpp
index 220112f37..274eba5d1 100644
--- a/host/lib/usrp_clock/usrp_clock_c.cpp
+++ b/host/lib/usrp_clock/usrp_clock_c.cpp
@@ -22,7 +22,6 @@
#include <uhd/usrp_clock/usrp_clock.h>
-#include <boost/foreach.hpp>
#include <boost/thread/mutex.hpp>
#include <string.h>
@@ -64,7 +63,7 @@ uhd_error uhd_usrp_clock_find(
uhd::device_addrs_t devs = uhd::device::find(std::string(args), uhd::device::CLOCK);
devices_out->string_vector_cpp.clear();
- BOOST_FOREACH(const uhd::device_addr_t &dev, devs){
+ for(const uhd::device_addr_t &dev: devs){
devices_out->string_vector_cpp.push_back(dev.to_string());
}
)
diff --git a/host/lib/utils/CMakeLists.txt b/host/lib/utils/CMakeLists.txt
index 128d7c00a..f76c5763a 100644
--- a/host/lib/utils/CMakeLists.txt
+++ b/host/lib/utils/CMakeLists.txt
@@ -142,7 +142,6 @@ LIBUHD_APPEND_SOURCES(
${CMAKE_CURRENT_SOURCE_DIR}/ihex.cpp
${CMAKE_CURRENT_SOURCE_DIR}/load_modules.cpp
${CMAKE_CURRENT_SOURCE_DIR}/log.cpp
- ${CMAKE_CURRENT_SOURCE_DIR}/msg.cpp
${CMAKE_CURRENT_SOURCE_DIR}/paths.cpp
${CMAKE_CURRENT_SOURCE_DIR}/platform.cpp
${CMAKE_CURRENT_SOURCE_DIR}/static.cpp
diff --git a/host/lib/utils/csv.cpp b/host/lib/utils/csv.cpp
index 2ffa70196..e0cadcb96 100644
--- a/host/lib/utils/csv.cpp
+++ b/host/lib/utils/csv.cpp
@@ -16,7 +16,6 @@
//
#include <uhd/utils/csv.hpp>
-#include <boost/foreach.hpp>
using namespace uhd;
@@ -29,7 +28,7 @@ csv::rows_type csv::to_rows(std::istream &input){
bool in_quote = false;
char last_ch, next_ch = ' ';
//for each character in the line
- BOOST_FOREACH(char ch, line){
+ for(char ch: line){
last_ch = next_ch;
next_ch = ch;
//catch a quote character and change the state
diff --git a/host/lib/utils/gain_group.cpp b/host/lib/utils/gain_group.cpp
index 71caf33be..1a6c0407f 100644
--- a/host/lib/utils/gain_group.cpp
+++ b/host/lib/utils/gain_group.cpp
@@ -20,7 +20,6 @@
#include <uhd/types/dict.hpp>
#include <uhd/utils/algorithm.hpp>
#include <uhd/exception.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <algorithm>
#include <vector>
@@ -73,7 +72,7 @@ public:
if (not name.empty()) return _name_to_fcns.get(name).get_range();
double overall_min = 0, overall_max = 0, overall_step = 0;
- BOOST_FOREACH(const gain_fcns_t &fcns, get_all_fcns()){
+ for(const gain_fcns_t &fcns: get_all_fcns()){
const gain_range_t range = fcns.get_range();
overall_min += range.start();
overall_max += range.stop();
@@ -88,7 +87,7 @@ public:
if (not name.empty()) return _name_to_fcns.get(name).get_value();
double overall_gain = 0;
- BOOST_FOREACH(const gain_fcns_t &fcns, get_all_fcns()){
+ for(const gain_fcns_t &fcns: get_all_fcns()){
overall_gain += fcns.get_value();
}
return overall_gain;
@@ -102,7 +101,7 @@ public:
//get the max step size among the gains
double max_step = 0;
- BOOST_FOREACH(const gain_fcns_t &fcns, all_fcns){
+ for(const gain_fcns_t &fcns: all_fcns){
max_step = std::max(max_step, fcns.get_range().step());
}
@@ -111,7 +110,7 @@ public:
//distribute power according to priority (round to max step)
double gain_left_to_distribute = gain;
- BOOST_FOREACH(const gain_fcns_t &fcns, all_fcns){
+ for(const gain_fcns_t &fcns: all_fcns){
const gain_range_t range = fcns.get_range();
gain_bucket.push_back(floor_step(uhd::clip(
gain_left_to_distribute, range.start(), range.stop()
@@ -135,7 +134,7 @@ public:
//distribute the remainder (less than max step)
//fill in the largest step sizes first that are less than the remainder
- BOOST_FOREACH(size_t i, indexes_step_size_dec){
+ for(size_t i: indexes_step_size_dec){
const gain_range_t range = all_fcns.at(i).get_range();
double additional_gain = floor_step(uhd::clip(
gain_bucket.at(i) + gain_left_to_distribute, range.start(), range.stop()
@@ -143,11 +142,11 @@ public:
gain_bucket.at(i) += additional_gain;
gain_left_to_distribute -= additional_gain;
}
- UHD_LOGV(often) << "gain_left_to_distribute " << gain_left_to_distribute << std::endl;
+ UHD_LOGGER_DEBUG("UHD") << "gain_left_to_distribute " << gain_left_to_distribute ;
//now write the bucket out to the individual gain values
for (size_t i = 0; i < gain_bucket.size(); i++){
- UHD_LOGV(often) << i << ": " << gain_bucket.at(i) << std::endl;
+ UHD_LOGGER_DEBUG("UHD") << i << ": " << gain_bucket.at(i) ;
all_fcns.at(i).set_value(gain_bucket.at(i));
}
}
@@ -173,7 +172,7 @@ private:
//! get the gain function sets in order (highest priority first)
std::vector<gain_fcns_t> get_all_fcns(void){
std::vector<gain_fcns_t> all_fcns;
- BOOST_FOREACH(size_t key, uhd::sorted(_registry.keys())){
+ for(size_t key: uhd::sorted(_registry.keys())){
const std::vector<gain_fcns_t> &fcns = _registry[key];
all_fcns.insert(all_fcns.begin(), fcns.begin(), fcns.end());
}
diff --git a/host/lib/utils/load_modules.cpp b/host/lib/utils/load_modules.cpp
index aba3adeed..3ef3c418c 100644
--- a/host/lib/utils/load_modules.cpp
+++ b/host/lib/utils/load_modules.cpp
@@ -19,7 +19,6 @@
#include <uhd/utils/static.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <boost/filesystem.hpp>
#include <iostream>
#include <string>
@@ -102,7 +101,7 @@ static void load_module_path(const fs::path &path){
* Load all the modules given in the module paths.
*/
UHD_STATIC_BLOCK(load_modules){
- BOOST_FOREACH(const fs::path &path, uhd::get_module_paths()){
+ for(const fs::path &path: uhd::get_module_paths()){
load_module_path(path);
}
}
diff --git a/host/lib/utils/log.cpp b/host/lib/utils/log.cpp
index 4e58ce894..6a694ed8e 100644
--- a/host/lib/utils/log.cpp
+++ b/host/lib/utils/log.cpp
@@ -16,150 +16,455 @@
//
#include <uhd/utils/log.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log_add.hpp>
#include <uhd/utils/static.hpp>
#include <uhd/utils/paths.hpp>
-#include <boost/filesystem.hpp>
-#include <boost/format.hpp>
-#include <boost/thread/mutex.hpp>
+#include <uhd/transport/bounded_buffer.hpp>
+#include <uhd/version.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
-#include <boost/thread/locks.hpp>
-#include <boost/interprocess/sync/file_lock.hpp>
+#include <boost/make_shared.hpp>
#include <fstream>
#include <cctype>
+#include <atomic>
+#include <mutex>
+#include <memory>
+#include <thread>
-namespace fs = boost::filesystem;
namespace pt = boost::posix_time;
-namespace ip = boost::interprocess;
+
+// Don't make these static const std::string -- we need their lifetime guaranteed!
+#define PURPLE "\033[35;1m" // purple
+#define BLUE "\033[34;1m" // blue
+#define GREEN "\033[32;1m" // green
+#define YELLOW "\033[33;1m" // yellow
+#define RED "\033[31;0m" // red
+#define BRED "\033[31;1m" // bright red
+#define RESET_COLORS "\033[39;0m" // reset colors
/***********************************************************************
- * Global resources for the logger
+ * Helpers
**********************************************************************/
-class log_resource_type{
+static std::string verbosity_color(const uhd::log::severity_level &level){
+ switch(level){
+ case (uhd::log::trace):
+ return PURPLE;
+ case(uhd::log::debug):
+ return BLUE;
+ case(uhd::log::info):
+ return GREEN;
+ case(uhd::log::warning):
+ return YELLOW;
+ case(uhd::log::error):
+ return RED;
+ case(uhd::log::fatal):
+ return BRED;
+ default:
+ return RESET_COLORS;
+ }
+}
+
+static std::string verbosity_name(const uhd::log::severity_level &level){
+ switch(level){
+ case (uhd::log::trace):
+ return "TRACE";
+ case(uhd::log::debug):
+ return "DEBUG";
+ case(uhd::log::info):
+ return "INFO";
+ case(uhd::log::warning):
+ return "WARNING";
+ case(uhd::log::error):
+ return "ERROR";
+ case(uhd::log::fatal):
+ return "FATAL";
+ default:
+ return "-";
+ }
+ return "";
+}
+
+//! get the relative file path from the host directory
+inline std::string path_to_filename(std::string path)
+{
+ return path.substr(path.find_last_of("/\\") + 1);
+}
+
+/***********************************************************************
+ * Logger backends
+ **********************************************************************/
+void console_log(
+ const uhd::log::logging_info &log_info
+) {
+
+ std::clog
+#ifdef UHD_LOG_CONSOLE_COLOR
+ << verbosity_color(log_info.verbosity)
+#endif
+#ifdef UHD_LOG_CONSOLE_TIME
+ << "[" << pt::to_simple_string(log_info.time) << "] "
+#endif
+#ifdef UHD_LOG_CONSOLE_THREAD
+ << "[0x" << log_info.thread_id << "] "
+#endif
+#ifdef UHD_LOG_CONSOLE_SRC
+ << "[" << path_to_filename(log_info.file) << ":" << log_info.line << "] "
+#endif
+ << "[" << verbosity_name(log_info.verbosity) << "] "
+ << "[" << log_info.component << "] "
+#ifdef UHD_LOG_CONSOLE_COLOR
+ << RESET_COLORS
+#endif
+ << log_info.message
+ << std::endl
+ ;
+}
+
+/*! Helper class to implement file logging
+ *
+ * The class holds references to the file stream object, and handles closing
+ * and cleanup.
+ */
+class file_logger_backend
+{
public:
- uhd::_log::verbosity_t level;
+ file_logger_backend(const std::string &file_path)
+ {
+ _file_stream.exceptions(std::ofstream::failbit | std::ofstream::badbit);
+ if (!file_path.empty()){
+ try {
+ _file_stream.open(file_path.c_str(), std::fstream::out | std::fstream::app);
+ } catch (const std::ofstream::failure& fail){
+ std::cerr << "Error opening log file: " << fail.what() << std::endl;
+ }
+ }
+ }
+
+ void log(const uhd::log::logging_info &log_info)
+ {
+ if (_file_stream.is_open()){
+ _file_stream
+ << pt::to_simple_string(log_info.time) << ","
+ << "0x" << log_info.thread_id << ","
+ << path_to_filename(log_info.file) << ":" << log_info.line << ","
+ << log_info.verbosity << ","
+ << log_info.component << ","
+ << log_info.message
+ << std::endl;
+ ;
+ }
+ }
- log_resource_type(void){
- //file lock pointer must be null
- _file_lock = NULL;
+ ~file_logger_backend()
+ {
+ if (_file_stream.is_open()){
+ _file_stream.close();
+ }
+ }
- //set the default log level
- level = uhd::_log::never;
+private:
+ std::ofstream _file_stream;
+};
- //allow override from macro definition
- #ifdef UHD_LOG_LEVEL
- _set_log_level(BOOST_STRINGIZE(UHD_LOG_LEVEL));
- #endif
+/***********************************************************************
+ * Global resources for the logger
+ **********************************************************************/
+
+#define UHD_CONSOLE_LOGGER_KEY "console"
+#define UHD_FILE_LOGGER_KEY "file"
+
+class log_resource {
+public:
+ uhd::log::severity_level global_level;
+ std::map<std::string, uhd::log::severity_level> logger_level;
- //allow override from environment variable
+ log_resource(void):
+ global_level(uhd::log::off),
+ _exit(false),
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ _fastpath_queue(10),
+#endif
+ _log_queue(10)
+ {
+ //allow override from macro definition
+#ifdef UHD_LOG_MIN_LEVEL
+ this->global_level = _get_log_level(BOOST_STRINGIZE(UHD_LOG_MIN_LEVEL), this->global_level);
+#endif
+ //allow override from environment variables
const char * log_level_env = std::getenv("UHD_LOG_LEVEL");
- if (log_level_env != NULL) _set_log_level(log_level_env);
+ if (log_level_env != NULL && log_level_env[0] != '\0') {
+ this->global_level =
+ _get_log_level(log_level_env, this->global_level);
+ }
+
+
+ /***** Console logging ***********************************************/
+#ifndef UHD_LOG_CONSOLE_DISABLE
+ uhd::log::severity_level console_level = uhd::log::trace;
+#ifdef UHD_LOG_CONSOLE_LEVEL
+ console_level = _get_log_level(BOOST_STRINGIZE(UHD_LOG_CONSOLE_LEVEL), console_level);
+#endif
+ const char * log_console_level_env = std::getenv("UHD_LOG_CONSOLE_LEVEL");
+ if (log_console_level_env != NULL && log_console_level_env[0] != '\0') {
+ console_level =
+ _get_log_level(log_console_level_env, console_level);
+ }
+ logger_level[UHD_CONSOLE_LOGGER_KEY] = console_level;
+ _loggers[UHD_CONSOLE_LOGGER_KEY] = &console_log;
+#endif
+
+ /***** File logging **************************************************/
+ uhd::log::severity_level file_level = uhd::log::trace;
+ std::string log_file_target;
+#if defined(UHD_LOG_FILE_LEVEL) && defined(UHD_LOG_FILE_PATH)
+ file_level = _get_log_level(BOOST_STRINGIZE(UHD_LOG_FILE_LEVEL), file_level);
+ log_file_target = BOOST_STRINGIZE(UHD_LOG_FILE);
+#endif
+ const char * log_file_level_env = std::getenv("UHD_LOG_FILE_LEVEL");
+ if (log_file_level_env != NULL && log_file_level_env[0] != '\0'){
+ file_level = _get_log_level(log_file_level_env, file_level);
+ }
+ const char* log_file_env = std::getenv("UHD_LOG_FILE");
+ if ((log_file_env != NULL) && (log_file_env[0] != '\0')) {
+ log_file_target = std::string(log_file_env);
+ }
+ if (!log_file_target.empty()){
+ logger_level[UHD_FILE_LOGGER_KEY] = file_level;
+ auto F = boost::make_shared<file_logger_backend>(log_file_target);
+ _loggers[UHD_FILE_LOGGER_KEY] = [F](const uhd::log::logging_info& log_info){F->log(log_info);};
+ }
+ std::ostringstream sys_info;
+ sys_info \
+ << "UHD" \
+ << BOOST_PLATFORM << "; "
+ << BOOST_COMPILER << "; "
+ << "Boost_"
+ << BOOST_VERSION << "; "
+ << "UHD_" << uhd::get_version_string();
+ _log_queue.push_with_timed_wait(
+ uhd::log::logging_info(
+ pt::microsec_clock::local_time(),
+ uhd::log::info,
+ __FILE__,
+ __LINE__,
+ sys_info.str(),
+ boost::this_thread::get_id()),
+ 0.25);
+
+ // Launch log message consumer
+ _pop_task = std::make_shared<std::thread>(std::thread([this](){this->pop_task();}));
+ _pop_fastpath_task = std::make_shared<std::thread>(std::thread([this](){this->pop_fastpath_task();}));
}
- ~log_resource_type(void){
- boost::lock_guard<boost::mutex> lock(_mutex);
- _file_stream.close();
- if (_file_lock != NULL) delete _file_lock;
+ ~log_resource(void){
+ _exit = true;
+ // We push a final message to kick the pop task out of its wait state.
+ // This wouldn't be necessary if pop_with_wait() could fail. Should
+ // that ever get fixed, we can remove this.
+ auto final_message = uhd::log::logging_info(
+ pt::microsec_clock::local_time(),
+ uhd::log::trace,
+ __FILE__,
+ __LINE__,
+ "LOGGING",
+ boost::this_thread::get_id()
+ );
+ final_message.message = "Terminating logger.";
+ push(final_message);
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ push_fastpath("");
+#endif
+ _pop_task->join();
+ {
+ std::lock_guard<std::mutex> l(_logmap_mutex);
+ _loggers.clear();
+ }
+ _pop_task.reset();
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ _pop_fastpath_task->join();
+ _pop_fastpath_task.reset();
+#endif
}
- void log_to_file(const std::string &log_msg){
- boost::lock_guard<boost::mutex> lock(_mutex);
- if (_file_lock == NULL){
- const std::string log_path = (fs::path(uhd::get_tmp_path()) / "uhd.log").string();
- _file_stream.open(log_path.c_str(), std::fstream::out | std::fstream::app);
- _file_lock = new ip::file_lock(log_path.c_str());
+ void push(const uhd::log::logging_info& log_info)
+ {
+ static const double PUSH_TIMEOUT = 0.25; // seconds
+ _log_queue.push_with_timed_wait(log_info, PUSH_TIMEOUT);
+ }
+
+ void push_fastpath(const std::string &message)
+ {
+ // Never wait. If the buffer is full, we just don't see the message.
+ // Too bad.
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ _fastpath_queue.push_with_haste(message);
+#endif
+ }
+
+ void pop_task()
+ {
+ uhd::log::logging_info log_info;
+ log_info.message = "";
+
+ while (!_exit) {
+ _log_queue.pop_with_wait(log_info);
+ {
+ std::lock_guard<std::mutex> l(_logmap_mutex);
+ for (const auto &logger : _loggers) {
+ auto level = logger_level.find(logger.first);
+ if(level != logger_level.end() && log_info.verbosity < level->second){
+ continue;
+ }
+ logger.second(log_info);
+ }
+ }
+ }
+
+ // Exit procedure: Clear the queue
+ while (_log_queue.pop_with_haste(log_info)) {
+ std::lock_guard<std::mutex> l(_logmap_mutex);
+ for (const auto &logger : _loggers) {
+ auto level = logger_level.find(logger.first);
+ if (level != logger_level.end() && log_info.verbosity < level->second){
+ continue;
+ }
+ logger.second(log_info);
+ }
}
- _file_lock->lock();
- _file_stream << log_msg << std::flush;
- _file_lock->unlock();
+ }
+
+ void pop_fastpath_task()
+ {
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ while (!_exit) {
+ std::string msg;
+ _fastpath_queue.pop_with_wait(msg);
+ {
+ std::cerr << msg << std::flush;
+ }
+ }
+
+ // Exit procedure: Clear the queue
+ std::string msg;
+ while (_fastpath_queue.pop_with_haste(msg)) {
+ std::cerr << msg << std::flush;
+ }
+#endif
+ }
+
+
+ void add_logger(const std::string &key, uhd::log::log_fn_t logger_fn)
+ {
+ std::lock_guard<std::mutex> l(_logmap_mutex);
+ _loggers[key] = logger_fn;
}
private:
- //! set the log level from a string that is either a digit or an enum name
- void _set_log_level(const std::string &log_level_str){
- const uhd::_log::verbosity_t log_level_num = uhd::_log::verbosity_t(log_level_str[0]-'0');
- if (std::isdigit(log_level_str[0]) and log_level_num >= uhd::_log::always and log_level_num <= uhd::_log::never){
- this->level = log_level_num;
- return;
+ std::shared_ptr<std::thread> _pop_task;
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ std::shared_ptr<std::thread> _pop_fastpath_task;
+#endif
+ uhd::log::severity_level _get_log_level(const std::string &log_level_str,
+ const uhd::log::severity_level &previous_level){
+ if (std::isdigit(log_level_str[0])) {
+ const uhd::log::severity_level log_level_num =
+ uhd::log::severity_level(std::stoi(log_level_str));
+ if (log_level_num >= uhd::log::trace and
+ log_level_num <= uhd::log::fatal) {
+ return log_level_num;
+ }else{
+ UHD_LOGGER_ERROR("LOG") << "Failed to set log level to: " << log_level_str;
+ return previous_level;
+ }
}
- #define if_lls_equal(name) else if(log_level_str == #name) this->level = uhd::_log::name
- if_lls_equal(always);
- if_lls_equal(often);
- if_lls_equal(regularly);
- if_lls_equal(rarely);
- if_lls_equal(very_rarely);
- if_lls_equal(never);
+
+#define if_loglevel_equal(name) \
+ else if (log_level_str == #name) return uhd::log::name
+ if_loglevel_equal(trace);
+ if_loglevel_equal(debug);
+ if_loglevel_equal(info);
+ if_loglevel_equal(warning);
+ if_loglevel_equal(error);
+ if_loglevel_equal(fatal);
+ if_loglevel_equal(off);
+ return previous_level;
}
- //file stream and lock:
- std::ofstream _file_stream;
- ip::file_lock *_file_lock;
- boost::mutex _mutex;
+ std::mutex _logmap_mutex;
+ std::atomic<bool> _exit;
+ std::map<std::string, uhd::log::log_fn_t> _loggers;
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ uhd::transport::bounded_buffer<std::string> _fastpath_queue;
+#endif
+ uhd::transport::bounded_buffer<uhd::log::logging_info> _log_queue;
};
-UHD_SINGLETON_FCN(log_resource_type, log_rs);
+UHD_SINGLETON_FCN(log_resource, log_rs);
/***********************************************************************
* The logger object implementation
**********************************************************************/
-//! get the relative file path from the host directory
-static std::string get_rel_file_path(const fs::path &file){
- fs::path abs_path = file.parent_path();
- fs::path rel_path = file.leaf();
- while (not abs_path.empty() and abs_path.leaf() != "host"){
- rel_path = abs_path.leaf() / rel_path;
- abs_path = abs_path.parent_path();
- }
- return rel_path.string();
-}
-
-
uhd::_log::log::log(
- const verbosity_t verbosity,
+ const uhd::log::severity_level verbosity,
const std::string &file,
const unsigned int line,
- const std::string &function
- )
+ const std::string &component,
+ const boost::thread::id thread_id
+ ) :
+ _log_it(verbosity >= log_rs().global_level)
{
- _log_it = (verbosity >= log_rs().level);
- if (_log_it)
- {
- const std::string time = pt::to_simple_string(pt::microsec_clock::local_time());
- const std::string header1 = str(boost::format("-- %s - level %d") % time % int(verbosity));
- const std::string header2 = str(boost::format("-- %s") % function).substr(0, 80);
- const std::string header3 = str(boost::format("-- %s:%u") % get_rel_file_path(file) % line);
- const std::string border = std::string(std::max(std::max(header1.size(), header2.size()), header3.size()), '-');
- _ss << std::endl
- << border << std::endl
- << header1 << std::endl
- << header2 << std::endl
- << header3 << std::endl
- << border << std::endl
- ;
- }
+ if (_log_it){
+ this->_log_info = uhd::log::logging_info(
+ pt::microsec_clock::local_time(),
+ verbosity,
+ file,
+ line,
+ component,
+ thread_id);
+ }
}
uhd::_log::log::~log(void)
{
- if (not _log_it)
- return;
-
- _ss << std::endl;
- try{
- log_rs().log_to_file(_ss.str());
- }
- catch(const std::exception &e){
- /*!
- * Critical behavior below.
- * The following steps must happen in order to avoid a lock-up condition.
- * This is because the message facility will call into the logging facility.
- * Therefore we must disable the logger (level = never) before messaging.
- */
- log_rs().level = never;
- UHD_MSG(error)
- << "Logging failed: " << e.what() << std::endl
- << "Logging has been disabled for this process" << std::endl
- ;
+ if (_log_it) {
+ this->_log_info.message = _ss.str();
+ log_rs().push(this->_log_info);
}
}
+
+void uhd::_log::log_fastpath(const std::string &msg)
+{
+#ifndef UHD_LOG_FASTPATH_DISABLE
+ log_rs().push_fastpath(msg);
+#endif
+}
+
+/***********************************************************************
+ * Public API calls
+ **********************************************************************/
+void
+uhd::log::add_logger(const std::string &key, log_fn_t logger_fn)
+{
+ log_rs().add_logger(key, logger_fn);
+}
+
+void
+uhd::log::set_log_level(uhd::log::severity_level level){
+ log_rs().global_level = level;
+}
+
+void
+uhd::log::set_logger_level(const std::string &key, uhd::log::severity_level level){
+ log_rs().logger_level[key] = level;
+}
+
+void
+uhd::log::set_console_level(uhd::log::severity_level level){
+ set_logger_level(UHD_CONSOLE_LOGGER_KEY, level);
+}
+
+void
+uhd::log::set_file_level(uhd::log::severity_level level){
+ set_logger_level(UHD_FILE_LOGGER_KEY, level);
+}
+
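
The rewritten log.cpp above replaces the file-lock based logger with a queued, multi-sink design: records below the global level are never queued, each registered sink then applies its own level, and console and file sinks are pre-registered under the "console" and "file" keys (tunable at runtime via UHD_LOG_LEVEL, UHD_LOG_CONSOLE_LEVEL, UHD_LOG_FILE_LEVEL and UHD_LOG_FILE). A minimal sketch of the new public API; the sink key "my_sink" and the component tag "EXAMPLE" are placeholders:

    #include <uhd/utils/log.hpp>
    #include <uhd/utils/log_add.hpp>
    #include <iostream>

    int main()
    {
        uhd::log::set_log_level(uhd::log::debug);      // global gate: queue debug and above
        uhd::log::set_console_level(uhd::log::info);   // console sink stays quieter

        // Register an additional sink; it sees every queued record at or above its level.
        uhd::log::add_logger("my_sink", [](const uhd::log::logging_info &info){
            std::cout << "[" << info.component << "] " << info.message << std::endl;
        });
        uhd::log::set_logger_level("my_sink", uhd::log::debug);

        UHD_LOGGER_DEBUG("EXAMPLE") << "reaches my_sink only";
        UHD_LOGGER_INFO("EXAMPLE")  << "reaches the console and my_sink";
        return 0;
    }
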
diff --git a/host/lib/utils/msg.cpp b/host/lib/utils/msg.cpp
deleted file mode 100644
index 95879a116..000000000
--- a/host/lib/utils/msg.cpp
+++ /dev/null
@@ -1,130 +0,0 @@
-//
-// Copyright 2011 Ettus Research LLC
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-//
-
-#include <uhd/utils/msg.hpp>
-#include <uhd/utils/log.hpp>
-#include <uhd/utils/static.hpp>
-#include <boost/thread/mutex.hpp>
-#include <boost/foreach.hpp>
-#include <boost/tokenizer.hpp>
-#include <sstream>
-#include <iostream>
-
-/***********************************************************************
- * Helper functions
- **********************************************************************/
-#define tokenizer(inp, sep) \
- boost::tokenizer<boost::char_separator<char> > \
- (inp, boost::char_separator<char>(sep))
-
-static void msg_to_cout(const std::string &msg){
- std::stringstream ss;
-
- static bool just_had_a_newline = true;
- BOOST_FOREACH(char ch, msg){
- if (just_had_a_newline){
- just_had_a_newline = false;
- ss << "-- ";
- }
- if (ch == '\n'){
- just_had_a_newline = true;
- }
- ss << ch;
- }
-
- std::cout << ss.str() << std::flush;
-}
-
-static void msg_to_cerr(const std::string &title, const std::string &msg){
- std::stringstream ss;
-
- ss << std::endl << title << ":" << std::endl;
- BOOST_FOREACH(const std::string &line, tokenizer(msg, "\n")){
- ss << " " << line << std::endl;
- }
-
- std::cerr << ss.str() << std::flush;
-}
-
-/***********************************************************************
- * Global resources for the messenger
- **********************************************************************/
-struct msg_resource_type{
- boost::mutex mutex;
- uhd::msg::handler_t handler;
-};
-
-UHD_SINGLETON_FCN(msg_resource_type, msg_rs);
-
-/***********************************************************************
- * Setup the message handlers
- **********************************************************************/
-void uhd::msg::register_handler(const handler_t &handler){
- boost::mutex::scoped_lock lock(msg_rs().mutex);
- msg_rs().handler = handler;
-}
-
-static void default_msg_handler(uhd::msg::type_t type, const std::string &msg){
- static boost::mutex msg_mutex;
- boost::mutex::scoped_lock lock(msg_mutex);
- switch(type){
- case uhd::msg::fastpath:
- std::cerr << msg << std::flush;
- break;
-
- case uhd::msg::status:
- msg_to_cout(msg);
- UHD_LOG << "Status message" << std::endl << msg;
- break;
-
- case uhd::msg::warning:
- msg_to_cerr("UHD Warning", msg);
- UHD_LOG << "Warning message" << std::endl << msg;
- break;
-
- case uhd::msg::error:
- msg_to_cerr("UHD Error", msg);
- UHD_LOG << "Error message" << std::endl << msg;
- break;
- }
-}
-
-UHD_STATIC_BLOCK(msg_register_default_handler){
- uhd::msg::register_handler(&default_msg_handler);
-}
-
-/***********************************************************************
- * The message object implementation
- **********************************************************************/
-struct uhd::msg::_msg::impl{
- std::ostringstream ss;
- type_t type;
-};
-
-uhd::msg::_msg::_msg(const type_t type){
- _impl = UHD_PIMPL_MAKE(impl, ());
- _impl->type = type;
-}
-
-uhd::msg::_msg::~_msg(void){
- boost::mutex::scoped_lock lock(msg_rs().mutex);
- msg_rs().handler(_impl->type, _impl->ss.str());
-}
-
-std::ostream & uhd::msg::_msg::operator()(void){
- return _impl->ss;
-}
diff --git a/host/lib/utils/paths.cpp b/host/lib/utils/paths.cpp
index 38839c8d4..5d53f95bd 100644
--- a/host/lib/utils/paths.cpp
+++ b/host/lib/utils/paths.cpp
@@ -22,7 +22,6 @@
#include <boost/algorithm/string.hpp>
#include <boost/bind.hpp>
#include <boost/filesystem.hpp>
-#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/regex.hpp>
#include <boost/tokenizer.hpp>
@@ -98,7 +97,7 @@ static std::vector<std::string> get_env_paths(const std::string &var_name){
//convert to full filesystem path, filter blank paths
if (var_value.empty()) return paths;
- BOOST_FOREACH(const std::string &path_string, path_tokenizer(var_value)){
+ for(const std::string &path_string: path_tokenizer(var_value)){
if (path_string.empty()) continue;
paths.push_back(fs::system_complete(path_string).string());
}
@@ -190,7 +189,7 @@ std::vector<fs::path> uhd::get_module_paths(void){
std::vector<fs::path> paths;
std::vector<std::string> env_paths = get_env_paths("UHD_MODULE_PATH");
- BOOST_FOREACH(std::string &str_path, env_paths) {
+ for(std::string &str_path: env_paths) {
paths.push_back(str_path);
}
@@ -272,7 +271,7 @@ std::string uhd::get_images_dir(const std::string &search_paths) {
/* We will start by looking for a path indicated by the `UHD_IMAGES_DIR`
* environment variable. */
std::vector<std::string> env_paths = get_env_paths("UHD_IMAGES_DIR");
- BOOST_FOREACH(possible_dir, env_paths) {
+ for(auto possible_dir: env_paths) {
if (fs::is_directory(fs::path(possible_dir))) {
return possible_dir;
}
@@ -293,7 +292,7 @@ std::string uhd::get_images_dir(const std::string &search_paths) {
std::vector<std::string> search_paths_vector;
boost::split(search_paths_vector, _search_paths, boost::is_any_of(",;"));
- BOOST_FOREACH(std::string& search_path, search_paths_vector) {
+ for(std::string& search_path: search_paths_vector) {
boost::algorithm::trim(search_path);
if (search_path.empty()) continue;
diff --git a/host/lib/utils/tasks.cpp b/host/lib/utils/tasks.cpp
index 661315ae8..5dac729c8 100644
--- a/host/lib/utils/tasks.cpp
+++ b/host/lib/utils/tasks.cpp
@@ -17,7 +17,7 @@
#include <uhd/utils/tasks.hpp>
#include <uhd/utils/msg_task.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/thread/thread.hpp>
#include <boost/thread/barrier.hpp>
#include <exception>
@@ -67,10 +67,10 @@ private:
}
void do_error_msg(const std::string &msg){
- UHD_MSG(error)
- << "An unexpected exception was caught in a task loop." << std::endl
- << "The task loop will now exit, things may not work." << std::endl
- << msg << std::endl
+ UHD_LOGGER_ERROR("UHD")
+ << "An unexpected exception was caught in a task loop."
+ << "The task loop will now exit, things may not work."
+ << msg
;
}
@@ -162,10 +162,10 @@ private:
}
void do_error_msg(const std::string &msg){
- UHD_MSG(error)
- << "An unexpected exception was caught in a task loop." << std::endl
- << "The task loop will now exit, things may not work." << std::endl
- << msg << std::endl
+ UHD_LOGGER_ERROR("UHD")
+ << "An unexpected exception was caught in a task loop."
+ << "The task loop will now exit, things may not work."
+ << msg
;
}
diff --git a/host/lib/utils/thread_priority.cpp b/host/lib/utils/thread_priority.cpp
index 98023c5aa..729edcf0a 100644
--- a/host/lib/utils/thread_priority.cpp
+++ b/host/lib/utils/thread_priority.cpp
@@ -16,7 +16,7 @@
//
#include <uhd/utils/thread_priority.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
#include <iostream>
@@ -26,10 +26,10 @@ bool uhd::set_thread_priority_safe(float priority, bool realtime){
set_thread_priority(priority, realtime);
return true;
}catch(const std::exception &e){
- UHD_MSG(warning) << boost::format(
+ UHD_LOGGER_WARNING("UHD") << boost::format(
"Unable to set the thread priority. Performance may be negatively affected.\n"
"Please see the general application notes in the manual for instructions.\n"
- "%s\n"
+ "%s"
) % e.what();
return false;
}
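
The warning path changed above is what callers hit when elevation fails; the usual call site is the boolean-returning wrapper. A minimal sketch (the default arguments live in thread_priority.hpp, which is not part of this diff):

    #include <uhd/utils/thread_priority.hpp>
    #include <iostream>

    int main()
    {
        // Returns false -- and emits the UHD_LOGGER_WARNING("UHD") message above --
        // if the OS rejects the priority change (e.g. missing rtprio privileges).
        if (not uhd::set_thread_priority_safe()) {
            std::cout << "Continuing at normal scheduling priority" << std::endl;
        }
        return 0;
    }
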
diff --git a/host/lib/utils/thread_priority_c.cpp b/host/lib/utils/thread_priority_c.cpp
index fe019e51d..b2de9970d 100644
--- a/host/lib/utils/thread_priority_c.cpp
+++ b/host/lib/utils/thread_priority_c.cpp
@@ -18,7 +18,7 @@
#include <uhd/error.h>
#include <uhd/utils/thread_priority.h>
#include <uhd/utils/thread_priority.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
#include <uhd/exception.hpp>
#include <boost/format.hpp>
#include <iostream>
diff --git a/host/lib/version.cpp b/host/lib/version.cpp
index 2b7125e55..2ba2dd0d6 100644
--- a/host/lib/version.cpp
+++ b/host/lib/version.cpp
@@ -17,21 +17,10 @@
#include <uhd/version.hpp>
#include <uhd/utils/static.hpp>
+#include <uhd/utils/log.hpp>
#include <boost/version.hpp>
#include <iostream>
-#ifndef UHD_DONT_PRINT_SYSTEM_INFO
-UHD_STATIC_BLOCK(print_system_info){
- std::cout
- << BOOST_PLATFORM << "; "
- << BOOST_COMPILER << "; "
- << "Boost_" << BOOST_VERSION << "; "
- << "UHD_" << uhd::get_version_string()
- << std::endl << std::endl
- ;
-}
-#endif
-
std::string uhd::get_version_string(void){
return "@UHD_VERSION@";
}
diff --git a/host/tests/CMakeLists.txt b/host/tests/CMakeLists.txt
index 8f7fdcd7c..ebda2cf70 100644
--- a/host/tests/CMakeLists.txt
+++ b/host/tests/CMakeLists.txt
@@ -28,6 +28,7 @@ SET(test_sources
buffer_test.cpp
byteswap_test.cpp
cast_test.cpp
+ cal_container_test.cpp
chdr_test.cpp
convert_test.cpp
dict_test.cpp
@@ -35,8 +36,8 @@ SET(test_sources
fp_compare_delta_test.cpp
fp_compare_epsilon_test.cpp
gain_group_test.cpp
+ log_test.cpp
math_test.cpp
- msg_test.cpp
property_test.cpp
ranges_test.cpp
sid_t_test.cpp
diff --git a/host/tests/addr_test.cpp b/host/tests/addr_test.cpp
index 61bb6d049..92863e44f 100644
--- a/host/tests/addr_test.cpp
+++ b/host/tests/addr_test.cpp
@@ -20,7 +20,6 @@
#include <uhd/types/device_addr.hpp>
#include <uhd/usrp/dboard_id.hpp>
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <algorithm>
#include <iostream>
diff --git a/host/tests/blockdef_test.cpp b/host/tests/blockdef_test.cpp
index 11ddc4b59..5ca8a2472 100644
--- a/host/tests/blockdef_test.cpp
+++ b/host/tests/blockdef_test.cpp
@@ -21,7 +21,6 @@
#include <boost/assign/list_of.hpp>
#include <boost/test/unit_test.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <uhd/rfnoc/blockdef.hpp>
using namespace uhd::rfnoc;
diff --git a/host/tests/cal_container_test.cpp b/host/tests/cal_container_test.cpp
new file mode 100644
index 000000000..f45ca429d
--- /dev/null
+++ b/host/tests/cal_container_test.cpp
@@ -0,0 +1,200 @@
+//
+// Copyright 2016 Ettus Research
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+
+#include <uhd/cal/power_container.hpp>
+#include <uhd/exception.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/shared_ptr.hpp>
+#include <vector>
+#include <fstream>
+
+using namespace uhd;
+using namespace uhd::cal;
+
+static const double eps = 1e-8;
+
+////////////////////////////////////////////////////////////////////////
+BOOST_AUTO_TEST_CASE(test_power_container_bilinear){
+////////////////////////////////////////////////////////////////////////
+
+ // Create the data container
+ power_container::sptr container = power_container::make();
+
+ // Create some data points to add
+ std::vector<double> pt0(2, 0.0);
+ std::vector<double> pt1(2, 0.0);
+ std::vector<double> pt2(2, 0.0);
+ std::vector<double> pt3(2, 2.0);
+
+ pt1[0] = 2.0;
+ pt2[1] = 2.0;
+
+ container->add(1.0, pt0);
+ container->add(1.0, pt1);
+ container->add(0.0, pt2);
+ container->add(0.0, pt3);
+
+ // Add points to interpolate against
+ std::vector<double> test0(2, 1.0);
+ std::vector<double> test1(2, 1.5);
+ std::vector<double> test2(2, 0.0);
+ test2[1] = 1.0;
+
+ BOOST_CHECK_CLOSE(container->get(test0), 0.50, eps);
+ BOOST_CHECK_CLOSE(container->get(test1), 0.25, eps);
+ BOOST_CHECK_CLOSE(container->get(test2), 0.50, eps);
+}
+
+
+////////////////////////////////////////////////////////////////////////
+BOOST_AUTO_TEST_CASE(test_power_temp_container){
+////////////////////////////////////////////////////////////////////////
+
+ // Create the data container
+ power_container::sptr container = power_container::make();
+
+ // Create some data points to add
+ std::vector<double> pt0(3, 1.0);
+ std::vector<double> pt1(3, 2.0);
+ std::vector<double> pt2(3, 3.0);
+
+ container->add(1.0, pt0);
+ container->add(2.0, pt1);
+ container->add(5.0, pt2);
+
+ // Add points to interpolate against
+ std::vector<double> test0(3, 1.99);
+ std::vector<double> test1(3, 1.29);
+ std::vector<double> test2;
+ test2.push_back(2.59);
+ test2.push_back(1.29);
+ test2.push_back(2.99);
+
+ BOOST_CHECK_CLOSE(container->get(test0), 2.0, eps);
+ BOOST_CHECK_CLOSE(container->get(test1), 1.0, eps);
+ BOOST_CHECK_CLOSE(container->get(test2), 5.0, eps);
+}
+
+////////////////////////////////////////////////////////////////////////
+BOOST_AUTO_TEST_CASE(test_power_container_metadata){
+////////////////////////////////////////////////////////////////////////
+
+ // Create the data container
+ power_container::sptr container = power_container::make();
+
+ // Create some metadata to add
+ base_container::metadata_t data;
+
+ std::string fake_serial = "F2A432";
+ data["x300"] = fake_serial;
+
+ // Add some metadata
+ container->add_metadata(data);
+
+ // Check to see if the metadata matches
+ power_container::metadata_t recovered_data = container->get_metadata();
+
+ BOOST_CHECK_EQUAL(recovered_data["x300"], fake_serial);
+}
+
+////////////////////////////////////////////////////////////////////////
+BOOST_AUTO_TEST_CASE(test_power_serialization){
+////////////////////////////////////////////////////////////////////////
+
+ // Create the data container
+ power_container::sptr container = power_container::make();
+
+ // Create some metadata to add
+ base_container::metadata_t data;
+
+ std::string fake_serial = "F2A432";
+ data["x300"] = fake_serial;
+
+ // Add some metadata
+ container->add_metadata(data);
+
+ // Create some data points to add
+ std::vector<double> pt0(3, 1.0);
+ std::vector<double> pt1(3, 2.0);
+ std::vector<double> pt2(3, 3.0);
+
+ container->add(1.0, pt0);
+ container->add(2.0, pt1);
+ container->add(5.0, pt2);
+
+ std::string filename("test_power_serialization");
+
+ // Create/open a file to store the container
+ {
+ std::ofstream ofile(filename.c_str());
+
+ boost::archive::text_oarchive oarchive(ofile);
+ oarchive << *container;
+ }
+
+ // Restore to another data container
+ power_container::sptr new_container = power_container::make();
+
+ {
+ std::ifstream ifile(filename.c_str());
+ boost::archive::text_iarchive iarchive(ifile);
+
+ iarchive >> *new_container;
+ }
+
+ // Add points to interpolate against
+ std::vector<double> test0(3, 1.99);
+ std::vector<double> test1(3, 1.29);
+ std::vector<double> test2;
+ test2.push_back(2.59);
+ test2.push_back(1.29);
+ test2.push_back(2.99);
+
+ power_container::metadata_t recovered_data = new_container->get_metadata();
+
+ BOOST_CHECK_CLOSE(new_container->get(test0), 2.0, eps);
+ BOOST_CHECK_CLOSE(new_container->get(test1), 1.0, eps);
+ BOOST_CHECK_CLOSE(new_container->get(test2), 5.0, eps);
+
+ // Check to see if the metadata matches
+ BOOST_CHECK_EQUAL(recovered_data["x300"], fake_serial);
+
+ std::remove(filename.c_str());
+}
+
+////////////////////////////////////////////////////////////////////////
+BOOST_AUTO_TEST_CASE(test_interp_singular){
+////////////////////////////////////////////////////////////////////////
+
+ // Create the data container
+ power_container::sptr container = power_container::make();
+
+ // Create some data points to add
+ // that result in a singular matrix
+ std::vector<double> pt0(2, 1.0);
+ std::vector<double> pt1(2, 2.0);
+ std::vector<double> pt2(2, 3.0);
+ std::vector<double> pt3(2, 4.0);
+
+ container->add(1.0, pt0);
+ container->add(2.0, pt1);
+ container->add(3.0, pt2);
+ container->add(4.0, pt3);
+
+ std::vector<double> test(2, 2.5);
+ BOOST_CHECK_CLOSE(container->get(test), 2.5, eps);
+}
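
A quick consistency check on test_power_container_bilinear above: the four corner points (0,0)->1, (2,0)->1, (0,2)->0, (2,2)->0 are exactly fit by a bilinear surface with no x-dependence,

    f(x, y) = 1 - y/2
    f(1.0, 1.0) = 0.50
    f(1.5, 1.5) = 0.25
    f(0.0, 1.0) = 0.50

which matches the three BOOST_CHECK_CLOSE expectations.
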
diff --git a/host/tests/convert_test.cpp b/host/tests/convert_test.cpp
index dd04dcafc..b4a616133 100644
--- a/host/tests/convert_test.cpp
+++ b/host/tests/convert_test.cpp
@@ -17,7 +17,6 @@
#include <uhd/convert.hpp>
#include <boost/test/unit_test.hpp>
-#include <boost/foreach.hpp>
#include <stdint.h>
#include <boost/assign/list_of.hpp>
#include <complex>
@@ -75,7 +74,7 @@ static void test_convert_types_sc16(
){
//fill the input samples
std::vector<sc16_t> input(nsamps), output(nsamps);
- BOOST_FOREACH(sc16_t &in, input) in = sc16_t(
+ for(sc16_t &in: input) in = sc16_t(
short((float((std::rand())/(double(RAND_MAX)/2)) - 1)*32767/extra_div),
short((float((std::rand())/(double(RAND_MAX)/2)) - 1)*32767/extra_div)
);
@@ -126,7 +125,7 @@ static void test_convert_types_for_floats(
//fill the input samples
std::vector<data_type> input(nsamps), output(nsamps);
- BOOST_FOREACH(data_type &in, input) in = data_type(
+ for(data_type &in: input) in = data_type(
((std::rand()/(value_type(RAND_MAX)/2)) - 1)*float(extra_scale),
((std::rand()/(value_type(RAND_MAX)/2)) - 1)*float(extra_scale)
);
@@ -145,7 +144,7 @@ static void test_convert_types_for_floats(
;
//loopback foreach prio combo (generic vs best)
- BOOST_FOREACH(const int_pair_t &prio, prios){
+ for(const int_pair_t &prio: prios){
loopback(nsamps, in_id, out_id, input, output, prio.first, prio.second);
for (size_t i = 0; i < nsamps; i++){
MY_CHECK_CLOSE(input[i].real(), output[i].real(), value_type(1./(1 << 14)));
@@ -284,7 +283,7 @@ BOOST_AUTO_TEST_CASE(test_convert_types_fc32_to_sc16){
const size_t nsamps = 13;
std::vector<fc32_t> input(nsamps);
- BOOST_FOREACH(fc32_t &in, input) in = fc32_t(
+ for(fc32_t &in: input) in = fc32_t(
(std::rand()/(RAND_MAX/2.0)) - 1,
(std::rand()/(RAND_MAX/2.0)) - 1
);
@@ -329,7 +328,7 @@ BOOST_AUTO_TEST_CASE(test_convert_types_sc16_to_fc32){
const size_t nsamps = 13;
std::vector<sc16_t> input(nsamps);
- BOOST_FOREACH(sc16_t &in, input) in = sc16_t(
+ for(sc16_t &in: input) in = sc16_t(
std::rand()-(RAND_MAX/2),
std::rand()-(RAND_MAX/2)
);
@@ -424,9 +423,9 @@ static void test_convert_types_u8(
){
//fill the input samples
std::vector<uint8_t> input(nsamps), output(nsamps);
- BOOST_FOREACH(uint8_t &in, input) in = uint8_t(std::rand() & 0xFF);
+ for(uint8_t &in: input) in = uint8_t(std::rand() & 0xFF);
//uint32_t d = 48;
- //BOOST_FOREACH(uint8_t &in, input) in = d++;
+ //for(uint8_t &in: input) in = d++;
//run the loopback and test
convert::id_type in_id = id;
@@ -464,7 +463,7 @@ static void test_convert_types_s8(
){
//fill the input samples
std::vector<int8_t> input(nsamps), output(nsamps);
- BOOST_FOREACH(int8_t &in, input) in = int8_t(std::rand() & 0xFF);
+ for(int8_t &in: input) in = int8_t(std::rand() & 0xFF);
//run the loopback and test
convert::id_type in_id = id;
@@ -502,7 +501,7 @@ static void test_convert_types_s16(
){
//fill the input samples
std::vector<int16_t> input(nsamps), output(nsamps);
- BOOST_FOREACH(int16_t &in, input) in = int16_t(std::rand() & 0xFFFF);
+ for(int16_t &in: input) in = int16_t(std::rand() & 0xFFFF);
//run the loopback and test
convert::id_type in_id = id;
@@ -540,7 +539,7 @@ static void test_convert_types_fc32(
){
//fill the input samples
std::vector< std::complex<float> > input(nsamps), output(nsamps);
- BOOST_FOREACH(fc32_t &in, input) in = fc32_t(
+ for(fc32_t &in: input) in = fc32_t(
(std::rand()/float(RAND_MAX/2)) - 1,
(std::rand()/float(RAND_MAX/2)) - 1
);
@@ -581,7 +580,7 @@ static void test_convert_types_f32(
){
//fill the input samples
std::vector<float> input(nsamps), output(nsamps);
- BOOST_FOREACH(float &in, input) in = float((float(std::rand())/float(RAND_MAX/2)) - 1);
+ for(float &in: input) in = float((float(std::rand())/float(RAND_MAX/2)) - 1);
//run the loopback and test
convert::id_type in_id = id;
diff --git a/host/tests/device3_test.cpp b/host/tests/device3_test.cpp
index 657436717..a81f4ca0a 100644
--- a/host/tests/device3_test.cpp
+++ b/host/tests/device3_test.cpp
@@ -87,7 +87,6 @@ class pseudo_device3_impl : public uhd::device3
make_args.base_address = TEST_SID0.get_dst();
make_args.device_index = 0;
make_args.tree = _tree;
- make_args.is_big_endian = false;
std::cout << "[PSEUDO] Generating block controls 1/2:" << std::endl;
_rfnoc_block_ctrl.push_back( block_ctrl_base::make(make_args) );
diff --git a/host/tests/graph_search_test.cpp b/host/tests/graph_search_test.cpp
index 8c96bb954..d39f767e9 100644
--- a/host/tests/graph_search_test.cpp
+++ b/host/tests/graph_search_test.cpp
@@ -83,7 +83,7 @@ BOOST_AUTO_TEST_CASE(test_linear_downstream_search)
std::cout << "size: " << result.size() << std::endl;
BOOST_CHECK_EQUAL(result.size(), 1);
BOOST_CHECK_EQUAL(result[0]->get_test_id(), "node_B");
- BOOST_FOREACH(const result_node::sptr &node, result) {
+ for(const result_node::sptr &node: result) {
std::cout << node->get_test_id() << std::endl;
}
}
@@ -111,7 +111,7 @@ BOOST_AUTO_TEST_CASE(test_multi_iter_downstream_search)
// This time, we search for result_node
std::vector< result_node::sptr > result = node_A->find_downstream_node<result_node>();
BOOST_REQUIRE(result.size() == 4);
- BOOST_FOREACH(const result_node::sptr &node, result) {
+ for(const result_node::sptr &node: result) {
std::cout << node->get_test_id() << std::endl;
}
}
@@ -140,7 +140,7 @@ BOOST_AUTO_TEST_CASE(test_multi_iter_cycle_downstream_search)
// This time, we search for result_node
std::vector< result_node::sptr > result = node_A->find_downstream_node<result_node>();
BOOST_REQUIRE(result.size() == 4);
- BOOST_FOREACH(const result_node::sptr &node, result) {
+ for(const result_node::sptr &node: result) {
std::cout << node->get_test_id() << std::endl;
}
}
diff --git a/host/tests/msg_test.cpp b/host/tests/log_test.cpp
index 94b81268c..b0b17ea84 100644
--- a/host/tests/msg_test.cpp
+++ b/host/tests/log_test.cpp
@@ -16,25 +16,34 @@
//
#include <boost/test/unit_test.hpp>
-#include <uhd/utils/msg.hpp>
+#include <uhd/utils/log.hpp>
+#include <uhd/utils/log_add.hpp>
#include <iostream>
BOOST_AUTO_TEST_CASE(test_messages){
- std::cerr << "---begin print test ---" << std::endl;
- UHD_MSG(status) <<
- "This is a test print for a status message.\n"
- "And this is the second line of the test print.\n"
+ UHD_LOG_FASTPATH("foo");
+ UHD_LOG_FASTPATH("bar");
+ uhd::log::set_log_level(uhd::log::debug);
+ uhd::log::set_console_level(uhd::log::info);
+ uhd::log::add_logger("test",
+ [](const uhd::log::logging_info &I){
+ std::cout << "<TEST> " << I.message << std::endl;
+ }
+ );
+ uhd::log::set_logger_level("test", uhd::log::debug);
+ UHD_LOGGER_DEBUG("logger_test") <<
+ "This is a test print for a debug log."
;
- UHD_MSG(warning) <<
- "This is a test print for a warning message.\n"
- "And this is the second line of the test print.\n"
+ UHD_LOGGER_INFO("logger_test") <<
+ "This is a test print for a info log."
;
- UHD_MSG(error) <<
- "This is a test print for an error message.\n"
- "And this is the second line of the test print.\n"
+ UHD_LOGGER_WARNING("logger_test") <<
+ "This is a test print for a warning log."
+ ;
+ UHD_LOGGER_ERROR("logger_test") <<
+ "This is a test print for an error log."
;
UHD_HERE();
const int x = 42;
UHD_VAR(x);
- std::cerr << "---end print test ---" << std::endl;
}
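
Given the levels set in this test (global = debug, console = info, "test" sink = debug), the expected routing is roughly as follows; the console format assumes the optional time/thread/source prefixes are compiled out:

    UHD_LOG_FASTPATH("foo"/"bar")  -> written verbatim to stderr ("foobar", no newline)
    DEBUG record                   -> "test" sink only (the console sink filters it at info)
    INFO/WARNING/ERROR records     -> console as "[<LEVEL>] [logger_test] <message>",
                                      plus the "test" sink as "<TEST> <message>"
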
diff --git a/host/tests/nocscript_expr_test.cpp b/host/tests/nocscript_expr_test.cpp
index f82a613a3..31bbc8725 100644
--- a/host/tests/nocscript_expr_test.cpp
+++ b/host/tests/nocscript_expr_test.cpp
@@ -18,7 +18,6 @@
#include "../lib/rfnoc/nocscript/function_table.hpp"
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/make_shared.hpp>
#include <boost/format.hpp>
diff --git a/host/tests/nocscript_ftable_test.cpp b/host/tests/nocscript_ftable_test.cpp
index 283245132..546da0c2c 100644
--- a/host/tests/nocscript_ftable_test.cpp
+++ b/host/tests/nocscript_ftable_test.cpp
@@ -19,7 +19,6 @@
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/make_shared.hpp>
#include <algorithm>
diff --git a/host/tests/nocscript_parser_test.cpp b/host/tests/nocscript_parser_test.cpp
index a9c25977e..c21f40ad6 100644
--- a/host/tests/nocscript_parser_test.cpp
+++ b/host/tests/nocscript_parser_test.cpp
@@ -21,7 +21,6 @@
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
#include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/make_shared.hpp>
#include <algorithm>
diff --git a/host/tests/ranges_test.cpp b/host/tests/ranges_test.cpp
index 85bb4c3c4..49607b9a3 100644
--- a/host/tests/ranges_test.cpp
+++ b/host/tests/ranges_test.cpp
@@ -68,3 +68,12 @@ BOOST_AUTO_TEST_CASE(test_ranges_clip2){
BOOST_CHECK_CLOSE(mr.clip(3.1, true), 3., tolerance);
BOOST_CHECK_CLOSE(mr.clip(4., true), 3., tolerance);
}
+
+BOOST_AUTO_TEST_CASE(test_ranges_compare){
+ range_t range(1);
+ range_t n_range(1);
+ range_t d_range(2);
+
+ BOOST_CHECK(range == n_range);
+ BOOST_CHECK(range != d_range);
+}
diff --git a/host/tests/sid_t_test.cpp b/host/tests/sid_t_test.cpp
index 2043398a1..e07e1c9bc 100644
--- a/host/tests/sid_t_test.cpp
+++ b/host/tests/sid_t_test.cpp
@@ -118,8 +118,9 @@ BOOST_AUTO_TEST_CASE(test_sid_t_set) {
BOOST_CHECK_EQUAL(sid.get_dst_addr(), (uint32_t)0x0c);
BOOST_CHECK_EQUAL(sid.get_dst_endpoint(), (uint32_t)0xbc);
- sid_t flipped_sid = sid.reversed();
+ const sid_t flipped_sid = sid.reversed();
BOOST_CHECK_EQUAL(flipped_sid.get(), (uint32_t)0x0cbc0a0b);
+ BOOST_CHECK_EQUAL(flipped_sid.reversed(), sid);
// In-place
sid.reverse();
diff --git a/host/tests/subdev_spec_test.cpp b/host/tests/subdev_spec_test.cpp
index aa0b9a119..81f86380b 100644
--- a/host/tests/subdev_spec_test.cpp
+++ b/host/tests/subdev_spec_test.cpp
@@ -17,7 +17,6 @@
#include <boost/test/unit_test.hpp>
#include <uhd/usrp/subdev_spec.hpp>
-#include <boost/foreach.hpp>
#include <iostream>
BOOST_AUTO_TEST_CASE(test_subdevice_spec){
@@ -28,6 +27,11 @@ BOOST_AUTO_TEST_CASE(test_subdevice_spec){
sd_spec.push_back(uhd::usrp::subdev_spec_pair_t("A", "AB"));
sd_spec.push_back(uhd::usrp::subdev_spec_pair_t("B", "AB"));
+ //create a subdev_spec with something different
+ uhd::usrp::subdev_spec_t diff_sd_spec;
+ diff_sd_spec.push_back(uhd::usrp::subdev_spec_pair_t("B", "BA"));
+ diff_sd_spec.push_back(uhd::usrp::subdev_spec_pair_t("B", "BA"));
+
//convert to and from args string
std::cout << "Pretty Print: " << std::endl << sd_spec.to_pp_string();
std::string markup_str = sd_spec.to_string();
@@ -41,5 +45,8 @@ BOOST_AUTO_TEST_CASE(test_subdevice_spec){
for (size_t i = 0; i < sd_spec.size(); i++){
BOOST_CHECK_EQUAL(sd_spec.at(i).db_name, new_sd_spec.at(i).db_name);
BOOST_CHECK_EQUAL(sd_spec.at(i).sd_name, new_sd_spec.at(i).sd_name);
+
+ BOOST_CHECK(sd_spec.at(i) == new_sd_spec.at(i));
+ BOOST_CHECK(sd_spec.at(i) != diff_sd_spec.at(i));
}
}
diff --git a/host/tests/time_spec_test.cpp b/host/tests/time_spec_test.cpp
index 76dfb1930..9ff89ab0e 100644
--- a/host/tests/time_spec_test.cpp
+++ b/host/tests/time_spec_test.cpp
@@ -17,7 +17,6 @@
#include <boost/test/unit_test.hpp>
#include <uhd/types/time_spec.hpp>
-#include <boost/foreach.hpp>
#include <boost/thread.hpp> //sleep
#include <iostream>
#include <iomanip>
diff --git a/host/utils/CMakeLists.txt b/host/utils/CMakeLists.txt
index eb5a29df9..33cb5a972 100644
--- a/host/utils/CMakeLists.txt
+++ b/host/utils/CMakeLists.txt
@@ -156,9 +156,7 @@ ENDIF(NOT HAVE_PYTHON_MODULE_REQUESTS)
IF(ENABLE_USRP2)
SET(burners
usrp2_card_burner.py
- usrp2_card_burner_gui.py
usrp_n2xx_net_burner.py
- usrp_n2xx_net_burner_gui.py
)
IF(WIN32 AND UHD_RELEASE_MODE) #include dd.exe
diff --git a/host/utils/b100_eeprom.h b/host/utils/b100_eeprom.h
new file mode 100644
index 000000000..afb6f7ec5
--- /dev/null
+++ b/host/utils/b100_eeprom.h
@@ -0,0 +1,15 @@
+unsigned const char b100_eeprom_bin[] = {
+ 0xc2, 0x00, 0x25, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7b, 0x00, 0x00,
+ 0x02, 0x00, 0x6b, 0x12, 0x00, 0x08, 0x80, 0xfe, 0x75, 0x80, 0x00, 0x75,
+ 0xb2, 0x0b, 0x75, 0xa0, 0x03, 0x75, 0xb4, 0x03, 0x75, 0xb1, 0x00, 0x75,
+ 0xb6, 0x00, 0x90, 0xe6, 0x8a, 0xe4, 0xf0, 0x00, 0x90, 0xe6, 0x80, 0xe4,
+ 0xf0, 0x7f, 0x00, 0xef, 0x24, 0xe0, 0xf5, 0x82, 0xe4, 0x34, 0xe1, 0xf5,
+ 0x83, 0xe4, 0xf0, 0x0f, 0xbf, 0x10, 0xf0, 0x7e, 0x00, 0x7f, 0x00, 0x0e,
+ 0xbe, 0x00, 0x01, 0x0f, 0xef, 0x30, 0xe7, 0xf7, 0x63, 0xa0, 0x01, 0x80,
+ 0xf2, 0x78, 0x00, 0xe8, 0x44, 0x00, 0x60, 0x0c, 0x79, 0x00, 0x90, 0x18,
+ 0x00, 0xe4, 0xf0, 0xa3, 0xd8, 0xfc, 0xd9, 0xfa, 0xd0, 0x83, 0xd0, 0x82,
+ 0xf6, 0xd8, 0xfd, 0xc0, 0x82, 0xc0, 0x83, 0x75, 0x82, 0x00, 0x22, 0x75,
+ 0x81, 0x07, 0x12, 0x00, 0x49, 0xe5, 0x82, 0x60, 0x03, 0x02, 0x00, 0x03,
+ 0x02, 0x00, 0x03, 0x80, 0x01, 0xe6, 0x00, 0x00
+};
+unsigned int b100_eeprom_bin_len = 140;
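
The new b100_eeprom.h (and the matching usrp1_eeprom.h later in this patch) embeds the FX2 EEPROM bootstrap image directly as a C array, in the layout produced by xxd -i style dumpers. The generator itself is not part of this changeset; the following hypothetical helper shows how such a header could be produced from a .bin file:

// Hypothetical bin-to-header generator (not part of UHD): emits an
// array and length symbol in the same layout as b100_eeprom.h.
#include <cstdio>
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>
#include <vector>

int main(int argc, char *argv[])
{
    if (argc != 3) {
        std::cerr << "usage: bin2header <input.bin> <symbol>" << std::endl;
        return 1;
    }
    std::ifstream in(argv[1], std::ifstream::binary);
    const std::vector<unsigned char> data(
        (std::istreambuf_iterator<char>(in)), std::istreambuf_iterator<char>());

    const std::string sym(argv[2]);
    std::cout << "unsigned const char " << sym << "[] = {" << std::endl;
    for (size_t i = 0; i < data.size(); i++) {
        if (i % 12 == 0) std::printf("    ");          // 12 bytes per row
        std::printf("0x%02x", static_cast<unsigned>(data[i]));
        if (i + 1 != data.size()) std::printf(",");
        std::printf((i % 12 == 11 || i + 1 == data.size()) ? "\n" : " ");
    }
    std::cout << "};" << std::endl;
    std::cout << "unsigned int " << sym << "_len = " << data.size() << ";" << std::endl;
    return 0;
}
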
diff --git a/host/utils/converter_benchmark.cpp b/host/utils/converter_benchmark.cpp
index f6d7e9650..ba4819f49 100644
--- a/host/utils/converter_benchmark.cpp
+++ b/host/utils/converter_benchmark.cpp
@@ -354,7 +354,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
boost::is_any_of(","), // Split at ,
boost::token_compress_on // Avoid empty results
);
- BOOST_FOREACH(const std::string &this_prio, prios_in_list) {
+ for(const std::string &this_prio: prios_in_list) {
size_t prio_index = boost::lexical_cast<size_t>(this_prio);
converter::sptr conv_for_prio = get_converter(converter_id, prio_index)(); // Can throw a uhd::key_error
conv_list[prio_index] = conv_for_prio;
@@ -384,7 +384,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
/// Final configurations to the converter:
std::cout << "Configuring converters:" << std::endl;
- BOOST_FOREACH(priority_type prio_i, conv_list.keys()) {
+ for(priority_type prio_i: conv_list.keys()) {
std::cout << "* [" << prio_i << "]: ";
configure_conv(conv_list[prio_i], in_type, out_type);
}
@@ -393,7 +393,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
std::cout << "{{{" << std::endl;
if (not debug_mode) {
std::cout << "prio,duration_ms,avg_duration_ms,n_samples,iterations" << std::endl;
- BOOST_FOREACH(priority_type prio_i, conv_list.keys()) {
+ for(priority_type prio_i: conv_list.keys()) {
double duration = run_benchmark(
conv_list[prio_i],
input_buf_refs,
diff --git a/host/utils/fx2_init_eeprom.cpp b/host/utils/fx2_init_eeprom.cpp
index cf7fb2de2..e0915d9f2 100644
--- a/host/utils/fx2_init_eeprom.cpp
+++ b/host/utils/fx2_init_eeprom.cpp
@@ -20,11 +20,13 @@
#include <uhd/property_tree.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
+#include <boost/filesystem.hpp>
#include <boost/algorithm/string/predicate.hpp>
#include <iostream>
-//#include <cstdlib>
-#ifdef UHD_PLATFORM_LINUX
#include <fstream>
+#include "usrp1_eeprom.h"
+#include "b100_eeprom.h"
+#ifdef UHD_PLATFORM_LINUX
#include <unistd.h> // syscall constants
#include <fcntl.h> // O_NONBLOCK
#include <sys/syscall.h>
@@ -39,13 +41,14 @@ namespace po = boost::program_options;
int UHD_SAFE_MAIN(int argc, char *argv[]){
std::string type;
+ std::string image;
po::options_description desc("Allowed options");
desc.add_options()
("help", "help message")
- ("image", po::value<std::string>(), "BIN image file")
+ ("image", po::value<std::string>(), "BIN image file; if not specified, use built-in image")
("vid", po::value<std::string>(), "VID of device to program")
("pid", po::value<std::string>(), "PID of device to program")
- ("type", po::value<std::string>(), "device type (usrp1 or b100)")
+ ("type", po::value<std::string>(&type), "device type (usrp1 or b100, required if using built-in image)")
;
po::variables_map vm;
@@ -78,9 +81,9 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
//load the options into the address
uhd::device_addr_t device_addr;
device_addr["type"] = type;
- if(vm.count("vid") or vm.count("pid") or vm.count("type")) {
+ if(vm.count("vid") or vm.count("pid")) {
if(not (vm.count("vid") and vm.count("pid") and vm.count("type"))) {
- std::cerr << "ERROR: Must specify vid, pid, and type if specifying any of the three args" << std::endl;
+            std::cerr << "ERROR: Must specify vid, pid, and type when specifying vid or pid" << std::endl;
} else {
device_addr["vid"] = vm["vid"].as<std::string>();
device_addr["pid"] = vm["pid"].as<std::string>();
@@ -90,6 +93,38 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
device_addr["vid"] = FX2_VENDOR_ID;
device_addr["pid"] = FX2_PRODUCT_ID;
}
+ if(vm.count("image")) {
+ //if specified, use external image file
+ image = vm["image"].as<std::string>();
+ } else {
+ //if not specified, use built-ins; requires user to define type
+ size_t image_len;
+ unsigned const char* image_data;
+
+ if(!vm.count("type")) {
+ std::cerr << boost::format("ERROR: Image file not specified and type of device not given. Cannot use built-in images.\n");
+ return EXIT_FAILURE;
+ }
+
+ std::cout << boost::format("Using built-in image for \"%s\".\n") % type;
+
+ if(vm["type"].as<std::string>() == "usrp1") {
+ image_len = usrp1_eeprom_bin_len;
+ image_data = usrp1_eeprom_bin;
+ } else if(vm["type"].as<std::string>() == "b100") {
+ image_len = b100_eeprom_bin_len;
+ image_data = b100_eeprom_bin;
+ } else {
+ std::cerr << boost::format("ERROR: Unsupported device type \"%s\" specified and no EEPROM image file given.\n") % type;
+ return EXIT_FAILURE;
+ }
+
+ //get temporary file name, and write image to that.
+ image = boost::filesystem::unique_path().string();
+ std::ofstream tmp_image(image, std::ofstream::binary);
+ tmp_image.write((const char*)image_data, image_len);
+ tmp_image.close();
+ }
//find and create a control transport to do the writing.
@@ -105,9 +140,13 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
//uhd::device_addrs_t devs = uhd::device::find(found_addrs[i]);
uhd::device::sptr dev = uhd::device::make(found_addrs[i], uhd::device::USRP);
uhd::property_tree::sptr tree = dev->get_tree();
- tree->access<std::string>("/mboards/0/load_eeprom").set(vm["image"].as<std::string>());
+ tree->access<std::string>("/mboards/0/load_eeprom").set(image);
}
+ //delete temporary image file if we created one
+ if(!vm.count("image")) {
+ boost::filesystem::remove(image);
+ }
std::cout << "Power-cycle the usrp for the changes to take effect." << std::endl;
return EXIT_SUCCESS;
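
With built-in images, fx2_init_eeprom writes the embedded array to a uniquely named temporary file, hands that path to the load_eeprom property, and removes the file afterwards. The mechanism in isolation, with an illustrative stand-in for the generated arrays; error handling is omitted and only Boost.Filesystem is needed:

// Sketch of the temporary-file fallback used in fx2_init_eeprom.cpp.
#include <boost/filesystem.hpp>
#include <fstream>
#include <iostream>

namespace fs = boost::filesystem;

// Stand-in for the generated b100_eeprom_bin / usrp1_eeprom_bin arrays.
static const unsigned char example_eeprom_bin[] = {0xc2, 0x00, 0x25, 0x02};
static const unsigned int example_eeprom_bin_len = sizeof(example_eeprom_bin);

int main()
{
    // unique_path() yields a collision-safe temporary file name.
    const std::string image = fs::unique_path().string();
    {
        std::ofstream tmp_image(image.c_str(), std::ofstream::binary);
        tmp_image.write(
            reinterpret_cast<const char *>(example_eeprom_bin),
            example_eeprom_bin_len);
    } // stream flushed and closed here

    std::cout << "Wrote " << example_eeprom_bin_len
              << " bytes to " << image << std::endl;
    // ...pass `image` to the EEPROM loader at this point...

    fs::remove(image); // clean up the temporary file once it has been used
    return 0;
}
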
diff --git a/host/utils/latency/include/Responder.hpp b/host/utils/latency/include/Responder.hpp
index 31dcbc16b..4769312d9 100644
--- a/host/utils/latency/include/Responder.hpp
+++ b/host/utils/latency/include/Responder.hpp
@@ -24,7 +24,6 @@
#include <stdint.h>
#include <uhd/usrp/multi_usrp.hpp>
-#include <uhd/utils/msg.hpp>
using namespace std;
@@ -141,9 +140,6 @@ class Responder
// Main entry point after constructor.
int run();
- // Public method to inject UHD messages in the main ncurses window.
- void print_uhd_late_handler(uhd::msg::type_t type, const std::string& msg);
-
int get_return_code(){return _return_code;}
protected:
@@ -151,6 +147,7 @@ class Responder
// These 2 variables are used for ncurses output.
WINDOW* _window;
std::stringstream _ss;
+ std::stringstream _ss_cerr;
// struct which holds all arguments as constants settable from outside the class
const Options _opt;
diff --git a/host/utils/latency/lib/Responder.cpp b/host/utils/latency/lib/Responder.cpp
index e5be9e275..ba278f338 100644
--- a/host/utils/latency/lib/Responder.cpp
+++ b/host/utils/latency/lib/Responder.cpp
@@ -23,51 +23,33 @@
#include <complex>
#include <csignal>
#include <cmath>
+#include <sstream>
#include <boost/format.hpp>
#include <boost/algorithm/string.hpp>
+#include <boost/thread/condition_variable.hpp>
#include <boost/filesystem.hpp>
#include <uhd/utils/thread_priority.hpp>
#include <uhd/property_tree.hpp>
const std::string _eth_file("eths_info.txt");
-// UHD screen handler during initialization. Error messages will be printed to log file
-static std::string uhd_error_msgs;
-static void screen_handler(uhd::msg::type_t type, const std::string& msg)
-{
- printw( msg.c_str() );
- //printw("\n");
- refresh();
- if(type == uhd::msg::error){
- uhd_error_msgs.append(msg);
- uhd_error_msgs.append("\n");
- }
-}
-// UHD screen handler during test run. Error messages will be printed to log file
-static int s_late_count = 0;
-static Responder* s_responder; // needed here to have a way to inject uhd msg into Responder.
-// function is only called by UHD, if s_responder points to a valid instance.
-// this instance sets the function to be the output callback for UHD.
-static void _late_handler(uhd::msg::type_t type, const std::string& msg)
-{
- s_responder->print_uhd_late_handler(type, msg);
-}
+// Redirect output to stderr
+struct cerr_redirect {
+ cerr_redirect( std::streambuf * new_buffer )
+ : old( std::cerr.rdbuf( new_buffer ) )
+ { }
-void Responder::print_uhd_late_handler(uhd::msg::type_t type, const std::string& msg)
-{
- if (msg == "L") // This is just a test
- {
- ++s_late_count;
- }
- if(type == uhd::msg::error){
- uhd_error_msgs.append(msg);
- uhd_error_msgs.append("\n");
- // Only print error messages. There will be very many 'L's due to the way the test works.
- print_msg(msg);
+ ~cerr_redirect( ) {
+ std::cerr.rdbuf( old );
}
-}
+
+private:
+ std::streambuf * old;
+};
+
+
// Catch keyboard interrupts for clean manual abort
static bool s_stop_signal_called = false;
@@ -132,7 +114,9 @@ Responder::Responder( Options& opt)
_last_overrun_count(0)
{
time( &_dbginfo.start_time ); // for debugging
- s_responder = this;
+
+ // Disable logging to console
+ uhd::log::set_console_level(uhd::log::off);
if (uhd::set_thread_priority_safe(_opt.rt_priority, _opt.realtime) == false) // try to set realtime scheduling
{
@@ -141,7 +125,6 @@ Responder::Responder( Options& opt)
_return_code = calculate_dependent_values();
- uhd::msg::register_handler(&screen_handler); // used to print USRP initialization status
// From this point on, everything is written to a ncurses window!
create_ncurses_window();
@@ -180,7 +163,9 @@ Responder::Responder( Options& opt)
}
// set up handlers for test run
- uhd::msg::register_handler(&_late_handler); // capture UHD output.
+ // uhd::msg::register_handler(&_late_handler); // capture UHD output.
+
+ cerr_redirect(_ss_cerr.rdbuf());
register_stop_signal_handler();
}
@@ -1211,11 +1196,6 @@ Responder::write_log_file()
write_debug_info(logs);
- if(uhd_error_msgs.length() > 0)
- {
- logs << endl << "%% UHD ERROR MESSAGES %%" << endl;
- logs << uhd_error_msgs;
- }
}
}
catch(...)
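
The Responder no longer hooks uhd::msg handlers: console logging is switched off via uhd::log::set_console_level(uhd::log::off) and stray std::cerr output is captured into a stringstream through the small cerr_redirect guard added above. A self-contained sketch of that guard follows; note the redirect only lasts while the guard object is alive, so in practice it should be held as a named object (or a class member) for the scope that needs capturing rather than created as a throw-away temporary:

// Minimal sketch of an RAII std::cerr redirect, mirroring the
// cerr_redirect struct added to Responder.cpp.
#include <iostream>
#include <sstream>
#include <streambuf>

struct cerr_redirect {
    explicit cerr_redirect(std::streambuf *new_buffer)
        : old(std::cerr.rdbuf(new_buffer)) {}   // swap in the new buffer, remember the old one
    ~cerr_redirect() { std::cerr.rdbuf(old); }  // restore on destruction
private:
    std::streambuf *old;
};

int main()
{
    std::stringstream captured;
    {
        cerr_redirect guard(captured.rdbuf()); // named guard: active for this scope
        std::cerr << "this line lands in the stringstream" << std::endl;
    } // guard destroyed, std::cerr restored

    std::cerr << "this line goes to the real stderr again" << std::endl;
    std::cout << "captured: " << captured.str();
    return 0;
}
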
diff --git a/host/utils/octoclock_firmware_burner.cpp b/host/utils/octoclock_firmware_burner.cpp
index 4ffaeb0d7..57d6f3cc8 100644
--- a/host/utils/octoclock_firmware_burner.cpp
+++ b/host/utils/octoclock_firmware_burner.cpp
@@ -15,95 +15,12 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <algorithm>
-#include <csignal>
-#include <iostream>
-#include <fstream>
-#include <stdexcept>
-#include <vector>
-
-#include <boost/foreach.hpp>
-#include <boost/asio.hpp>
#include <boost/program_options.hpp>
-#include <boost/assign.hpp>
-#include <stdint.h>
-#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
-#include <boost/filesystem.hpp>
-#include <boost/thread.hpp>
-
-#include <uhd/device.hpp>
-#include <uhd/transport/udp_simple.hpp>
-#include <uhd/types/device_addr.hpp>
-#include <uhd/types/time_spec.hpp>
-#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/paths.hpp>
-#include <uhd/utils/paths.hpp>
-#include <uhd/utils/safe_main.hpp>
-
-#include "../lib/usrp_clock/octoclock/common.h"
-#include "../lib/utils/ihex.hpp"
-
-#define MAX_FIRMWARE_SIZE 1024*120
-#define BLOCK_SIZE 256
-#define UDP_TIMEOUT 5
+#include <iostream>
-namespace fs = boost::filesystem;
namespace po = boost::program_options;
-using namespace uhd;
-using namespace uhd::transport;
-
-static int num_ctrl_c = 0;
-void sig_int_handler(int){
- num_ctrl_c++;
- if(num_ctrl_c == 1){
- std::cout << std::endl << "Are you sure you want to abort the image burning? If you do, your "
- "OctoClock device will be bricked!" << std::endl
- << "Press Ctrl+C again to abort the image burning procedure." << std::endl << std::endl;
- }
- else{
- std::cout << std::endl << "Aborting. Your OctoClock device will be bricked." << std::endl
- << "Refer to http://files.ettus.com/manual/page_octoclock.html#bootloader" << std::endl
- << "for details on restoring your device." << std::endl;
- exit(EXIT_FAILURE);
- }
-}
-
-uint8_t firmware_image[MAX_FIRMWARE_SIZE];
-size_t firmware_size = 0;
-uint8_t octoclock_data[udp_simple::mtu];
-octoclock_packet_t *pkt_in = reinterpret_cast<octoclock_packet_t *>(octoclock_data);
-std::string firmware_path, actual_firmware_path;
-size_t num_blocks = 0;
-bool hex = true;
-
-static uint16_t calculate_crc(uint8_t* buffer, uint16_t len){
- uint16_t crc = 0xFFFF;
-
- for(size_t i = 0; i < len; i++){
- crc ^= buffer[i];
- for(uint8_t j = 0; j < 8; ++j){
- if(crc & 1) crc = (crc >> 1) ^ 0xA001;
- else crc = (crc >> 1);
- }
- }
-
- return crc;
-}
-
-/*
- * Functions
- */
-void list_octoclocks(){
- device_addrs_t found_octoclocks = device::find(uhd::device_addr_t(), device::CLOCK);
-
- std::cout << "Available OctoClock devices:" << std::endl;
- BOOST_FOREACH(const device_addr_t &oc, found_octoclocks){
- std::cout << " * " << oc["addr"] << std::endl;
- }
-}
-
void print_image_loader_warning(const std::string &fw_path, const po::variables_map &vm){
// Newline + indent
#ifdef UHD_PLATFORM_WIN32
@@ -115,10 +32,8 @@ void print_image_loader_warning(const std::string &fw_path, const po::variables_
std::string uhd_image_loader = str(boost::format("uhd_image_loader --args=\"type=octoclock,addr=%s\""
"%s --fw-path=%s")
% vm["addr"].as<std::string>() % nl % fw_path);
-
std::cout << "************************************************************************************************" << std::endl
- << "WARNING: This utility will be removed in an upcoming version of UHD. In the future, use" << std::endl
- << " this command:" << std::endl
+ << "ERROR: This utility has been removed in this version of UHD. Use this command:" << std::endl
<< std::endl
<< uhd_image_loader << std::endl
<< std::endl
@@ -126,301 +41,21 @@ void print_image_loader_warning(const std::string &fw_path, const po::variables_
<< std::endl;
}
-/*
- * Manually find bootloader. This sends multiple packets in order to increase chances of getting
- * bootloader before it switches to the application.
- */
-device_addrs_t bootloader_find(const std::string &ip_addr){
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(OCTOCLOCK_UDP_CTRL_PORT));
-
- octoclock_packet_t pkt_out;
- pkt_out.sequence = uhd::htonx<uint32_t>(std::rand());
- pkt_out.code = OCTOCLOCK_QUERY_CMD;
- pkt_out.len = 0;
- size_t len = 0;
-
- device_addrs_t addrs;
-
- boost::system_time comm_timeout = boost::get_system_time() + boost::posix_time::milliseconds(3000);
-
- while(boost::get_system_time() < comm_timeout){
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, OCTOCLOCK_QUERY_CMD, pkt_out, len, octoclock_data);
- if(UHD_OCTOCLOCK_PACKET_MATCHES(OCTOCLOCK_QUERY_ACK, pkt_out, pkt_in, len) and
- pkt_in->proto_ver == OCTOCLOCK_BOOTLOADER_PROTO_VER){
- addrs.push_back(device_addr_t());
- addrs[0]["type"] = "octoclock-bootloader";
- addrs[0]["addr"] = udp_transport->get_recv_addr();
- break;
- }
- }
-
- return addrs;
-}
-
-void read_firmware(){
- std::ifstream firmware_file(actual_firmware_path.c_str(), std::ios::binary);
- firmware_size = size_t(fs::file_size(actual_firmware_path));
- if(firmware_size > MAX_FIRMWARE_SIZE){
- firmware_file.close();
- throw uhd::runtime_error(str(boost::format("Firmware file too large: %d > %d")
- % firmware_size % (MAX_FIRMWARE_SIZE)));
- }
- firmware_file.read((char*)firmware_image, firmware_size);
- firmware_file.close();
-
- num_blocks = (firmware_size % BLOCK_SIZE) ? ((firmware_size / BLOCK_SIZE) + 1)
- : (firmware_size / BLOCK_SIZE);
-}
-
-void burn_firmware(udp_simple::sptr udp_transport){
- octoclock_packet_t pkt_out;
- pkt_out.sequence = uhd::htonx<uint32_t>(std::rand());
- pkt_out.len = (uint16_t)firmware_size;
- pkt_out.crc = calculate_crc(firmware_image, firmware_size);
- size_t len = 0, current_pos = 0;
-
- //Tell OctoClock not to jump to application, wait for us instead
- std::cout << "Telling OctoClock to prepare for firmware download..." << std::flush;
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, PREPARE_FW_BURN_CMD, pkt_out, len, octoclock_data);
- if(UHD_OCTOCLOCK_PACKET_MATCHES(FW_BURN_READY_ACK, pkt_out, pkt_in, len)) std::cout << "ready." << std::endl;
- else{
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Could not get OctoClock in valid state for firmware download.");
- }
-
- std::cout << std::endl << "Burning firmware." << std::endl;
- pkt_out.code = FILE_TRANSFER_CMD;
-
- //Actual burning below
- size_t num_tries = 0;
- for(size_t i = 0; i < num_blocks; i++){
- num_tries = 0;
- pkt_out.sequence++;
- pkt_out.addr = i*BLOCK_SIZE;
- std::cout << "\r * Progress: " << int(double(i)/double(num_blocks)*100)
- << "% (" << (i+1) << "/" << num_blocks << " blocks)" << std::flush;
-
- memset(pkt_out.data, 0, BLOCK_SIZE);
- memcpy((void*)(pkt_out.data), &firmware_image[i*BLOCK_SIZE], BLOCK_SIZE);
-
- bool success = false;
- while(num_tries <= 5){
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, FILE_TRANSFER_CMD, pkt_out, len, octoclock_data);
- if(UHD_OCTOCLOCK_PACKET_MATCHES(FILE_TRANSFER_ACK, pkt_out, pkt_in, len)){
- success = true;
- break;
- }
- else{
- num_tries++;
- boost::this_thread::sleep(boost::posix_time::milliseconds(100));
- }
- }
- if(not success){
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to burn firmware to OctoClock!");
- }
-
- current_pos += BLOCK_SIZE;
- }
-
- std::cout << "\r * Progress: 100% (" << num_blocks << "/" << num_blocks << " blocks)" << std::endl;
-}
-
-void verify_firmware(udp_simple::sptr udp_transport){
- octoclock_packet_t pkt_out;
- pkt_out.proto_ver = OCTOCLOCK_FW_COMPAT_NUM;
- pkt_out.sequence = uhd::htonx<uint32_t>(std::rand());
- size_t len = 0, current_pos = 0;
-
- for(size_t i = 0; i < num_blocks; i++){
- pkt_out.sequence++;
- pkt_out.addr = i*BLOCK_SIZE;
- std::cout << "\r * Progress: " << int(double(i)/double(num_blocks)*100)
- << "% (" << (i+1) << "/" << num_blocks << " blocks)" << std::flush;
-
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, READ_FW_CMD, pkt_out, len, octoclock_data);
- if(UHD_OCTOCLOCK_PACKET_MATCHES(READ_FW_ACK, pkt_out, pkt_in, len)){
- if(memcmp((void*)(pkt_in->data), &firmware_image[i*BLOCK_SIZE],
- std::min(int(firmware_size-current_pos), BLOCK_SIZE))){
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to verify OctoClock firmware!");
- }
- }
- else{
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to verify OctoClock firmware!");
- }
- }
-
- std::cout << "\r * Progress: 100% (" << num_blocks << "/" << num_blocks << " blocks)" << std::endl;
-}
-
-bool reset_octoclock(const std::string &ip_addr){
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(OCTOCLOCK_UDP_CTRL_PORT));
-
- octoclock_packet_t pkt_out;
- pkt_out.sequence = uhd::htonx<uint32_t>(std::rand());
- size_t len;
-
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, RESET_CMD, pkt_out, len, octoclock_data);
- if(not UHD_OCTOCLOCK_PACKET_MATCHES(RESET_ACK, pkt_out, pkt_in, len)){
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to place device in state to receive firmware.");
- }
-
- boost::this_thread::sleep(boost::posix_time::milliseconds(500));
- return (bootloader_find(ip_addr).size() == 1);
-}
-
-void finalize(udp_simple::sptr udp_transport){
- octoclock_packet_t pkt_out;
- pkt_out.len = 0;
- pkt_out.sequence = uhd::htonx<uint32_t>(std::rand());
- size_t len = 0;
-
- UHD_OCTOCLOCK_SEND_AND_RECV(udp_transport, OCTOCLOCK_FW_COMPAT_NUM, FINALIZE_BURNING_CMD, pkt_out, len, octoclock_data);
- if(not UHD_OCTOCLOCK_PACKET_MATCHES(FINALIZE_BURNING_ACK, pkt_out, pkt_in, len)){
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- std::cout << "no ACK. Bootloader may not have loaded application." << std::endl;
- }
-}
-
-void octoclock_convert_ihex(const std::string &hex_path, const std::string &bin_path){
- ihex_reader hex_reader(hex_path);
- hex_reader.to_bin_file(bin_path);
-}
-
-int UHD_SAFE_MAIN(UHD_UNUSED(int argc), UHD_UNUSED(char *argv[])){
-
- std::string ip_addr;
+int main(int argc, const char *argv[])
+{
+ std::string ip_addr, firmware_path;
po::options_description desc("Allowed options");
desc.add_options()
("help", "Display this help message.")
- ("addr", po::value<std::string>(&ip_addr), "Specify an IP address.")
- ("fw-path", po::value<std::string>(&firmware_path), "Specify a custom firmware path.")
+ ("addr", po::value<std::string>(&ip_addr)->default_value("addr=1.2.3.4"), "Specify an IP address.")
+ ("fw-path", po::value<std::string>(&firmware_path)->default_value("path/to/firmware"), "Specify a custom firmware path.")
("list", "List all available OctoClock devices.")
;
po::variables_map vm;
po::store(po::parse_command_line(argc, argv, desc), vm);
po::notify(vm);
- //Print help message
- if(vm.count("help")){
- std::cout << "OctoClock Firmware Burner" << std::endl << std::endl;
-
- std::cout << "Burns a firmware image file onto an OctoClock device. Specify" << std::endl
- << "the address of the OctoClock with the --addr option. To burn" << std::endl
- << "a custom firmware image, use the --fw-path option. Otherwise, the" << std::endl
- << "utility will use the default image. To list all available" << std::endl
- << "OctoClock devices without burning firmware, use the --list" << std::endl
- << "option." << std::endl << std::endl;
-
- std::cout << desc << std::endl;
- return EXIT_SUCCESS;
- }
-
- //List all available devices
- if(vm.count("list")){
- list_octoclocks();
- return EXIT_SUCCESS;
- }
-
- if(not (vm.count("addr"))){
- throw uhd::runtime_error("You must specify an address with the --addr option!");
- }
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(OCTOCLOCK_UDP_FW_PORT));
-
- //If custom path given, make sure it exists
- if(vm.count("fw-path")){
- //Expand tilde usage if applicable
- #ifndef UHD_PLATFORM_WIN32
- if(firmware_path.find("~/") == 0) firmware_path.replace(0,1,getenv("HOME"));
- #endif
-
- if(not fs::exists(firmware_path)){
- throw uhd::runtime_error(str(boost::format("This filepath does not exist: %s") % firmware_path));
- }
- }
- else firmware_path = find_image_path("octoclock_r4_fw.hex");
-
- //If Intel hex file detected, convert to binary
- std::string ext = fs::extension(firmware_path);
- if(ext == ".hex"){
- std::cout << "Found firmware at path: " << firmware_path << std::endl;
-
- //Write firmware .bin file to temporary directory
- fs::path temp_bin = fs::path(fs::path(get_tmp_path()) / str(boost::format("octoclock_fw_%d.bin")
- % time_spec_t::get_system_time().get_full_secs()));
- octoclock_convert_ihex(firmware_path, temp_bin.string());
-
- actual_firmware_path = temp_bin.string();
- }
- else if(ext == ".bin"){
- hex = false;
- actual_firmware_path = firmware_path;
- std::cout << "Found firmware at path: " << firmware_path << std::endl;
- }
- else throw uhd::runtime_error("The firmware file has in improper extension (must be .hex or .bin).");
-
- std::cout << std::endl << boost::format("Searching for OctoClock with IP address %s...") % ip_addr << std::flush;
- device_addrs_t octoclocks = device::find(str(boost::format("addr=%s") % ip_addr), device::CLOCK);
- if(octoclocks.size() == 1){
- if(octoclocks[0]["type"] == "octoclock"){
- std::cout << "found. Resetting..." << std::flush;
- if(reset_octoclock(ip_addr)) std::cout << "successful." << std::endl;
- else{
- std::cout << "failed." << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to reset OctoClock device into its bootloader.");
- }
- }
- else std::cout << "found." << std::endl;
- }
- else{
- std::cout << "failed." << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Could not find OctoClock with given IP address!");
- }
-
- read_firmware();
-
print_image_loader_warning(firmware_path, vm);
- std::signal(SIGINT, &sig_int_handler);
-
- burn_firmware(udp_transport);
- std::cout << "Verifying firmware." << std::endl;
- verify_firmware(udp_transport);
- std::cout << std::endl << "Telling OctoClock bootloader to load application..." << std::flush;
- finalize(udp_transport);
- std::cout << "done." << std::endl;
-
- std::cout << "Waiting for OctoClock to reinitialize..." << std::flush;
- boost::this_thread::sleep(boost::posix_time::milliseconds(500));
- octoclocks = device::find(str(boost::format("addr=%s") % ip_addr), device::CLOCK);
- if(octoclocks.size() == 1){
- if(octoclocks[0]["type"] == "octoclock-bootloader"){
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Firmware did not load properly.");
- }
- else{
- std::cout << "found." << std::endl << std::endl
- << "Successfully burned firmware." << std::endl << std::endl;
- }
- }
- else{
- std::cout << std::endl;
- if(hex) fs::remove(actual_firmware_path);
- throw uhd::runtime_error("Failed to reinitialize OctoClock.");
- }
- if(hex) fs::remove(actual_firmware_path);
-
- return EXIT_SUCCESS;
+ return EXIT_FAILURE;
}
diff --git a/host/utils/uhd_cal_rx_iq_balance.cpp b/host/utils/uhd_cal_rx_iq_balance.cpp
index 99a5abdda..d599a4e03 100644
--- a/host/utils/uhd_cal_rx_iq_balance.cpp
+++ b/host/utils/uhd_cal_rx_iq_balance.cpp
@@ -20,7 +20,6 @@
#include <uhd/utils/safe_main.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
#include <uhd/usrp/multi_usrp.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
@@ -184,7 +183,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
return EXIT_FAILURE;
}
- UHD_MSG(status) << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
+ std::cout << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
for (double rx_lo_i = freq_start; rx_lo_i <= freq_stop; rx_lo_i += freq_step)
{
diff --git a/host/utils/uhd_cal_tx_dc_offset.cpp b/host/utils/uhd_cal_tx_dc_offset.cpp
index 8b12f7e95..e3cdc087e 100644
--- a/host/utils/uhd_cal_tx_dc_offset.cpp
+++ b/host/utils/uhd_cal_tx_dc_offset.cpp
@@ -20,7 +20,6 @@
#include <uhd/utils/safe_main.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
#include <uhd/usrp/multi_usrp.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
@@ -192,7 +191,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
return EXIT_FAILURE;
}
- UHD_MSG(status) << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
+ std::cout << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
//set RX gain
usrp->set_rx_gain(0);
diff --git a/host/utils/uhd_cal_tx_iq_balance.cpp b/host/utils/uhd_cal_tx_iq_balance.cpp
index f08f8c9d1..445b86694 100644
--- a/host/utils/uhd_cal_tx_iq_balance.cpp
+++ b/host/utils/uhd_cal_tx_iq_balance.cpp
@@ -189,7 +189,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[])
return EXIT_FAILURE;
}
- UHD_MSG(status) << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
+ std::cout << boost::format("Calibration frequency range: %d MHz -> %d MHz") % (freq_start/1e6) % (freq_stop/1e6) << std::endl;
for (double tx_lo_i = freq_start; tx_lo_i <= freq_stop; tx_lo_i += freq_step)
{
diff --git a/host/utils/uhd_find_devices.cpp b/host/utils/uhd_find_devices.cpp
index c258c580e..e74307695 100644
--- a/host/utils/uhd_find_devices.cpp
+++ b/host/utils/uhd_find_devices.cpp
@@ -17,11 +17,11 @@
#include <uhd/utils/safe_main.hpp>
#include <uhd/device.hpp>
+#include <boost/lexical_cast.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
#include <iostream>
#include <cstdlib>
-
namespace po = boost::program_options;
int UHD_SAFE_MAIN(int argc, char *argv[]){
@@ -49,12 +49,50 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
return EXIT_FAILURE;
}
- for (size_t i = 0; i < device_addrs.size(); i++){
- std::cout << "--------------------------------------------------" << std::endl;
+ typedef std::map<std::string, std::set<std::string> > device_multi_addrs_t;
+ typedef std::map<std::string, device_multi_addrs_t> device_addrs_filtered_t;
+ device_addrs_filtered_t found_devices;
+ for (auto it = device_addrs.begin(); it != device_addrs.end(); ++it) {
+ std::string serial = (*it)["serial"];
+ found_devices[serial] = device_multi_addrs_t();
+ for(std::string key: it->keys()) {
+ if (key != "serial") {
+ found_devices[serial][key].insert(it->get(key));
+ }
+ }
+ for (auto sit = it + 1; sit != device_addrs.end();) {
+ if ((*sit)["serial"] == serial) {
+ for(std::string key: sit->keys()) {
+ if (key != "serial") {
+ found_devices[serial][key].insert(sit->get(key));
+ }
+ }
+ sit = device_addrs.erase(sit);
+ } else {
+ sit++;
+ }
+ }
+ }
+
+ int i = 0;
+ for (auto dit = found_devices.begin(); dit != found_devices.end(); ++dit) {
+ std::cout << "--------------------------------------------------"
+ << std::endl;
std::cout << "-- UHD Device " << i << std::endl;
- std::cout << "--------------------------------------------------" << std::endl;
- std::cout << device_addrs[i].to_pp_string() << std::endl << std::endl;
- //uhd::device::make(device_addrs[i]); //test make
+ std::cout << "--------------------------------------------------"
+ << std::endl;
+ std::stringstream ss;
+ ss << "Device Address:" << std::endl;
+ ss << boost::format(" serial: %s") % dit->first << std::endl;
+ for (auto mit = dit->second.begin(); mit != dit->second.end(); ++mit) {
+ for (auto vit = mit->second.begin(); vit != mit->second.end();
+ ++vit) {
+ ss << boost::format(" %s: %s") % mit->first % *vit
+ << std::endl;
+ }
+ }
+ std::cout << ss.str() << std::endl << std::endl;
+ i++;
}
return EXIT_SUCCESS;
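
uhd_find_devices now folds multiple discovery records that share a serial (for example the same USRP reachable over two interfaces) into a single entry whose remaining keys hold sets of values. The grouping is a plain nested map; a standalone sketch with hypothetical address data (the serial and addresses below are made up):

// Illustrative sketch of the serial-based grouping in uhd_find_devices.cpp.
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

typedef std::map<std::string, std::string> addr_record_t;             // one discovery record
typedef std::map<std::string, std::set<std::string> > device_multi_addrs_t;
typedef std::map<std::string, device_multi_addrs_t> device_addrs_filtered_t;

int main()
{
    // Hypothetical results: the same device discovered on two interfaces.
    std::vector<addr_record_t> device_addrs(2);
    device_addrs[0]["serial"] = "30A1234";
    device_addrs[0]["addr"]   = "192.168.10.2";
    device_addrs[0]["type"]   = "x300";
    device_addrs[1]["serial"] = "30A1234";
    device_addrs[1]["addr"]   = "192.168.20.2";
    device_addrs[1]["type"]   = "x300";

    // Group every non-serial key under the device's serial number.
    device_addrs_filtered_t found_devices;
    for (const addr_record_t &dev : device_addrs) {
        const std::string serial = dev.at("serial");
        for (const auto &field : dev) {
            if (field.first != "serial") {
                found_devices[serial][field.first].insert(field.second);
            }
        }
    }

    // Print one block per physical device.
    for (const auto &dev : found_devices) {
        std::cout << "serial: " << dev.first << std::endl;
        for (const auto &field : dev.second) {
            for (const std::string &value : field.second) {
                std::cout << "    " << field.first << ": " << value << std::endl;
            }
        }
    }
    return 0;
}
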
diff --git a/host/utils/uhd_usrp_probe.cpp b/host/utils/uhd_usrp_probe.cpp
index 5fc407bfa..fc66b7107 100644
--- a/host/utils/uhd_usrp_probe.cpp
+++ b/host/utils/uhd_usrp_probe.cpp
@@ -28,7 +28,6 @@
#include <uhd/types/sensors.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
-#include <boost/foreach.hpp>
#include <iostream>
#include <sstream>
#include <vector>
@@ -44,7 +43,7 @@ static std::string make_border(const std::string &text){
std::vector<std::string> lines; boost::split(lines, text, boost::is_any_of("\n"));
while (lines.back().empty()) lines.pop_back(); //strip trailing newlines
if (lines.size()) lines[0] = " " + lines[0]; //indent the title line
- BOOST_FOREACH(const std::string &line, lines){
+ for(const std::string &line: lines){
ss << boost::format("| %s") % line << std::endl;
}
//ss << boost::format(" \\_____________________________________________________") << std::endl;
@@ -62,7 +61,7 @@ static std::string get_dsp_pp_string(const std::string &type, property_tree::spt
static std::string prop_names_to_pp_string(const std::vector<std::string> &prop_names){
std::stringstream ss; size_t count = 0;
- BOOST_FOREACH(const std::string &prop_name, prop_names){
+ for(const std::string &prop_name: prop_names){
ss << ((count++)? ", " : "") << prop_name;
}
return ss.str();
@@ -75,14 +74,16 @@ static std::string get_frontend_pp_string(const std::string &type, property_tree
ss << boost::format("Name: %s") % (tree->access<std::string>(path / "name").get()) << std::endl;
ss << boost::format("Antennas: %s") % prop_names_to_pp_string(tree->access<std::vector<std::string> >(path / "antenna/options").get()) << std::endl;
- ss << boost::format("Sensors: %s") % prop_names_to_pp_string(tree->list(path / "sensors")) << std::endl;
+    if (tree->exists(path / "sensors")) {
+ ss << boost::format("Sensors: %s") % prop_names_to_pp_string(tree->list(path / "sensors")) << std::endl;
+ }
meta_range_t freq_range = tree->access<meta_range_t>(path / "freq/range").get();
ss << boost::format("Freq range: %.3f to %.3f MHz") % (freq_range.start()/1e6) % (freq_range.stop()/1e6) << std::endl;
std::vector<std::string> gain_names = tree->list(path / "gains");
if (gain_names.size() == 0) ss << "Gain Elements: None" << std::endl;
- BOOST_FOREACH(const std::string &name, gain_names){
+ for(const std::string &name: gain_names){
meta_range_t gain_range = tree->access<meta_range_t>(path / "gains" / name / "range").get();
ss << boost::format("Gain range %s: %.1f to %.1f step %.1f dB") % name % gain_range.start() % gain_range.stop() % gain_range.step() << std::endl;
}
@@ -93,22 +94,25 @@ static std::string get_frontend_pp_string(const std::string &type, property_tree
}
ss << boost::format("Connection Type: %s") % (tree->access<std::string>(path / "connection").get()) << std::endl;
- ss << boost::format("Uses LO offset: %s") % ((tree->access<bool>(path / "use_lo_offset").get())? "Yes" : "No") << std::endl;
+ ss << boost::format("Uses LO offset: %s")
+ % ((tree->exists(path / "use_lo_offset") and tree->access<bool>(path / "use_lo_offset").get())? "Yes" : "No")
+ << std::endl;
return ss.str();
}
static std::string get_codec_pp_string(const std::string &type, property_tree::sptr tree, const fs_path &path){
std::stringstream ss;
- ss << boost::format("%s Codec: %s") % type % path.leaf() << std::endl;
- //ss << std::endl;
-
- ss << boost::format("Name: %s") % (tree->access<std::string>(path / "name").get()) << std::endl;
- std::vector<std::string> gain_names = tree->list(path / "gains");
- if (gain_names.size() == 0) ss << "Gain Elements: None" << std::endl;
- BOOST_FOREACH(const std::string &name, gain_names){
- meta_range_t gain_range = tree->access<meta_range_t>(path / "gains" / name / "range").get();
- ss << boost::format("Gain range %s: %.1f to %.1f step %.1f dB") % name % gain_range.start() % gain_range.stop() % gain_range.step() << std::endl;
+ if (tree->exists(path / "name")) {
+ ss << boost::format("%s Codec: %s") % type % path.leaf() << std::endl;
+
+ ss << boost::format("Name: %s") % (tree->access<std::string>(path / "name").get()) << std::endl;
+ std::vector<std::string> gain_names = tree->list(path / "gains");
+ if (gain_names.size() == 0) ss << "Gain Elements: None" << std::endl;
+ for(const std::string &name: gain_names){
+ meta_range_t gain_range = tree->access<meta_range_t>(path / "gains" / name / "range").get();
+ ss << boost::format("Gain range %s: %.1f to %.1f step %.1f dB") % name % gain_range.start() % gain_range.stop() % gain_range.step() << std::endl;
+ }
}
return ss.str();
}
@@ -129,8 +133,10 @@ static std::string get_dboard_pp_string(const std::string &type, property_tree::
if (not gdb_eeprom.serial.empty()) ss << boost::format("Serial: %s") % gdb_eeprom.serial << std::endl;
}
}
- BOOST_FOREACH(const std::string &name, tree->list(path / (prefix + "_frontends"))){
- ss << make_border(get_frontend_pp_string(type, tree, path / (prefix + "_frontends") / name));
+ if (tree->exists(path / (prefix + "_frontends"))) {
+ for(const std::string &name: tree->list(path / (prefix + "_frontends"))){
+ ss << make_border(get_frontend_pp_string(type, tree, path / (prefix + "_frontends") / name));
+ }
}
ss << make_border(get_codec_pp_string(type, tree, path.branch_path().branch_path() / (prefix + "_codecs") / path.leaf()));
return ss.str();
@@ -140,7 +146,7 @@ static std::string get_dboard_pp_string(const std::string &type, property_tree::
static std::string get_rfnoc_pp_string(property_tree::sptr tree, const fs_path &path){
std::stringstream ss;
ss << "RFNoC blocks on this device:" << std::endl << std::endl;
- BOOST_FOREACH(const std::string &name, tree->list(path)){
+ for(const std::string &name: tree->list(path)){
ss << "* " << name << std::endl;
}
return ss.str();
@@ -149,10 +155,14 @@ static std::string get_rfnoc_pp_string(property_tree::sptr tree, const fs_path &
static std::string get_mboard_pp_string(property_tree::sptr tree, const fs_path &path){
std::stringstream ss;
ss << boost::format("Mboard: %s") % (tree->access<std::string>(path / "name").get()) << std::endl;
- //ss << std::endl;
- usrp::mboard_eeprom_t mb_eeprom = tree->access<usrp::mboard_eeprom_t>(path / "eeprom").get();
- BOOST_FOREACH(const std::string &key, mb_eeprom.keys()){
- if (not mb_eeprom[key].empty()) ss << boost::format("%s: %s") % key % mb_eeprom[key] << std::endl;
+
+ if (tree->exists(path / "eeprom")){
+ usrp::mboard_eeprom_t mb_eeprom = tree->access<usrp::mboard_eeprom_t>(path / "eeprom").get();
+ for(const std::string &key: mb_eeprom.keys()){
+ if (not mb_eeprom[key].empty()) ss << boost::format("%s: %s") % key % mb_eeprom[key] << std::endl;
+ }
+ } else {
+ ss << "No mboard EEPROM found." << std::endl;
}
if (tree->exists(path / "fw_version")){
ss << "FW Version: " << tree->access<std::string>(path / "fw_version").get() << std::endl;
@@ -160,6 +170,9 @@ static std::string get_mboard_pp_string(property_tree::sptr tree, const fs_path
if (tree->exists(path / "fpga_version")){
ss << "FPGA Version: " << tree->access<std::string>(path / "fpga_version").get() << std::endl;
}
+ if (tree->exists(path / "fpga_version_hash")){
+ ss << "FPGA git hash: " << tree->access<std::string>(path / "fpga_version_hash").get() << std::endl;
+ }
if (tree->exists(path / "xbar")){
ss << "RFNoC capable: Yes" << std::endl;
}
@@ -173,27 +186,33 @@ static std::string get_mboard_pp_string(property_tree::sptr tree, const fs_path
const std::vector< std::string > clock_sources = tree->access<std::vector<std::string> >(path / "clock_source" / "options").get();
ss << "Clock sources: " << prop_names_to_pp_string(clock_sources) << std::endl;
}
- ss << "Sensors: " << prop_names_to_pp_string(tree->list(path / "sensors")) << std::endl;
+ if (tree->exists(path / "sensors")){
+ ss << "Sensors: " << prop_names_to_pp_string(tree->list(path / "sensors")) << std::endl;
+ }
if (tree->exists(path / "rx_dsps")){
- BOOST_FOREACH(const std::string &name, tree->list(path / "rx_dsps")){
+ for(const std::string &name: tree->list(path / "rx_dsps")){
ss << make_border(get_dsp_pp_string("RX", tree, path / "rx_dsps" / name));
}
}
- BOOST_FOREACH(const std::string &name, tree->list(path / "dboards")){
- ss << make_border(get_dboard_pp_string("RX", tree, path / "dboards" / name));
- }
- if (tree->exists(path / "tx_dsps")){
- BOOST_FOREACH(const std::string &name, tree->list(path / "tx_dsps")){
- ss << make_border(get_dsp_pp_string("TX", tree, path / "tx_dsps" / name));
+ if (tree->exists(path / "dboards")) {
+ for(const std::string &name: tree->list(path / "dboards")){
+ ss << make_border(get_dboard_pp_string("RX", tree, path / "dboards" / name));
+ }
+ if (tree->exists(path / "tx_dsps")){
+ for(const std::string &name: tree->list(path / "tx_dsps")){
+ ss << make_border(get_dsp_pp_string("TX", tree, path / "tx_dsps" / name));
+ }
+ }
+ for(const std::string &name: tree->list(path / "dboards")){
+ ss << make_border(get_dboard_pp_string("TX", tree, path / "dboards" / name));
}
}
- BOOST_FOREACH(const std::string &name, tree->list(path / "dboards")){
- ss << make_border(get_dboard_pp_string("TX", tree, path / "dboards" / name));
- }
+ if (tree->exists(path / "xbar")){
ss << make_border(get_rfnoc_pp_string(tree, path / "xbar"));
+ }
}
- catch (const uhd::lookup_error&) {
- /* nop */
+ catch (const uhd::lookup_error& ex) {
+ std::cout << "Exited device probe on " << ex.what() << std::endl;
}
return ss.str();
}
@@ -203,7 +222,7 @@ static std::string get_device_pp_string(property_tree::sptr tree){
std::stringstream ss;
ss << boost::format("Device: %s") % (tree->access<std::string>("/name").get()) << std::endl;
//ss << std::endl;
- BOOST_FOREACH(const std::string &name, tree->list("/mboards")){
+ for(const std::string &name: tree->list("/mboards")){
ss << make_border(get_mboard_pp_string(tree, "/mboards/" + name));
}
return ss.str();
@@ -211,7 +230,7 @@ static std::string get_device_pp_string(property_tree::sptr tree){
void print_tree(const uhd::fs_path &path, uhd::property_tree::sptr tree){
std::cout << path << std::endl;
- BOOST_FOREACH(const std::string &name, tree->list(path)){
+ for(const std::string &name: tree->list(path)){
print_tree(path / name, tree);
}
}
@@ -254,7 +273,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
if (vm.count("vector")) {
std::vector<std::string> str_vector = tree->access< std::vector<std::string> >(vm["string"].as<std::string>()).get();
std::cout << "(";
- BOOST_FOREACH(const std::string &str, str_vector) {
+ for(const std::string &str: str_vector) {
std::cout << str << ",";
}
std::cout << ")" << std::endl;
diff --git a/host/utils/usrp1_eeprom.h b/host/utils/usrp1_eeprom.h
new file mode 100644
index 000000000..d544073b0
--- /dev/null
+++ b/host/utils/usrp1_eeprom.h
@@ -0,0 +1,15 @@
+unsigned const char usrp1_eeprom_bin[] = {
+ 0xc2, 0xfe, 0xff, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7b, 0x00, 0x00,
+ 0x02, 0x00, 0x6b, 0x12, 0x00, 0x08, 0x80, 0xfe, 0x75, 0x80, 0x38, 0x75,
+ 0xb2, 0x3b, 0x75, 0xa0, 0xc0, 0x75, 0xb4, 0xcf, 0x75, 0xb1, 0xf0, 0x75,
+ 0xb6, 0xf8, 0x90, 0xe6, 0x8a, 0xe4, 0xf0, 0x00, 0x90, 0xe6, 0x80, 0xe4,
+ 0xf0, 0x7f, 0x00, 0xef, 0x24, 0xe0, 0xf5, 0x82, 0xe4, 0x34, 0xe1, 0xf5,
+ 0x83, 0xe4, 0xf0, 0x0f, 0xbf, 0x10, 0xf0, 0x7e, 0x00, 0x7f, 0x00, 0x0e,
+ 0xbe, 0x00, 0x01, 0x0f, 0xef, 0x30, 0xe7, 0xf7, 0x63, 0xa0, 0x40, 0x80,
+ 0xf2, 0x78, 0x00, 0xe8, 0x44, 0x00, 0x60, 0x0c, 0x79, 0x00, 0x90, 0x18,
+ 0x00, 0xe4, 0xf0, 0xa3, 0xd8, 0xfc, 0xd9, 0xfa, 0xd0, 0x83, 0xd0, 0x82,
+ 0xf6, 0xd8, 0xfd, 0xc0, 0x82, 0xc0, 0x83, 0x75, 0x82, 0x00, 0x22, 0x75,
+ 0x81, 0x07, 0x12, 0x00, 0x49, 0xe5, 0x82, 0x60, 0x03, 0x02, 0x00, 0x03,
+ 0x02, 0x00, 0x03, 0x80, 0x01, 0xe6, 0x00, 0x00
+};
+unsigned int usrp1_eeprom_bin_len = 140;
diff --git a/host/utils/usrp_cal_utils.hpp b/host/utils/usrp_cal_utils.hpp
index ccdb0a61d..0b807e341 100644
--- a/host/utils/usrp_cal_utils.hpp
+++ b/host/utils/usrp_cal_utils.hpp
@@ -21,7 +21,6 @@
#include <uhd/usrp/dboard_eeprom.hpp>
#include <uhd/utils/paths.hpp>
#include <uhd/utils/algorithm.hpp>
-#include <uhd/utils/msg.hpp>
#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <iostream>
@@ -303,9 +302,9 @@ static uhd::usrp::multi_usrp::sptr setup_usrp_for_cal(std::string &args, std::st
usrp->set_tx_subdev_spec(subdev);
usrp->set_rx_subdev_spec(subdev);
}
- UHD_MSG(status) << "Running calibration for " << usrp->get_tx_subdev_name(0) << std::endl;
+    std::cout << "Running calibration for " << usrp->get_tx_subdev_name(0) << std::endl;
serial = get_serial(usrp, "tx");
- UHD_MSG(status) << "Daughterboard serial: " << serial << std::endl;
+    std::cout << "Daughterboard serial: " << serial << std::endl;
//set the antennas to cal
if (not uhd::has(usrp->get_rx_antennas(), "CAL") or not uhd::has(usrp->get_tx_antennas(), "CAL"))
diff --git a/host/utils/usrp_e3x0_network_mode.cpp b/host/utils/usrp_e3x0_network_mode.cpp
index dae4b6ff7..881c6c8ca 100644
--- a/host/utils/usrp_e3x0_network_mode.cpp
+++ b/host/utils/usrp_e3x0_network_mode.cpp
@@ -19,7 +19,6 @@
#include <uhd/device.hpp>
#include <uhd/exception.hpp>
-#include <uhd/utils/msg.hpp>
#include <uhd/transport/if_addrs.hpp>
#include <boost/program_options.hpp>
@@ -69,11 +68,11 @@ int main(int argc, char *argv[])
uhd::usrp::e300::network_server::sptr server = uhd::usrp::e300::network_server::make(args);
server->run();
} catch (uhd::assertion_error &e) {
- UHD_MSG(error) << "This executable is supposed to run on the device, not on the host." << std::endl
+ std::cout << "This executable is supposed to run on the device, not on the host." << std::endl
<< "Please refer to the manual section on operating your e3x0 device in network mode." << std::endl;
return EXIT_FAILURE;
} catch (uhd::runtime_error &e) {
- UHD_MSG(error) << e.what() << std::endl;
+ std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
diff --git a/host/utils/usrp_n2xx_net_burner.py b/host/utils/usrp_n2xx_net_burner.py
index 5605b0028..372078f91 100755
--- a/host/utils/usrp_n2xx_net_burner.py
+++ b/host/utils/usrp_n2xx_net_burner.py
@@ -16,8 +16,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
-# TODO: make it autodetect UHD devices
-
import optparse
import math
import os
@@ -29,72 +27,7 @@ import time
import platform
import subprocess
-########################################################################
-# constants
-########################################################################
-UDP_FW_UPDATE_PORT = 49154
-UDP_MAX_XFER_BYTES = 1024
-UDP_TIMEOUT = 3
-UDP_POLL_INTERVAL = 0.10 #in seconds
-
-USRP2_FW_PROTO_VERSION = 7 #should be unused after r6
-
-#from bootloader_utils.h
-
-FPGA_IMAGE_SIZE_BYTES = 1572864
-FW_IMAGE_SIZE_BYTES = 31744
-SAFE_FPGA_IMAGE_LOCATION_ADDR = 0x00000000
-SAFE_FW_IMAGE_LOCATION_ADDR = 0x003F0000
-PROD_FPGA_IMAGE_LOCATION_ADDR = 0x00180000
-PROD_FW_IMAGE_LOCATION_ADDR = 0x00300000
-
-FLASH_DATA_PACKET_SIZE = 256
-
-#see fw_common.h
-FLASH_ARGS_FMT = '!LLLLL256s'
-FLASH_INFO_FMT = '!LLLLL256x'
-FLASH_IP_FMT = '!LLLL260x'
-FLASH_HW_REV_FMT = '!LLLL260x'
-
-n2xx_revs = {
- 0x0a00: ["n200_r3", "n200_r2"],
- 0x0a10: ["n200_r4"],
- 0x0a01: ["n210_r3", "n210_r2"],
- 0x0a11: ["n210_r4"]
- }
-
-class update_id_t:
- USRP2_FW_UPDATE_ID_WAT = ord(' ')
- USRP2_FW_UPDATE_ID_OHAI_LOL = ord('a')
- USRP2_FW_UPDATE_ID_OHAI_OMG = ord('A')
- USRP2_FW_UPDATE_ID_WATS_TEH_FLASH_INFO_LOL = ord('f')
- USRP2_FW_UPDATE_ID_HERES_TEH_FLASH_INFO_OMG = ord('F')
- USRP2_FW_UPDATE_ID_ERASE_TEH_FLASHES_LOL = ord('e')
- USRP2_FW_UPDATE_ID_ERASING_TEH_FLASHES_OMG = ord('E')
- USRP2_FW_UPDATE_ID_R_U_DONE_ERASING_LOL = ord('d')
- USRP2_FW_UPDATE_ID_IM_DONE_ERASING_OMG = ord('D')
- USRP2_FW_UPDATE_ID_NOPE_NOT_DONE_ERASING_OMG = ord('B')
- USRP2_FW_UPDATE_ID_WRITE_TEH_FLASHES_LOL = ord('w')
- USRP2_FW_UPDATE_ID_WROTE_TEH_FLASHES_OMG = ord('W')
- USRP2_FW_UPDATE_ID_READ_TEH_FLASHES_LOL = ord('r')
- USRP2_FW_UPDATE_ID_KK_READ_TEH_FLASHES_OMG = ord('R')
- USRP2_FW_UPDATE_ID_RESET_MAH_COMPUTORZ_LOL = ord('s')
- USRP2_FW_UPDATE_ID_RESETTIN_TEH_COMPUTORZ_OMG = ord('S')
- USRP2_FW_UPDATE_ID_I_CAN_HAS_HW_REV_LOL = ord('v')
- USRP2_FW_UPDATE_ID_HERES_TEH_HW_REV_OMG = ord('V')
- USRP2_FW_UPDATE_ID_KTHXBAI = ord('~')
-
-_seq = -1
-def seq():
- global _seq
- _seq = _seq+1
- return _seq
-
-########################################################################
-# print equivalent uhd_image_loader command
-########################################################################
def print_image_loader_warning(fw, fpga, reset, safe, addr):
-
# Newline + indent
if platform.system() == "Windows":
nl = " ^\n "
@@ -121,8 +54,7 @@ def print_image_loader_warning(fw, fpga, reset, safe, addr):
print("")
print("************************************************************************************************")
- print("WARNING: This utility will be removed in an upcoming version of UHD. In the future, use")
- print(" this command:")
+ print("ERROR: This utility has been removed in this version of UHD. Use this command:")
print("")
print(uhd_image_loader)
print("")
@@ -130,383 +62,16 @@ def print_image_loader_warning(fw, fpga, reset, safe, addr):
print("")
########################################################################
-# helper functions
-########################################################################
-def unpack_flash_args_fmt(s):
- return struct.unpack(FLASH_ARGS_FMT, s) #(proto_ver, pktid, seq, flash_addr, length, data)
-
-def unpack_flash_info_fmt(s):
- return struct.unpack(FLASH_INFO_FMT, s) #(proto_ver, pktid, seq, sector_size_bytes, memory_size_bytes)
-
-def unpack_flash_ip_fmt(s):
- return struct.unpack(FLASH_IP_FMT, s) #(proto_ver, pktid, seq, ip_addr)
-
-def unpack_flash_hw_rev_fmt(s):
- return struct.unpack(FLASH_HW_REV_FMT, s) #proto_ver, pktid, seq, hw_rev
-
-def pack_flash_args_fmt(proto_ver, pktid, seq, flash_addr, length, data=bytes()):
- return struct.pack(FLASH_ARGS_FMT, proto_ver, pktid, seq, flash_addr, length, data)
-
-def pack_flash_info_fmt(proto_ver, pktid, seq, sector_size_bytes, memory_size_bytes):
- return struct.pack(FLASH_INFO_FMT, proto_ver, pktid, seq, sector_size_bytes, memory_size_bytes)
-
-def pack_flash_hw_rev_fmt(proto_ver, pktid, seq, hw_rev):
- return struct.pack(FLASH_HW_REV_FMT, proto_ver, pktid, seq, hw_rev)
-
-def is_valid_fpga_image(fpga_image):
- for i in range(0,63):
- if fpga_image[i:i+1] == bytes(b'\xFF'): continue
- if fpga_image[i:i+2] == bytes(b'\xAA\x99'): return True
- return False
-
-def is_valid_fw_image(fw_image):
- return fw_image[:4] == bytes(b'\x0B\x0B\x0B\x0B')
-
-
-########################################################################
-# interface discovery and device enumeration
-########################################################################
-def command(*args):
- p = subprocess.Popen(
- args,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
- ret = p.wait()
- verbose = p.stdout.read().decode('utf-8')
- if ret != 0: raise Exception(verbose)
- return verbose
-
-def get_interfaces():
- if(platform.system() is "Windows"): return win_get_interfaces()
- else: return unix_get_interfaces()
-
-def unix_get_interfaces():
- ifconfig = command("/sbin/ifconfig")
- ip_addr_re = "cast\D*(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"
- bcasts = re.findall(ip_addr_re, ifconfig)
- return bcasts
-
-def win_get_interfaces():
- from ctypes import Structure, windll, sizeof
- from ctypes import POINTER, byref
- from ctypes import c_ulong, c_uint, c_ubyte, c_char
- MAX_ADAPTER_DESCRIPTION_LENGTH = 128
- MAX_ADAPTER_NAME_LENGTH = 256
- MAX_ADAPTER_ADDRESS_LENGTH = 8
- class IP_ADDR_STRING(Structure):
- pass
- LP_IP_ADDR_STRING = POINTER(IP_ADDR_STRING)
- IP_ADDR_STRING._fields_ = [
- ("next", LP_IP_ADDR_STRING),
- ("ipAddress", c_char * 16),
- ("ipMask", c_char * 16),
- ("context", c_ulong)]
- class IP_ADAPTER_INFO (Structure):
- pass
- LP_IP_ADAPTER_INFO = POINTER(IP_ADAPTER_INFO)
- IP_ADAPTER_INFO._fields_ = [
- ("next", LP_IP_ADAPTER_INFO),
- ("comboIndex", c_ulong),
- ("adapterName", c_char * (MAX_ADAPTER_NAME_LENGTH + 4)),
- ("description", c_char * (MAX_ADAPTER_DESCRIPTION_LENGTH + 4)),
- ("addressLength", c_uint),
- ("address", c_ubyte * MAX_ADAPTER_ADDRESS_LENGTH),
- ("index", c_ulong),
- ("type", c_uint),
- ("dhcpEnabled", c_uint),
- ("currentIpAddress", LP_IP_ADDR_STRING),
- ("ipAddressList", IP_ADDR_STRING),
- ("gatewayList", IP_ADDR_STRING),
- ("dhcpServer", IP_ADDR_STRING),
- ("haveWins", c_uint),
- ("primaryWinsServer", IP_ADDR_STRING),
- ("secondaryWinsServer", IP_ADDR_STRING),
- ("leaseObtained", c_ulong),
- ("leaseExpires", c_ulong)]
- GetAdaptersInfo = windll.iphlpapi.GetAdaptersInfo
- GetAdaptersInfo.restype = c_ulong
- GetAdaptersInfo.argtypes = [LP_IP_ADAPTER_INFO, POINTER(c_ulong)]
- adapterList = (IP_ADAPTER_INFO * 10)()
- buflen = c_ulong(sizeof(adapterList))
- rc = GetAdaptersInfo(byref(adapterList[0]), byref(buflen))
- if rc == 0:
- for a in adapterList:
- adNode = a.ipAddressList
- while True:
- #convert ipAddr and ipMask into hex addrs that can be turned into a bcast addr
- try:
- ipAddr = adNode.ipAddress.decode()
- ipMask = adNode.ipMask.decode()
- except: ipAddr = None
- if ipAddr and ipMask:
- hexAddr = struct.unpack("<L", socket.inet_aton(ipAddr))[0]
- hexMask = struct.unpack("<L", socket.inet_aton(ipMask))[0]
- if(hexAddr and hexMask): #don't broadcast on 255.255.255.255, that's just lame
- yield socket.inet_ntoa(struct.pack("<L", (hexAddr & hexMask) | (~hexMask) & 0xFFFFFFFF))
- try: adNode = adNode.next
- except: break
- if not adNode: break
-
-def enumerate_devices():
- for bcast_addr in get_interfaces():
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
- sock.settimeout(0.1)
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_OHAI_LOL, 0, 0, 0)
- sock.sendto(out_pkt, (bcast_addr, UDP_FW_UPDATE_PORT))
- still_goin = True
- while(still_goin):
- try:
- pkt = sock.recv(UDP_MAX_XFER_BYTES)
- (proto_ver, pktid, rxseq, ip_addr) = unpack_flash_ip_fmt(pkt)
- if(pktid == update_id_t.USRP2_FW_UPDATE_ID_OHAI_OMG):
- use_addr = socket.inet_ntoa(struct.pack("<L", socket.ntohl(ip_addr)))
- burner = burner_socket(use_addr, True)
- yield "%s (%s)" % (socket.inet_ntoa(struct.pack("<L", socket.ntohl(ip_addr))), n2xx_revs[burner.get_hw_rev()][0])
- except socket.timeout:
- still_goin = False
-
-########################################################################
# Burner class, holds a socket and send/recv routines
########################################################################
class burner_socket(object):
- def __init__(self, addr, quiet):
- self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ def __init__(self, addr):
self._addr = addr
- self._quiet = quiet
- self._sock.settimeout(UDP_TIMEOUT)
- self._sock.connect((addr, UDP_FW_UPDATE_PORT))
- self.set_callbacks(lambda *a: None, lambda *a: None)
- self.init_update(quiet) #check that the device is there
- self.get_hw_rev()
-
- def set_callbacks(self, progress_cb, status_cb):
- self._progress_cb = progress_cb
- self._status_cb = status_cb
-
- def send_and_recv(self, pkt):
- self._sock.send(pkt)
- return self._sock.recv(UDP_MAX_XFER_BYTES)
-
- #just here to validate comms
- def init_update(self,quiet):
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_OHAI_LOL, seq(), 0, 0)
- try: in_pkt = self.send_and_recv(out_pkt)
- except socket.timeout: raise Exception("No response from device")
- (proto_ver, pktid, rxseq, ip_addr) = unpack_flash_ip_fmt(in_pkt)
- if pktid == update_id_t.USRP2_FW_UPDATE_ID_OHAI_OMG:
- if not quiet: print("USRP-N2XX found.")
- else:
- raise Exception("Invalid reply received from device.")
-
- def get_hw_rev(self):
- out_pkt = pack_flash_hw_rev_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_I_CAN_HAS_HW_REV_LOL, seq(), 0)
- in_pkt = self.send_and_recv(out_pkt)
- (proto_ver, pktid, rxseq, hw_rev) = unpack_flash_hw_rev_fmt(in_pkt)
- if(pktid != update_id_t.USRP2_FW_UPDATE_ID_HERES_TEH_HW_REV_OMG): hw_rev = 0
- return socket.ntohs(hw_rev)
-
- memory_size_bytes = 0
- sector_size_bytes = 0
- def get_flash_info(self):
- if (self.memory_size_bytes != 0) and (self.sector_size_bytes != 0):
- return (self.memory_size_bytes, self.sector_size_bytes)
-
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_WATS_TEH_FLASH_INFO_LOL, seq(), 0, 0)
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, self.sector_size_bytes, self.memory_size_bytes) = unpack_flash_info_fmt(in_pkt)
-
- if pktid != update_id_t.USRP2_FW_UPDATE_ID_HERES_TEH_FLASH_INFO_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
-
- return (self.memory_size_bytes, self.sector_size_bytes)
def burn_fw(self, fw, fpga, reset, safe, check_rev=True):
+ " Just a dummy lol "
print_image_loader_warning(fw, fpga, reset, safe, self._addr)
- (flash_size, sector_size) = self.get_flash_info()
- hw_rev = self.get_hw_rev()
-
- if hw_rev in n2xx_revs: print("Hardware type: %s" % n2xx_revs[hw_rev][0])
- print("Flash size: %i\nSector size: %i\n" % (flash_size, sector_size))
-
- if fpga:
- #validate fpga image name against hardware rev
- if(check_rev and hw_rev != 0 and not any(name in fpga for name in n2xx_revs[hw_rev])):
- raise Exception("Error: incorrect FPGA image version. Please use the correct image for device %s" % n2xx_revs[hw_rev][0])
-
- if safe: image_location = SAFE_FPGA_IMAGE_LOCATION_ADDR
- else: image_location = PROD_FPGA_IMAGE_LOCATION_ADDR
-
- fpga_file = open(fpga, 'rb')
- fpga_image = fpga_file.read()
-
- if len(fpga_image) > FPGA_IMAGE_SIZE_BYTES:
- raise Exception("Error: FPGA image file too large.")
-
- if not is_valid_fpga_image(fpga_image):
- raise Exception("Error: Invalid FPGA image file.")
-
- if (len(fpga_image) + image_location) > flash_size:
- raise Exception("Error: Cannot write past end of device")
-
- print("Begin FPGA write: this should take about 1 minute...")
- start_time = time.time()
- self.erase_image(image_location, FPGA_IMAGE_SIZE_BYTES)
- self.write_image(fpga_image, image_location)
- self.verify_image(fpga_image, image_location)
- print("Time elapsed: %f seconds"%(time.time() - start_time))
- print("\n\n")
-
- if fw:
- if safe: image_location = SAFE_FW_IMAGE_LOCATION_ADDR
- else: image_location = PROD_FW_IMAGE_LOCATION_ADDR
-
- fw_file = open(fw, 'rb')
- fw_image = fw_file.read()
-
- if len(fw_image) > FW_IMAGE_SIZE_BYTES:
- raise Exception("Error: Firmware image file too large.")
-
- if not is_valid_fw_image(fw_image):
- raise Exception("Error: Invalid firmware image file.")
-
- if (len(fw_image) + image_location) > flash_size:
- raise Exception("Error: Cannot write past end of device")
-
- print("Begin firmware write: this should take about 1 second...")
- start_time = time.time()
- self.erase_image(image_location, FW_IMAGE_SIZE_BYTES)
- self.write_image(fw_image, image_location)
- self.verify_image(fw_image, image_location)
- print("Time elapsed: %f seconds"%(time.time() - start_time))
- print("\n\n")
-
- if reset: self.reset_usrp()
-
- def write_image(self, image, addr):
- print("Writing image")
- self._status_cb("Writing")
- writedata = image
- #we split the image into smaller (256B) bits and send them down the wire
- (mem_size, sector_size) = self.get_flash_info()
- if (addr + len(writedata)) > mem_size:
- raise Exception("Error: Cannot write past end of device")
-
- while writedata:
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_WRITE_TEH_FLASHES_LOL, seq(), addr, FLASH_DATA_PACKET_SIZE, writedata[:FLASH_DATA_PACKET_SIZE])
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
-
- if pktid != update_id_t.USRP2_FW_UPDATE_ID_WROTE_TEH_FLASHES_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
-
- writedata = writedata[FLASH_DATA_PACKET_SIZE:]
- addr += FLASH_DATA_PACKET_SIZE
- self._progress_cb(float(len(image)-len(writedata))/len(image))
-
- def verify_image(self, image, addr):
- print("Verifying data")
- self._status_cb("Verifying")
- readsize = len(image)
- readdata = bytes()
- while readsize > 0:
- if readsize < FLASH_DATA_PACKET_SIZE: thisreadsize = readsize
- else: thisreadsize = FLASH_DATA_PACKET_SIZE
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_READ_TEH_FLASHES_LOL, seq(), addr, thisreadsize)
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
-
- if pktid != update_id_t.USRP2_FW_UPDATE_ID_KK_READ_TEH_FLASHES_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
-
- readdata += data[:thisreadsize]
- readsize -= FLASH_DATA_PACKET_SIZE
- addr += FLASH_DATA_PACKET_SIZE
- self._progress_cb(float(len(readdata))/len(image))
-
- print("Read back %i bytes" % len(readdata))
- # print readdata
-
- # for i in range(256, 512):
- # print "out: %i in: %i" % (ord(image[i]), ord(readdata[i]))
-
- if readdata != image:
- raise Exception("Verify failed. Image did not write correctly.")
- else:
- print("Success.")
-
- def read_image(self, image, size, addr):
- print("Reading image")
- readsize = size
- readdata = str()
- while readsize > 0:
- if readsize < FLASH_DATA_PACKET_SIZE: thisreadsize = readsize
- else: thisreadsize = FLASH_DATA_PACKET_SIZE
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_READ_TEH_FLASHES_LOL, seq(), addr, thisreadsize)
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
-
- if pktid != update_id_t.USRP2_FW_UPDATE_ID_KK_READ_TEH_FLASHES_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
-
- readdata += data[:thisreadsize]
- readsize -= FLASH_DATA_PACKET_SIZE
- addr += FLASH_DATA_PACKET_SIZE
-
- print("Read back %i bytes" % len(readdata))
-
- #write to disk
- f = open(image, 'w')
- f.write(readdata)
- f.close()
-
- def reset_usrp(self):
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_RESET_MAH_COMPUTORZ_LOL, seq(), 0, 0)
- try: in_pkt = self.send_and_recv(out_pkt)
- except socket.timeout: return
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
- if pktid == update_id_t.USRP2_FW_UPDATE_ID_RESETTIN_TEH_COMPUTORZ_OMG:
- raise Exception("Device failed to reset.")
-
- def erase_image(self, addr, length):
- self._status_cb("Erasing")
- #get flash info first
- (flash_size, sector_size) = self.get_flash_info()
- if (addr + length) > flash_size:
- raise Exception("Cannot erase past end of device")
-
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_ERASE_TEH_FLASHES_LOL, seq(), addr, length)
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
-
- if pktid != update_id_t.USRP2_FW_UPDATE_ID_ERASING_TEH_FLASHES_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
-
- print("Erasing %i bytes at %i" % (length, addr))
- start_time = time.time()
-
- #now wait for it to finish
- while(True):
- out_pkt = pack_flash_args_fmt(USRP2_FW_PROTO_VERSION, update_id_t.USRP2_FW_UPDATE_ID_R_U_DONE_ERASING_LOL, seq(), 0, 0)
- in_pkt = self.send_and_recv(out_pkt)
-
- (proto_ver, pktid, rxseq, flash_addr, rxlength, data) = unpack_flash_args_fmt(in_pkt)
-
- if pktid == update_id_t.USRP2_FW_UPDATE_ID_IM_DONE_ERASING_OMG: break
- elif pktid != update_id_t.USRP2_FW_UPDATE_ID_NOPE_NOT_DONE_ERASING_OMG:
- raise Exception("Invalid reply %c from device." % (chr(pktid)))
- time.sleep(0.01) #decrease network overhead by waiting a bit before polling
- self._progress_cb(min(1.0, (time.time() - start_time)/(length/80e3)))
-
-
########################################################################
# command line options
########################################################################
@@ -529,47 +94,4 @@ def get_options():
########################################################################
if __name__=='__main__':
options = get_options()
-
- if options.list:
- print('Possible network devices:')
- print(' ' + '\n '.join(enumerate_devices()))
- #enumerate_devices()
- exit()
-
- if not options.addr: raise Exception('no address specified')
-
- if not options.fpga and not options.fw and not options.reset: raise Exception('Must specify either a firmware image or FPGA image, and/or reset.')
-
- if options.overwrite_safe and not options.read:
- print("Are you REALLY, REALLY sure you want to overwrite the safe image? This is ALMOST ALWAYS a terrible idea.")
- print("If your image is faulty, your USRP2+ will become a brick until reprogrammed via JTAG.")
-
- python_major_version = int(platform.python_version_tuple()[0])
- if python_major_version > 2:
- response = input("""Type "yes" to continue, or anything else to quit: """)
- else:
- response = raw_input("""Type "yes" to continue, or anything else to quit: """)
- if response != "yes": sys.exit(0)
-
- burner = burner_socket(addr=options.addr,quiet=False)
-
- if options.read:
- if options.fw:
- file = options.fw
- if os.path.isfile(file):
- response = raw_input("File already exists -- overwrite? (y/n) ")
- if response != "y": sys.exit(0)
- size = FW_IMAGE_SIZE_BYTES
- addr = SAFE_FW_IMAGE_LOCATION_ADDR if options.overwrite_safe else PROD_FW_IMAGE_LOCATION_ADDR
- burner.read_image(file, size, addr)
-
- if options.fpga:
- file = options.fpga
- if os.path.isfile(file):
- response = input("File already exists -- overwrite? (y/n) ")
- if response != "y": sys.exit(0)
- size = FPGA_IMAGE_SIZE_BYTES
- addr = SAFE_FPGA_IMAGE_LOCATION_ADDR if options.overwrite_safe else PROD_FPGA_IMAGE_LOCATION_ADDR
- burner.read_image(file, size, addr)
-
- else: burner.burn_fw(fw=options.fw, fpga=options.fpga, reset=options.reset, safe=options.overwrite_safe, check_rev=not options.dont_check_rev)
+ burner_socket(options.addr).burn_fw(fw=options.fw, fpga=options.fpga, reset=options.reset, safe=options.overwrite_safe, check_rev=not options.dont_check_rev)
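Note: the Python burner above is reduced to printing the image-loader notice; actual flashing is expected to go through uhd_image_loader. The flags below are assumptions based on that CLI (they are not shown verbatim in this diff), and the address and image paths are placeholders. A minimal sketch of the replacement invocation:

    # hypothetical replacement for the removed burner, calling uhd_image_loader
    import subprocess
    subprocess.check_call([
        "uhd_image_loader",
        "--args=type=usrp2,addr=192.168.10.2",   # placeholder device address
        "--fw-path=usrp_n210_fw.bin",            # placeholder firmware image
        "--fpga-path=usrp_n210_r4_fpga.bin",     # placeholder FPGA image
    ])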
diff --git a/host/utils/usrp_n2xx_net_burner_gui.py b/host/utils/usrp_n2xx_net_burner_gui.py
deleted file mode 100755
index bad065f08..000000000
--- a/host/utils/usrp_n2xx_net_burner_gui.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011 Ettus Research LLC
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-
-import usrp_n2xx_net_burner #import implementation
-try:
- import tkinter, tkinter.filedialog, tkinter.font, tkinter.messagebox
-except ImportError:
- import tkFileDialog, tkFont, tkMessageBox
- import Tkinter as tkinter
- tkinter.filedialog = tkFileDialog
- tkinter.font = tkFont
- tkinter.messagebox = tkMessageBox
-import os
-
-class BinFileEntry(tkinter.Frame):
- """
- Simple file entry widget for getting the file path of bin files.
- Combines a label, entry, and button with file dialog callback.
- """
-
- def __init__(self, root, what, def_path=''):
- self._what = what
- tkinter.Frame.__init__(self, root)
- tkinter.Label(self, text=what+":").pack(side=tkinter.LEFT)
- self._entry = tkinter.Entry(self, width=50)
- self._entry.insert(tkinter.END, def_path)
- self._entry.pack(side=tkinter.LEFT)
- tkinter.Button(self, text="...", command=self._button_cb).pack(side=tkinter.LEFT)
-
- def _button_cb(self):
- filename = tkinter.filedialog.askopenfilename(
- parent=self,
- filetypes=[('bin files', '*.bin'), ('all files', '*.*')],
- title="Select bin file for %s"%self._what,
- initialdir=os.path.dirname(self.get_filename()),
- )
-
- # open file on your own
- if filename:
- self._entry.delete(0, tkinter.END)
- self._entry.insert(0, filename)
-
- def get_filename(self):
- return self._entry.get()
-
-class ProgressBar(tkinter.Canvas):
- """
- A simple implementation of a progress bar.
- Draws rectangle that fills from left to right.
- """
-
- def __init__(self, root, width=500, height=20):
- self._width = width
- self._height = height
- tkinter.Canvas.__init__(self, root, relief="sunken", borderwidth=2, width=self._width-2, height=self._height-2)
- self._last_fill_pixels = None
- self.set(0.0)
-
- def set(self, frac):
- """
- Update the progress where fraction is between 0.0 and 1.0
- """
- #determine the number of pixels to draw
- fill_pixels = int(round(self._width*frac))
- if fill_pixels == self._last_fill_pixels: return
- self._last_fill_pixels = fill_pixels
-
- #draw a rectangle representing the progress
- if frac: self.create_rectangle(0, 0, fill_pixels, self._height, fill="#357EC7")
- else: self.create_rectangle(0, 0, self._width, self._height, fill="#E8E8E8")
-
-class DeviceEntryWidget(tkinter.Frame):
- """
- Simple entry widget for getting the network device name.
- Combines a label, entry, and helpful text box with hints.
- """
-
- def __init__(self, root, text=''):
- tkinter.Frame.__init__(self, root)
-
- tkinter.Button(self, text="Rescan for Devices", command=self._reload_cb).pack()
-
- self._hints = tkinter.Listbox(self)
- self._hints_addrs_only = tkinter.Listbox(self)
-
- self._hints.bind("<<ListboxSelect>>", self._listbox_cb)
- self._hints_addrs_only.bind("<<ListboxSelect>>", self._listbox_cb)
-
- self._reload_cb()
- self._hints.pack(expand=tkinter.YES, fill=tkinter.X)
-
- frame = tkinter.Frame(self)
- frame.pack()
-
- tkinter.Label(frame, text="Network Address:").pack(side=tkinter.LEFT)
- self._entry = tkinter.Entry(frame, width=50)
- self._entry.insert(tkinter.END, text)
- self._entry.pack(side=tkinter.LEFT)
-
- def _reload_cb(self):
- self._hints.delete(0, tkinter.END)
- for hint in usrp_n2xx_net_burner.enumerate_devices():
- self._hints.insert(tkinter.END, hint)
- self._hints_addrs_only.insert(tkinter.END, hint.split(" (")[0])
-
- def _listbox_cb(self, event):
- try:
- sel = self._hints_addrs_only.get(self._hints.curselection()[0])
- self._entry.delete(0, tkinter.END)
- self._entry.insert(0, sel)
- except Exception as e: print(e)
-
- def get_devname(self):
- return self._entry.get()
-
-class SectionLabel(tkinter.Label):
- """
- Make a text label with bold font.
- """
-
- def __init__(self, root, text):
- tkinter.Label.__init__(self, root, text=text)
-
- #set the font bold
- f = tkinter.font.Font(font=self['font'])
- f['weight'] = 'bold'
- self['font'] = f.name
-
-class USRPN2XXNetBurnerApp(tkinter.Frame):
- """
- The top level gui application for the usrp-n2xx network burner.
- Creates entry widgets and button with callback to write images.
- """
-
- def __init__(self, root, addr, fw, fpga):
-
- tkinter.Frame.__init__(self, root)
-
- #pack the file entry widgets
- SectionLabel(self, text="Select Images").pack(pady=5)
- self._fw_img_entry = BinFileEntry(self, "Firmware Image", def_path=fw)
- self._fw_img_entry.pack()
- self._fpga_img_entry = BinFileEntry(self, "FPGA Image", def_path=fpga)
- self._fpga_img_entry.pack()
-
- #pack the destination entry widget
- SectionLabel(self, text="Select Device").pack(pady=5)
- self._net_dev_entry = DeviceEntryWidget(self, text=addr)
- self._net_dev_entry.pack()
-
- #the do it button
- SectionLabel(self, text="").pack(pady=5)
- button = tkinter.Button(self, text="Burn Images", command=self._burn)
- self._enable_input = lambda: button.configure(state=tkinter.NORMAL)
- self._disable_input = lambda: button.configure(state=tkinter.DISABLED)
- button.pack()
-
- #a progress bar to monitor the status
- progress_frame = tkinter.Frame(self)
- progress_frame.pack()
- self._status = tkinter.StringVar()
- tkinter.Label(progress_frame, textvariable=self._status).pack(side=tkinter.LEFT)
- self._pbar = ProgressBar(progress_frame)
- self._pbar.pack(side=tkinter.RIGHT, expand=True)
-
- def _burn(self):
- #grab strings from the gui
- fw = self._fw_img_entry.get_filename()
- fpga = self._fpga_img_entry.get_filename()
- addr = self._net_dev_entry.get_devname()
-
- #check input
- if not addr:
- tkinter.messagebox.showerror('Error:', 'No address specified!')
- return
- if not fw and not fpga:
- tkinter.messagebox.showerror('Error:', 'No images specified!')
- return
- if fw and not os.path.exists(fw):
- tkinter.messagebox.showerror('Error:', 'Firmware image not found!')
- return
- if fpga and not os.path.exists(fpga):
- tkinter.messagebox.showerror('Error:', 'FPGA image not found!')
- return
-
- self._disable_input()
- try:
- #make a new burner object and attempt the burner operation
- burner = usrp_n2xx_net_burner.burner_socket(addr=addr,quiet=False)
-
- #setup callbacks that update the gui
- def status_cb(status):
- self._pbar.set(0.0) #status change, reset the progress
- self._status.set("%s %s "%(status.title(), image_type))
- self.update()
- def progress_cb(progress):
- self._pbar.set(progress)
- self.update()
-
- if options.overwrite_safe:
- if tkinter.messagebox.askyesno("Overwrite safe images?", "Overwrite safe images! This is ALMOST ALWAYS a terrible idea."):
- for (image_type, fw_img, fpga_img) in (('FPGA', '', fpga), ('Firmware', fw, '')):
- burner.set_callbacks(progress_cb=progress_cb, status_cb=status_cb)
- burner.burn_fw(fw=fw_img, fpga=fpga_img, reset=False, safe=True, check_rev=not options.dont_check_rev)
-
- if tkinter.messagebox.askyesno("Burn was successful!", "Reset the device?"):
- burner.reset_usrp()
- else:
- for (image_type, fw_img, fpga_img) in (('FPGA', '', fpga), ('Firmware', fw, '')):
- burner.set_callbacks(progress_cb=progress_cb, status_cb=status_cb)
- burner.burn_fw(fw=fw_img, fpga=fpga_img, reset=False, safe=False, check_rev=not options.dont_check_rev)
-
- if tkinter.messagebox.askyesno("Burn was successful!", "Reset the device?"):
- burner.reset_usrp()
-
- except Exception as e:
- tkinter.messagebox.showerror('Verbose:', 'Error: %s'%str(e))
-
- #reset the progress bar
- self._pbar.set(0.0)
- self._status.set("")
- self._enable_input()
-
-########################################################################
-# main
-########################################################################
-if __name__=='__main__':
- options = usrp_n2xx_net_burner.get_options()
- root = tkinter.Tk()
- root.title('USRP-N2XX Net Burner')
- USRPN2XXNetBurnerApp(root, addr=options.addr, fw=options.fw, fpga=options.fpga).pack()
- root.mainloop()
- exit()
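Note on the removed GUI: it drove the flash process entirely through the two callbacks that burner_socket.set_callbacks() used to accept, a status callback receiving a short phase string ("Erasing", "Writing", "Verifying") and a progress callback receiving a fraction in [0.0, 1.0]. A minimal console-only sketch of that wiring, assuming the pre-removal burner_socket API were still present (address and image paths are placeholders):

    def status_cb(status):
        # phase change: "Erasing", "Writing" or "Verifying"
        print("\n%s..." % status)

    def progress_cb(frac):
        # frac is in [0.0, 1.0], as passed by the old erase/write/verify loops
        print("\r%3d%%" % int(frac * 100), end="")

    # burner = burner_socket(addr="192.168.10.2", quiet=False)   # pre-removal constructor
    # burner.set_callbacks(progress_cb=progress_cb, status_cb=status_cb)
    # burner.burn_fw(fw="usrp_n210_fw.bin", fpga="usrp_n210_r4_fpga.bin",
    #                reset=True, safe=False)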
diff --git a/host/utils/usrp_n2xx_simple_net_burner.cpp b/host/utils/usrp_n2xx_simple_net_burner.cpp
index 20070503c..9077ef2f6 100644
--- a/host/utils/usrp_n2xx_simple_net_burner.cpp
+++ b/host/utils/usrp_n2xx_simple_net_burner.cpp
@@ -15,171 +15,11 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <csignal>
-#include <iostream>
-#include <fstream>
-#include <time.h>
-#include <vector>
-
-#include <boost/foreach.hpp>
-#include <boost/asio.hpp>
-#include <boost/filesystem.hpp>
#include <boost/program_options.hpp>
-#include <boost/assign.hpp>
-#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
-#include <boost/algorithm/string/erase.hpp>
-#include <boost/filesystem.hpp>
-#include <boost/thread/thread.hpp>
-
-#include <uhd/exception.hpp>
-#include <uhd/property_tree.hpp>
-#include <uhd/transport/if_addrs.hpp>
-#include <uhd/transport/udp_simple.hpp>
-#include <uhd/types/dict.hpp>
-#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/paths.hpp>
-#include <uhd/utils/safe_main.hpp>
-#include <uhd/utils/safe_call.hpp>
+#include <iostream>
-namespace fs = boost::filesystem;
namespace po = boost::program_options;
-using namespace boost::algorithm;
-using namespace uhd;
-using namespace uhd::transport;
-
-#define UDP_FW_UPDATE_PORT 49154
-#define UDP_MAX_XFER_BYTES 1024
-#define UDP_TIMEOUT 3
-#define UDP_POLL_INTERVAL 0.10 //in seconds
-#define USRP2_FW_PROTO_VERSION 7 //should be unused after r6
-#define USRP2_UDP_UPDATE_PORT 49154
-#define FLASH_DATA_PACKET_SIZE 256
-#define FPGA_IMAGE_SIZE_BYTES 1572864
-#define FW_IMAGE_SIZE_BYTES 31744
-
-#define PROD_FPGA_IMAGE_LOCATION_ADDR 0x00180000
-#define SAFE_FPGA_IMAGE_LOCATION_ADDR 0x00000000
-
-#define PROD_FW_IMAGE_LOCATION_ADDR 0x00300000
-#define SAFE_FW_IMAGE_LOCATION_ADDR 0x003F0000
-
-typedef enum {
- UNKNOWN = ' ',
-
- USRP2_QUERY = 'a',
- USRP2_ACK = 'A',
-
- GET_FLASH_INFO_CMD = 'f',
- GET_FLASH_INFO_ACK = 'F',
-
- ERASE_FLASH_CMD = 'e',
- ERASE_FLASH_ACK = 'E',
-
- CHECK_ERASING_DONE_CMD = 'd',
- DONE_ERASING_ACK = 'D',
- NOT_DONE_ERASING_ACK = 'B',
-
- WRITE_FLASH_CMD = 'w',
- WRITE_FLASH_ACK = 'W',
-
- READ_FLASH_CMD = 'r',
- READ_FLASH_ACK = 'R',
-
- RESET_USRP_CMD = 's',
- RESET_USRP_ACK = 'S',
-
- GET_HW_REV_CMD = 'v',
- GET_HW_REV_ACK = 'V',
-
-} usrp2_fw_update_id_t;
-
-typedef struct {
- uint32_t proto_ver;
- uint32_t id;
- uint32_t seq;
- union {
- uint32_t ip_addr;
- uint32_t hw_rev;
- struct {
- uint32_t flash_addr;
- uint32_t length;
- uint8_t data[256];
- } flash_args;
- struct {
- uint32_t sector_size_bytes;
- uint32_t memory_size_bytes;
- } flash_info_args;
- } data;
-} usrp2_fw_update_data_t;
-
-//Mapping revision numbers to filenames
-uhd::dict<uint32_t, std::string> filename_map = boost::assign::map_list_of
- (0, "N2XX")
- (0xa, "n200_r3")
- (0x100a, "n200_r4")
- (0x10a, "n210_r3")
- (0x110a, "n210_r4")
-;
-
-uint8_t usrp2_update_data_in_mem[udp_simple::mtu];
-uint8_t fpga_image[FPGA_IMAGE_SIZE_BYTES];
-uint8_t fw_image[FW_IMAGE_SIZE_BYTES];
-
-/***********************************************************************
- * Signal handlers
- **********************************************************************/
-static int num_ctrl_c = 0;
-void sig_int_handler(int){
- num_ctrl_c++;
- if(num_ctrl_c == 1){
- std::cout << std::endl << "Are you sure you want to abort the image burning? If you do, your "
- "USRP-N Series unit will be bricked!" << std::endl
- << "Press Ctrl+C again to abort the image burning procedure." << std::endl << std::endl;
- }
- else{
- std::cout << std::endl << "Aborting. Your USRP-N Series unit will be bricked." << std::endl
- << "Refer to http://files.ettus.com/manual/page_usrp2.html#usrp2_loadflash_brick" << std::endl
- << "for details on restoring your device." << std::endl;
- exit(EXIT_FAILURE);
- }
-}
-
-/***********************************************************************
- * List all connected USRP N2XX devices
- **********************************************************************/
-void list_usrps(){
- udp_simple::sptr udp_bc_transport;
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
- uint32_t hw_rev = 0;
-
- usrp2_fw_update_data_t usrp2_ack_pkt = usrp2_fw_update_data_t();
- usrp2_ack_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- usrp2_ack_pkt.id = htonx<uint32_t>(USRP2_QUERY);
-
- std::cout << "Available USRP N2XX devices:" << std::endl;
-
- //Send UDP packets to all broadcast addresses
- BOOST_FOREACH(const if_addrs_t &if_addrs, get_if_addrs()){
- //Avoid the loopback device
- if(if_addrs.inet == boost::asio::ip::address_v4::loopback().to_string()) continue;
- udp_bc_transport = udp_simple::make_broadcast(if_addrs.bcast, BOOST_STRINGIZE(USRP2_UDP_UPDATE_PORT));
- udp_bc_transport->send(boost::asio::buffer(&usrp2_ack_pkt, sizeof(usrp2_ack_pkt)));
-
- size_t len = udp_bc_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == USRP2_ACK){
- usrp2_ack_pkt.id = htonx<uint32_t>(GET_HW_REV_CMD);
- udp_bc_transport->send(boost::asio::buffer(&usrp2_ack_pkt, sizeof(usrp2_ack_pkt)));
-
- size_t len = udp_bc_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == GET_HW_REV_ACK){
- hw_rev = ntohl(update_data_in->data.hw_rev);
- }
-
- std::cout << boost::format(" * %s (%s)\n") % udp_bc_transport->get_recv_addr() % filename_map[hw_rev];
- }
- }
-}
/***********************************************************************
* Find USRP N2XX with specified IP address and return type
@@ -225,8 +65,7 @@ void print_image_loader_warning(const std::string &fw_path,
}
std::cout << "************************************************************************************************" << std::endl
- << "WARNING: This utility will be removed in an upcoming version of UHD. In the future, use" << std::endl
- << " this command:" << std::endl
+ << "ERROR: This utility has been removed in this version of UHD. Use this command:" << std::endl
<< std::endl
<< uhd_image_loader << std::endl
<< std::endl
@@ -234,336 +73,8 @@ void print_image_loader_warning(const std::string &fw_path,
<< std::endl;
}
-/***********************************************************************
- * Find USRP N2XX with specified IP address and return type
- **********************************************************************/
-uint32_t find_usrp(udp_simple::sptr udp_transport, bool check_rev){
- uint32_t hw_rev;
- bool found_it = false;
-
- // If the user chooses to not care about the rev, simply check
- // for the presence of a USRP N2XX.
- uint32_t cmd_id = (check_rev) ? GET_HW_REV_CMD
- : USRP2_QUERY;
- uint32_t ack_id = (check_rev) ? GET_HW_REV_ACK
- : USRP2_ACK;
-
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
- usrp2_fw_update_data_t hw_info_pkt = usrp2_fw_update_data_t();
- hw_info_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- hw_info_pkt.id = htonx<uint32_t>(cmd_id);
- udp_transport->send(boost::asio::buffer(&hw_info_pkt, sizeof(hw_info_pkt)));
-
- //Loop and receive until the timeout
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == ack_id){
- hw_rev = ntohl(update_data_in->data.hw_rev);
- if(filename_map.has_key(hw_rev)){
- std::cout << boost::format("Found %s.\n\n") % filename_map[hw_rev];
- found_it = true;
- }
- else{
- if(check_rev) throw std::runtime_error("Invalid revision found.");
- else{
- hw_rev = 0;
- std::cout << "Found USRP N2XX." << std::endl;
- found_it = true;
- }
- }
- }
- if(not found_it) throw std::runtime_error("No USRP N2XX found.");
-
- return hw_rev;
-}
-
-/***********************************************************************
- * Custom filename validation functions
- **********************************************************************/
-
-void validate_custom_fpga_file(std::string rev_str, std::string& fpga_path, bool check_rev){
-
- //Check for existence of file
- if(not fs::exists(fpga_path)) throw std::runtime_error(str(boost::format("No file at specified FPGA path: %s") % fpga_path));
-
- //If user cares about revision, use revision string to detect invalid image filename
- uhd::fs_path custom_fpga_path(fpga_path);
- if(custom_fpga_path.leaf().find(rev_str) == std::string::npos and check_rev){
- throw std::runtime_error(str(boost::format("Invalid FPGA image filename at path: %s\nFilename must contain '%s' to be considered valid for this model.")
- % fpga_path % rev_str));
- }
-}
-
-void validate_custom_fw_file(std::string rev_str, std::string& fw_path, bool check_rev){
-
- //Check for existence of file
- if(not fs::exists(fw_path)) throw std::runtime_error(str(boost::format("No file at specified firmware path: %s") % fw_path));
-
- //If user cares about revision, use revision string to detect invalid image filename
- uhd::fs_path custom_fw_path(fw_path);
- if(custom_fw_path.leaf().find(erase_tail_copy(rev_str,3)) == std::string::npos and check_rev){
- throw std::runtime_error(str(boost::format("Invalid firmware image filename at path: %s\nFilename must contain '%s' to be considered valid for this model.")
- % fw_path % erase_tail_copy(rev_str,3)));
- }
-}
-
-/***********************************************************************
- * Reading and validating image binaries
- **********************************************************************/
-
-int read_fpga_image(std::string& fpga_path){
-
- //Check size of given image
- std::ifstream fpga_file(fpga_path.c_str(), std::ios::binary);
- fpga_file.seekg(0, std::ios::end);
- size_t fpga_image_size = size_t(fpga_file.tellg());
- if(fpga_image_size > FPGA_IMAGE_SIZE_BYTES){
- throw std::runtime_error(str(boost::format("FPGA image is too large. %d > %d")
- % fpga_image_size % FPGA_IMAGE_SIZE_BYTES));
- }
-
- //Check sequence of bytes in image before reading
- uint8_t fpga_test_bytes[63];
- fpga_file.seekg(0, std::ios::beg);
- fpga_file.read((char*)fpga_test_bytes,63);
- bool is_good = false;
- for(int i = 0; i < 62; i++){
- if(fpga_test_bytes[i] == 255) continue;
- else if(fpga_test_bytes[i] == 170 and
- fpga_test_bytes[i+1] == 153){
- is_good = true;
- break;
- }
- }
- if(not is_good) throw std::runtime_error("Not a valid FPGA image.");
-
- //With image validated, read into utility
- fpga_file.seekg(0, std::ios::beg);
- fpga_file.read((char*)fpga_image,fpga_image_size);
- fpga_file.close();
-
- //Return image size
- return fpga_image_size;
-}
-
-int read_fw_image(std::string& fw_path){
-
- //Check size of given image
- std::ifstream fw_file(fw_path.c_str(), std::ios::binary);
- fw_file.seekg(0, std::ios::end);
- size_t fw_image_size = size_t(fw_file.tellg());
- if(fw_image_size > FW_IMAGE_SIZE_BYTES){
- throw std::runtime_error(str(boost::format("Firmware image is too large. %d > %d")
- % fw_image_size % FW_IMAGE_SIZE_BYTES));
- }
-
- //Check sequence of bytes in image before reading
- uint8_t fw_test_bytes[4];
- fw_file.seekg(0, std::ios::beg);
- fw_file.read((char*)fw_test_bytes,4);
- for(int i = 0; i < 4; i++) if(fw_test_bytes[i] != 11) throw std::runtime_error("Not a valid firmware image.");
-
- //With image validated, read into utility
- fw_file.seekg(0, std::ios::beg);
- fw_file.read((char*)fw_image,fw_image_size);
- fw_file.close();
-
- return fw_image_size;
-}
-
-uint32_t* get_flash_info(std::string& ip_addr){
-
- uint32_t *flash_info = new uint32_t[2];
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
-
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(USRP2_UDP_UPDATE_PORT));
- usrp2_fw_update_data_t get_flash_info_pkt = usrp2_fw_update_data_t();
- get_flash_info_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- get_flash_info_pkt.id = htonx<uint32_t>(GET_FLASH_INFO_CMD);
- udp_transport->send(boost::asio::buffer(&get_flash_info_pkt, sizeof(get_flash_info_pkt)));
-
- //Loop and receive until the timeout
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == GET_FLASH_INFO_ACK){
- flash_info[0] = ntohl(update_data_in->data.flash_info_args.sector_size_bytes);
- flash_info[1] = ntohl(update_data_in->data.flash_info_args.memory_size_bytes);
- }
- else if(ntohl(update_data_in->id) != GET_FLASH_INFO_ACK){
- throw std::runtime_error(str(boost::format("Received invalid reply %d from device.\n")
- % ntohl(update_data_in->id)));
- }
-
- return flash_info;
-}
-
-/***********************************************************************
- * Image burning functions
- **********************************************************************/
-
-void erase_image(udp_simple::sptr udp_transport, bool is_fw, uint32_t memory_size, bool overwrite_safe){
-
- uint32_t image_location_addr = is_fw ? overwrite_safe ? SAFE_FW_IMAGE_LOCATION_ADDR
- : PROD_FW_IMAGE_LOCATION_ADDR
- : overwrite_safe ? SAFE_FPGA_IMAGE_LOCATION_ADDR
- : PROD_FPGA_IMAGE_LOCATION_ADDR;
- uint32_t image_size = is_fw ? FW_IMAGE_SIZE_BYTES
- : FPGA_IMAGE_SIZE_BYTES;
-
- //Making sure this won't attempt to erase past end of device
- if((image_location_addr+image_size) > memory_size) throw std::runtime_error("Cannot erase past end of device.");
-
- //UDP receive buffer
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
-
- //Setting up UDP packet
- usrp2_fw_update_data_t erase_pkt = usrp2_fw_update_data_t();
- erase_pkt.id = htonx<uint32_t>(ERASE_FLASH_CMD);
- erase_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- erase_pkt.data.flash_args.flash_addr = htonx<uint32_t>(image_location_addr);
- erase_pkt.data.flash_args.length = htonx<uint32_t>(image_size);
-
- //Begin erasing
- udp_transport->send(boost::asio::buffer(&erase_pkt, sizeof(erase_pkt)));
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == ERASE_FLASH_ACK){
- if(is_fw) std::cout << "Erasing firmware image." << std::endl;
- else std::cout << "Erasing FPGA image." << std::endl;
- }
- else if(ntohl(update_data_in->id) != ERASE_FLASH_ACK){
- throw std::runtime_error(str(boost::format("Received invalid reply %d from device.\n")
- % ntohl(update_data_in->id)));
- }
-
- //Check for erase completion
- erase_pkt.id = htonx<uint32_t>(CHECK_ERASING_DONE_CMD);
- while(true){
- udp_transport->send(boost::asio::buffer(&erase_pkt, sizeof(erase_pkt)));
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == DONE_ERASING_ACK){
- std::cout << boost::format(" * Successfully erased %d bytes at %d.\n")
- % image_size % image_location_addr;
- break;
- }
- else if(ntohl(update_data_in->id) != NOT_DONE_ERASING_ACK){
- throw std::runtime_error(str(boost::format("Received invalid reply %d from device.\n")
- % ntohl(update_data_in->id)));
- }
- }
-}
-
-void write_image(udp_simple::sptr udp_transport, bool is_fw, uint8_t* image,
- uint32_t memory_size, int image_size, bool overwrite_safe){
-
- uint32_t begin_addr = is_fw ? overwrite_safe ? SAFE_FW_IMAGE_LOCATION_ADDR
- : PROD_FW_IMAGE_LOCATION_ADDR
- : overwrite_safe ? SAFE_FPGA_IMAGE_LOCATION_ADDR
- : PROD_FPGA_IMAGE_LOCATION_ADDR;
- uint32_t current_addr = begin_addr;
- std::string type = is_fw ? "firmware" : "FPGA";
-
- //Making sure this won't attempt to write past end of device
- if(current_addr+image_size > memory_size) throw std::runtime_error("Cannot write past end of device.");
-
- //UDP receive buffer
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
-
- //Setting up UDP packet
- usrp2_fw_update_data_t write_pkt = usrp2_fw_update_data_t();
- write_pkt.id = htonx<uint32_t>(WRITE_FLASH_CMD);
- write_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- write_pkt.data.flash_args.length = htonx<uint32_t>(FLASH_DATA_PACKET_SIZE);
-
- for(int i = 0; i < ((image_size/FLASH_DATA_PACKET_SIZE)+1); i++){
- //Print progress
- std::cout << "\rWriting " << type << " image ("
- << int((double(current_addr-begin_addr)/double(image_size))*100) << "%)." << std::flush;
-
- write_pkt.data.flash_args.flash_addr = htonx<uint32_t>(current_addr);
- std::copy(image+(i*FLASH_DATA_PACKET_SIZE), image+((i+1)*FLASH_DATA_PACKET_SIZE), write_pkt.data.flash_args.data);
-
- udp_transport->send(boost::asio::buffer(&write_pkt, sizeof(write_pkt)));
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) != WRITE_FLASH_ACK){
- throw std::runtime_error(str(boost::format("Invalid reply %d from device.")
- % ntohl(update_data_in->id)));
- }
-
- current_addr += FLASH_DATA_PACKET_SIZE;
- }
- std::cout << std::flush << "\rWriting " << type << " image (100%)." << std::endl;
- std::cout << boost::format(" * Successfully wrote %d bytes.\n") % image_size;
-}
-
-void verify_image(udp_simple::sptr udp_transport, bool is_fw, uint8_t* image,
- uint32_t memory_size, int image_size, bool overwrite_safe){
-
- int current_index = 0;
- uint32_t begin_addr = is_fw ? overwrite_safe ? SAFE_FW_IMAGE_LOCATION_ADDR
- : PROD_FW_IMAGE_LOCATION_ADDR
- : overwrite_safe ? SAFE_FPGA_IMAGE_LOCATION_ADDR
- : PROD_FPGA_IMAGE_LOCATION_ADDR;
- uint32_t current_addr = begin_addr;
- std::string type = is_fw ? "firmware" : "FPGA";
-
- //Array size needs to be known at runtime, this constant is guaranteed to be larger than any firmware or FPGA image
- uint8_t from_usrp[FPGA_IMAGE_SIZE_BYTES];
-
- //Making sure this won't attempt to read past end of device
- if(current_addr+image_size > memory_size) throw std::runtime_error("Cannot read past end of device.");
-
- //UDP receive buffer
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
-
- //Setting up UDP packet
- usrp2_fw_update_data_t verify_pkt = usrp2_fw_update_data_t();
- verify_pkt.id = htonx<uint32_t>(READ_FLASH_CMD);
- verify_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
- verify_pkt.data.flash_args.length = htonx<uint32_t>(FLASH_DATA_PACKET_SIZE);
-
- for(int i = 0; i < ((image_size/FLASH_DATA_PACKET_SIZE)+1); i++){
- //Print progress
- std::cout << "\rVerifying " << type << " image ("
- << int((double(current_addr-begin_addr)/double(image_size))*100) << "%)." << std::flush;
-
- verify_pkt.data.flash_args.flash_addr = htonx<uint32_t>(current_addr);
-
- udp_transport->send(boost::asio::buffer(&verify_pkt, sizeof(verify_pkt)));
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) != READ_FLASH_ACK){
- throw std::runtime_error(str(boost::format("Invalid reply %d from device.")
- % ntohl(update_data_in->id)));
- }
- for(int j = 0; j < FLASH_DATA_PACKET_SIZE; j++) from_usrp[current_index+j] = update_data_in->data.flash_args.data[j];
-
- current_addr += FLASH_DATA_PACKET_SIZE;
- current_index += FLASH_DATA_PACKET_SIZE;
- }
- for(int i = 0; i < image_size; i++) if(from_usrp[i] != image[i]) throw std::runtime_error("Image write failed.");
-
- std::cout << std::flush << "\rVerifying " << type << " image (100%)." << std::endl;
- std::cout << " * Successful." << std::endl;
-}
-
-void reset_usrp(udp_simple::sptr udp_transport){
-
- //Set up UDP transport
- const usrp2_fw_update_data_t *update_data_in = reinterpret_cast<const usrp2_fw_update_data_t *>(usrp2_update_data_in_mem);
-
- //Set up UDP packet
- usrp2_fw_update_data_t reset_pkt = usrp2_fw_update_data_t();
- reset_pkt.id = htonx<uint32_t>(RESET_USRP_CMD);
- reset_pkt.proto_ver = htonx<uint32_t>(USRP2_FW_PROTO_VERSION);
-
- //Reset USRP
- udp_transport->send(boost::asio::buffer(&reset_pkt, sizeof(reset_pkt)));
- size_t len = udp_transport->recv(boost::asio::buffer(usrp2_update_data_in_mem), UDP_TIMEOUT);
- if(len > offsetof(usrp2_fw_update_data_t, data) and ntohl(update_data_in->id) == RESET_USRP_ACK){
- throw std::runtime_error("USRP reset failed."); //There should be no response to this UDP packet
- }
- else std::cout << std::endl << "Resetting USRP." << std::endl;
-}
-
-int UHD_SAFE_MAIN(int argc, char *argv[]){
-
+int main(int argc, char *argv[])
+{
//Establish user options
std::string fw_path;
std::string ip_addr;
@@ -586,142 +97,7 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
po::store(po::parse_command_line(argc, argv, desc), vm);
po::notify(vm);
- //Print help message
- if(vm.count("help") > 0){
- std::cout << boost::format("N2XX Simple Net Burner\n");
- std::cout << boost::format("Automatically detects and burns standard firmware and FPGA images onto USRP N2XX devices.\n");
- std::cout << boost::format("Can optionally take user input for custom images.\n\n");
- std::cout << desc << std::endl;
- return EXIT_SUCCESS;
- }
-
- //List options
- if(vm.count("list")){
- list_usrps();
- return EXIT_SUCCESS;
- }
-
- //Store user options
- bool burn_fpga = (vm.count("no-fpga") == 0);
- bool burn_fw = (vm.count("no-fw") == 0);
- bool use_custom_fpga = (vm.count("fpga") > 0);
- bool use_custom_fw = (vm.count("fw") > 0);
- bool auto_reboot = (vm.count("auto-reboot") > 0);
- bool check_rev = (vm.count("dont-check-rev") == 0);
- bool overwrite_safe = (vm.count("overwrite-safe") > 0);
- int fpga_image_size = 0;
- int fw_image_size = 0;
-
- //Process options and detect invalid option combinations
- if(not burn_fpga && not burn_fw){
- std::cout << "No images will be burned." << std::endl;
- return EXIT_FAILURE;
- }
- if(not check_rev){
- //Without knowing a revision, the utility cannot automatically generate a filepath, so the user
- //must specify one. The user must also burn both types of images for consistency.
- if(not (burn_fpga and burn_fw))
- throw std::runtime_error("If the --dont-check-rev option is used, both FPGA and firmware images need to be burned.");
- if(not (use_custom_fpga and use_custom_fw))
- throw std::runtime_error("If the --dont-check-rev option is used, the user must specify image filepaths.");
- }
- if(overwrite_safe){
- //If the user specifies overwriting safe images, both image types must be burned for consistency.
- if(not (burn_fpga and burn_fw))
- throw std::runtime_error("If the --overwrite-safe option is used, both FPGA and firmware images need to be burned.");
-
- std::cout << "Are you REALLY sure you want to overwrite the safe images?" << std::endl;
- std::cout << "This is ALMOST ALWAYS a terrible idea." << std::endl;
- std::cout << "Type \"yes\" to continue, or anything else to quit: " << std::flush;
- std::string safe_response;
- std::getline(std::cin, safe_response);
- if(safe_response != "yes"){
- std::cout << "Exiting." << std::endl;
- return EXIT_SUCCESS;
- }
- else std::cout << std::endl; //Formatting
- }
-
- //Find USRP and establish connection
- std::cout << boost::format("Searching for USRP N2XX with IP address %s.\n") % ip_addr;
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(USRP2_UDP_UPDATE_PORT));
- uint32_t hw_rev = find_usrp(udp_transport, check_rev);
-
- //Check validity of file locations and binaries before attempting burn
- std::cout << "Searching for specified images." << std::endl << std::endl;
- if(burn_fpga){
- if(use_custom_fpga){
- //Expand tilde usage if applicable
- #ifndef UHD_PLATFORM_WIN32
- if(fpga_path.find("~/") == 0) fpga_path.replace(0,1,getenv("HOME"));
- #endif
- validate_custom_fpga_file(filename_map[hw_rev], fpga_path, check_rev);
- }
- else{
- std::string default_fpga_filename = str(boost::format("usrp_%s_fpga.bin") % filename_map[hw_rev]);
- fpga_path = find_image_path(default_fpga_filename);
- }
-
- fpga_image_size = read_fpga_image(fpga_path);
- }
- if(burn_fw){
- if(use_custom_fw){
- //Expand tilde usage if applicable
- #ifndef UHD_PLATFORM_WIN32
- if(fw_path.find("~/") == 0) fw_path.replace(0,1,getenv("HOME"));
- #endif
- validate_custom_fw_file(filename_map[hw_rev], fw_path, check_rev);
- }
- else{
- std::string default_fw_filename = str(boost::format("usrp_%s_fw.bin") % erase_tail_copy(filename_map[hw_rev],3));
- fw_path = find_image_path(default_fw_filename);
- }
-
- fw_image_size = read_fw_image(fw_path);
- }
-
print_image_loader_warning(fw_path, fpga_path, vm);
- std::cout << "Will burn the following images:" << std::endl;
- if(burn_fw) std::cout << boost::format(" * Firmware: %s\n") % fw_path;
- if(burn_fpga) std::cout << boost::format(" * FPGA: %s\n") % fpga_path;
- std::cout << std::endl;
-
- uint32_t* flash_info = get_flash_info(ip_addr);
- std::cout << boost::format("Querying %s for flash information.\n") % filename_map[hw_rev];
- std::cout << boost::format(" * Flash size: %3.2f\n") % flash_info[1];
- std::cout << boost::format(" * Sector size: %3.2f\n\n") % flash_info[0];
-
- //Burning images
- std::signal(SIGINT, &sig_int_handler);
- if(burn_fpga){
- erase_image(udp_transport, false, flash_info[1], overwrite_safe);
- write_image(udp_transport, false, fpga_image, flash_info[1], fpga_image_size, overwrite_safe);
- verify_image(udp_transport, false, fpga_image, flash_info[1], fpga_image_size, overwrite_safe);
- }
- if(burn_fpga and burn_fw) std::cout << std::endl; //Formatting
- if(burn_fw){
- erase_image(udp_transport, true, flash_info[1], overwrite_safe);
- write_image(udp_transport, true, fw_image, flash_info[1], fw_image_size, overwrite_safe);
- verify_image(udp_transport, true, fw_image, flash_info[1], fw_image_size, overwrite_safe);
- }
-
- delete(flash_info);
-
- //Reset USRP N2XX
- bool reset = false;
- if(auto_reboot) reset = true;
- else{
- std::string user_response = "foo";
- while(user_response != "y" and user_response != "" and user_response != "n"){
- std::cout << std::endl << "Image burning successful. Reset USRP (Y/n)? ";
- std::getline(std::cin, user_response);
- std::transform(user_response.begin(), user_response.end(), user_response.begin(), ::tolower);
- reset = (user_response == "" or user_response == "y");
- }
- std::cout << std::endl; //Formatting
- }
- if(reset) reset_usrp(udp_transport);
-
- return EXIT_SUCCESS;
+ return EXIT_FAILURE;
}
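The deleted C++ above spells out the wire format both burners spoke: a UDP datagram with three 32-bit header words (proto_ver, id, seq) followed by the flash_args payload of flash_addr, length and a 256-byte data block, sent in network byte order per the htonx<uint32_t>() calls. A rough Python sketch of packing a WRITE_FLASH_CMD ('w') request under that assumed layout; the exact struct padding is not shown in the diff, so treat this as illustrative only:

    import struct

    USRP2_FW_PROTO_VERSION = 7       # from the deleted #define above
    WRITE_FLASH_CMD = ord('w')       # from the deleted usrp2_fw_update_id_t enum
    FLASH_DATA_PACKET_SIZE = 256

    def pack_write_request(seq, flash_addr, payload):
        # proto_ver, id, seq, flash_addr, length, data[256] -- big-endian header
        payload = payload.ljust(FLASH_DATA_PACKET_SIZE, b"\x00")
        return struct.pack("!LLLLL256s", USRP2_FW_PROTO_VERSION, WRITE_FLASH_CMD,
                           seq, flash_addr, FLASH_DATA_PACKET_SIZE, payload)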
diff --git a/host/utils/usrp_x3xx_fpga_burner.cpp b/host/utils/usrp_x3xx_fpga_burner.cpp
index e3de8aad3..8f297865b 100644
--- a/host/utils/usrp_x3xx_fpga_burner.cpp
+++ b/host/utils/usrp_x3xx_fpga_burner.cpp
@@ -15,188 +15,14 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
-#include <csignal>
#include <iostream>
-#include <map>
-#include <fstream>
-#include <stdexcept>
-#include <stdint.h>
-#include <stdio.h>
-#include <string.h>
-#include <vector>
-
-#include <boost/foreach.hpp>
-#include <boost/asio.hpp>
#include <boost/program_options.hpp>
-#include <boost/property_tree/ptree.hpp>
-#include <boost/property_tree/xml_parser.hpp>
-#include <boost/assign.hpp>
-#include <stdint.h>
-#include <boost/assign/list_of.hpp>
#include <boost/format.hpp>
-#include <boost/algorithm/string/erase.hpp>
-#include <boost/filesystem.hpp>
-#include <boost/thread/thread.hpp>
-
-#include <uhd/exception.hpp>
-#include <uhd/transport/if_addrs.hpp>
-#include <uhd/transport/nirio/niusrprio_session.h>
-#include <uhd/transport/udp_simple.hpp>
-#include <uhd/device.hpp>
-#include <uhd/types/device_addr.hpp>
-#include <uhd/utils/byteswap.hpp>
-#include <uhd/utils/paths.hpp>
-#include <uhd/utils/safe_main.hpp>
-#include <uhd/utils/safe_call.hpp>
-
-extern "C" {
-#include "cdecode.h"
-}
-
-#define X300_FPGA_BIN_SIZE_BYTES 15877916
-#define X300_FPGA_BIT_MAX_SIZE_BYTES 15878032
-#define X300_FPGA_PROG_UDP_PORT 49157
-#define X300_FLASH_SECTOR_SIZE 131072
-#define X300_PACKET_SIZE_BYTES 256
-#define X300_FPGA_SECTOR_START 32
-#define X300_MAX_RESPONSE_BYTES 128
-#define UDP_TIMEOUT 3
-#define FPGA_LOAD_TIMEOUT 15
-#define X300_FPGA_PROG_FLAGS_ACK 1
-#define X300_FPGA_PROG_FLAGS_ERROR 2
-#define X300_FPGA_PROG_FLAGS_INIT 4
-#define X300_FPGA_PROG_FLAGS_CLEANUP 8
-#define X300_FPGA_PROG_FLAGS_ERASE 16
-#define X300_FPGA_PROG_FLAGS_VERIFY 32
-#define X300_FPGA_PROG_CONFIGURE 64
-#define X300_FPGA_PROG_CONFIG_STATUS 128
-
-namespace fs = boost::filesystem;
namespace po = boost::program_options;
-using namespace uhd;
-using namespace uhd::transport;
-
-static int num_ctrl_c = 0;
-void sig_int_handler(int){
- num_ctrl_c++;
- if(num_ctrl_c == 1){
- std::cout << std::endl << "Are you sure you want to abort the image burning? If you do, your "
- "USRP-X series device will be bricked!" << std::endl
- << "Press Ctrl+C again to abort the image burning procedure." << std::endl << std::endl;
- }
- else{
- std::cout << std::endl << "Aborting. Your USRP X-Series device will be bricked." << std::endl
- << "http://files.ettus.com/manual/page_usrp_x3x0.html#x3x0_load_fpga_imgs_jtag" << std::endl
- << "for details on restoring your device." << std::endl;
- exit(EXIT_FAILURE);
- }
-}
-
-typedef struct {
- uint32_t flags;
- uint32_t sector;
- uint32_t index;
- uint32_t size;
- uint16_t data[128];
-} x300_fpga_update_data_t;
-
-uint8_t x300_data_in_mem[udp_simple::mtu];
-uint8_t intermediary_packet_data[X300_PACKET_SIZE_BYTES];
-
-uint8_t bitswap(uint8_t b){
- b = ((b & 0xF0) >> 4) | ((b & 0x0F) << 4);
- b = ((b & 0xCC) >> 2) | ((b & 0x33) << 2);
- b = ((b & 0xAA) >> 1) | ((b & 0x55) << 1);
-
- return b;
-}
-
-void list_usrps(){
- device_addrs_t found_devices = device::find(device_addr_t("type=x300"), device::USRP);
-
- std::cout << "Available X3x0 devices:" << std::endl;
- BOOST_FOREACH(const device_addr_t &dev, found_devices){
- std::string dev_string;
- if(dev.has_key("addr")){
- dev_string = str(boost::format(" * %s (%s, addr: %s)")
- % dev["product"]
- % dev["fpga"]
- % dev["addr"]);
- }
- else{
- dev_string = str(boost::format(" * %s (%s, resource: %s)")
- % dev["product"]
- % dev["fpga"]
- % dev["resource"]);
- }
- std::cout << dev_string << std::endl;
- }
-}
-
-device_addr_t find_usrp_with_ethernet(std::string ip_addr, bool output){
- if(output) std::cout << "Attempting to find X3x0 with IP address: " << ip_addr << std::endl;
- const device_addr_t dev = device_addr_t(str(boost::format("addr=%s") % ip_addr));
- device_addrs_t found_devices = device::find(dev, device::USRP);
-
- if(found_devices.size() < 1) {
- throw std::runtime_error("Could not find X3x0 with the specified address!");
- }
- else if(found_devices.size() > 1) {
- throw std::runtime_error("Found multiple X3x0 units with the specified address!");
- }
- else {
- if(output) std::cout << (boost::format("Found %s (%s).\n\n")
- % found_devices[0]["product"]
- % found_devices[0]["fpga"]);
- }
- return found_devices[0];
-}
-
-device_addr_t find_usrp_with_pcie(std::string resource, bool output){
- if(output) std::cout << "Attempting to find X3x0 with resource: " << resource << std::endl;
- const device_addr_t dev = device_addr_t(str(boost::format("resource=%s") % resource));
- device_addrs_t found_devices = device::find(dev, device::USRP);
-
- if(found_devices.size() < 1) {
- throw std::runtime_error("Could not find X3x0 with the specified resource!");
- }
- else {
- if(output) std::cout << (boost::format("Found %s (%s).\n\n")
- % found_devices[0]["product"]
- % found_devices[0]["fpga"]);
- }
- return found_devices[0];
-}
-
-std::string get_default_image_path(std::string model, std::string image_type){
- std::transform(model.begin(), model.end(), model.begin(), ::tolower);
-
- std::string image_name = str(boost::format("usrp_%s_fpga_%s.bit")
- % model.c_str() % image_type.c_str());
-
- return find_image_path(image_name);
-}
-
-void extract_from_lvbitx(std::string lvbitx_path, std::vector<char> &bitstream){
- boost::property_tree::ptree pt;
- boost::property_tree::xml_parser::read_xml(lvbitx_path.c_str(), pt,
- boost::property_tree::xml_parser::no_comments |
- boost::property_tree::xml_parser::trim_whitespace);
- std::string const encoded_bitstream(pt.get<std::string>("Bitfile.Bitstream"));
- std::vector<char> decoded_bitstream(encoded_bitstream.size());
-
- base64_decodestate decode_state;
- base64_init_decodestate(&decode_state);
- size_t const decoded_size = base64_decode_block(encoded_bitstream.c_str(),
- encoded_bitstream.size(), &decoded_bitstream.front(), &decode_state);
- decoded_bitstream.resize(decoded_size);
- bitstream.swap(decoded_bitstream);
-}
-
-void print_image_loader_warning(const std::string &fpga_path, const po::variables_map &vm){
-
+void print_image_loader_warning(const std::string &fpga_path, const po::variables_map &vm)
+{
// Newline + indent
#ifdef UHD_PLATFORM_WIN32
const std::string nl = " ^\n ";
@@ -210,11 +36,9 @@ void print_image_loader_warning(const std::string &fpga_path, const po::variable
if(vm.count("addr") > 0){
uhd_image_loader += str(boost::format(",addr=%s")
% vm["addr"].as<std::string>());
-
if(vm.count("configure") > 0){
uhd_image_loader += ",configure";
}
-
if(vm.count("verify") > 0){
uhd_image_loader += ",verify";
}
@@ -251,8 +75,7 @@ void print_image_loader_warning(const std::string &fpga_path, const po::variable
}
std::cout << "************************************************************************************************" << std::endl
- << "WARNING: This utility will be removed in an upcoming version of UHD. In the future, use" << std::endl
- << " this command:" << std::endl
+ << "ERROR: This utility has been removed in this version of UHD. Use this command:" << std::endl
<< std::endl
<< uhd_image_loader << std::endl
<< std::endl
@@ -260,218 +83,17 @@ void print_image_loader_warning(const std::string &fpga_path, const po::variable
<< std::endl;
}
-void ethernet_burn(udp_simple::sptr udp_transport, std::string fpga_path, bool verify){
- uint32_t max_size;
- std::vector<char> bitstream;
-
- if(fs::extension(fpga_path) == ".bit") max_size = X300_FPGA_BIT_MAX_SIZE_BYTES;
- else max_size = X300_FPGA_BIN_SIZE_BYTES; //Use for both .bin and .lvbitx
-
- bool is_lvbitx = (fs::extension(fpga_path) == ".lvbitx");
-
- size_t fpga_image_size;
- FILE* file;
- if((file = fopen(fpga_path.c_str(), "rb"))){
- fseek(file, 0, SEEK_END);
- if(is_lvbitx){
- extract_from_lvbitx(fpga_path, bitstream);
- fpga_image_size = bitstream.size();
- }
- else fpga_image_size = ftell(file);
- if(fpga_image_size > max_size){
- fclose(file);
- throw std::runtime_error(str(boost::format("FPGA size is too large (%d > %d).")
- % fpga_image_size % max_size));
- }
- rewind(file);
- }
- else{
- throw std::runtime_error(str(boost::format("Could not find FPGA image at location: %s")
- % fpga_path.c_str()));
- }
-
- const x300_fpga_update_data_t *update_data_in = reinterpret_cast<const x300_fpga_update_data_t *>(x300_data_in_mem);
-
- x300_fpga_update_data_t ack_packet;
- ack_packet.flags = htonx<uint32_t>(X300_FPGA_PROG_FLAGS_ACK | X300_FPGA_PROG_FLAGS_INIT);
- ack_packet.sector = 0;
- ack_packet.size = 0;
- ack_packet.index = 0;
- memset(ack_packet.data, 0, sizeof(ack_packet.data));
- udp_transport->send(boost::asio::buffer(&ack_packet, sizeof(ack_packet)));
-
- udp_transport->recv(boost::asio::buffer(x300_data_in_mem), UDP_TIMEOUT);
- if((ntohl(update_data_in->flags) & X300_FPGA_PROG_FLAGS_ERROR) != X300_FPGA_PROG_FLAGS_ERROR){
- std::cout << "Burning image: " << fpga_path << std::endl;
- if(verify) std::cout << "NOTE: Verifying image. Burning will take much longer." << std::endl;
- std::cout << std::endl;
- }
- else{
- throw std::runtime_error("Failed to start image burning! Did you specify the correct IP address? If so, power-cycle the device and try again.");
- }
-
- size_t current_pos = 0;
- size_t sectors = fpga_image_size / X300_FLASH_SECTOR_SIZE;
-
- //Each sector
- for(size_t i = 0; i < fpga_image_size; i += X300_FLASH_SECTOR_SIZE){
-
- //Print progress percentage at beginning of each sector
- std::cout << "\rProgress: " << int(double(i)/double(fpga_image_size)*100)
- << "% (" << (i / X300_FLASH_SECTOR_SIZE) << "/"
- << sectors << " sectors)" << std::flush;
-
- //Each packet
- for(size_t j = i; (j < fpga_image_size and j < (i+X300_FLASH_SECTOR_SIZE)); j += X300_PACKET_SIZE_BYTES){
- x300_fpga_update_data_t send_packet;
-
- send_packet.flags = X300_FPGA_PROG_FLAGS_ACK;
- if(verify) send_packet.flags |= X300_FPGA_PROG_FLAGS_VERIFY;
- if(j == i) send_packet.flags |= X300_FPGA_PROG_FLAGS_ERASE; //Erase the sector before writing
- send_packet.flags = htonx<uint32_t>(send_packet.flags);
-
- send_packet.sector = htonx<uint32_t>(X300_FPGA_SECTOR_START + (i/X300_FLASH_SECTOR_SIZE));
- send_packet.index = htonx<uint32_t>((j % X300_FLASH_SECTOR_SIZE) / 2);
- send_packet.size = htonx<uint32_t>(X300_PACKET_SIZE_BYTES / 2);
- memset(intermediary_packet_data,0,X300_PACKET_SIZE_BYTES);
- memset(send_packet.data,0,X300_PACKET_SIZE_BYTES);
- if(!is_lvbitx) current_pos = ftell(file);
-
- if(current_pos + X300_PACKET_SIZE_BYTES > fpga_image_size){
- if(is_lvbitx){
- memcpy(intermediary_packet_data, (&bitstream[current_pos]), (bitstream.size()-current_pos+1));
- }
- else{
- size_t len = fread(intermediary_packet_data, sizeof(uint8_t), (fpga_image_size-current_pos), file);
- if(len != (fpga_image_size-current_pos)){
- throw std::runtime_error("Error reading from file!");
- }
- }
- }
- else{
- if(is_lvbitx){
- memcpy(intermediary_packet_data, (&bitstream[current_pos]), X300_PACKET_SIZE_BYTES);
- current_pos += X300_PACKET_SIZE_BYTES;
- }
- else{
- size_t len = fread(intermediary_packet_data, sizeof(uint8_t), X300_PACKET_SIZE_BYTES, file);
- if(len != X300_PACKET_SIZE_BYTES){
- throw std::runtime_error("Error reading from file!");
- }
- }
- }
-
- for(size_t k = 0; k < X300_PACKET_SIZE_BYTES; k++){
- intermediary_packet_data[k] = bitswap(intermediary_packet_data[k]);
- }
-
- memcpy(send_packet.data, intermediary_packet_data, X300_PACKET_SIZE_BYTES);
-
- for(size_t k = 0; k < (X300_PACKET_SIZE_BYTES/2); k++){
- send_packet.data[k] = htonx<uint16_t>(send_packet.data[k]);
- }
-
- udp_transport->send(boost::asio::buffer(&send_packet, sizeof(send_packet)));
-
- if (udp_transport->recv(boost::asio::buffer(x300_data_in_mem), UDP_TIMEOUT) == 0)
- throw std::runtime_error("Timed out waiting for ACK!");
-
- const x300_fpga_update_data_t *update_data_in = reinterpret_cast<const x300_fpga_update_data_t *>(x300_data_in_mem);
-
- if((ntohl(update_data_in->flags) & X300_FPGA_PROG_FLAGS_ERROR) == X300_FPGA_PROG_FLAGS_ERROR){
- throw std::runtime_error("Transfer or data verification failed!");
- }
- }
- }
- fclose(file);
-
- //Send clean-up signal
- x300_fpga_update_data_t cleanup_packet;
- cleanup_packet.flags = htonx<uint32_t>(X300_FPGA_PROG_FLAGS_ACK | X300_FPGA_PROG_FLAGS_CLEANUP);
- cleanup_packet.sector = 0;
- cleanup_packet.size = 0;
- cleanup_packet.index = 0;
- memset(cleanup_packet.data, 0, sizeof(cleanup_packet.data));
- udp_transport->send(boost::asio::buffer(&cleanup_packet, sizeof(cleanup_packet)));
-
- if (udp_transport->recv(boost::asio::buffer(x300_data_in_mem), UDP_TIMEOUT) == 0)
- throw std::runtime_error("Timed out waiting for ACK!");
- const x300_fpga_update_data_t *cleanup_data_in = reinterpret_cast<const x300_fpga_update_data_t *>(x300_data_in_mem);
-
- if((ntohl(cleanup_data_in->flags) & X300_FPGA_PROG_FLAGS_ERROR) == X300_FPGA_PROG_FLAGS_ERROR){
- throw std::runtime_error("Transfer or data verification failed!");
- }
-
- std::cout << "\rProgress: " << "100% (" << sectors << "/" << sectors << " sectors)" << std::endl;
-}
-
-void pcie_burn(std::string resource, std::string rpc_port, std::string fpga_path)
-{
- std::cout << "Burning image: " << fpga_path << std::endl;
- std::cout << "This will take 3-10 minutes." << std::endl;
-
- nirio_status status = NiRio_Status_Success;
-
- uhd::niusrprio::niusrprio_session fpga_session(resource, rpc_port);
- nirio_status_chain(fpga_session.download_bitstream_to_flash(fpga_path), status);
-
- if(nirio_status_fatal(status)) throw std::runtime_error("Failed to burn FPGA image!");
-}
-
-bool configure_fpga(udp_simple::sptr udp_transport, std::string ip_addr){
- x300_fpga_update_data_t configure_packet;
- configure_packet.flags = htonx<uint32_t>(X300_FPGA_PROG_CONFIGURE | X300_FPGA_PROG_FLAGS_ACK);
- configure_packet.sector = 0;
- configure_packet.size = 0;
- configure_packet.index = 0;
- memset(configure_packet.data, 0, sizeof(configure_packet.data));
- udp_transport->send(boost::asio::buffer(&configure_packet, sizeof(configure_packet)));
-
- udp_transport->recv(boost::asio::buffer(x300_data_in_mem), UDP_TIMEOUT);
- const x300_fpga_update_data_t *configure_data_in = reinterpret_cast<const x300_fpga_update_data_t *>(x300_data_in_mem);
- bool successful = false;
-
- if((ntohl(configure_data_in->flags) & X300_FPGA_PROG_FLAGS_ERROR) == X300_FPGA_PROG_FLAGS_ERROR){
- throw std::runtime_error("Transfer or data verification failed!");
- }
- else{
- std::cout << std::endl << "Waiting for X3x0 to configure FPGA image and reload." << std::endl;
- boost::this_thread::sleep(boost::posix_time::milliseconds(5000));
-
- x300_fpga_update_data_t config_status_packet;
- configure_packet.flags = htonx<uint32_t>(X300_FPGA_PROG_CONFIG_STATUS);
- config_status_packet.sector = 0;
- config_status_packet.size = 0;
- config_status_packet.index = 0;
- memset(config_status_packet.data, 0, sizeof(config_status_packet.data));
- for(int i = 0; i < 5; i++){
- udp_transport->send(boost::asio::buffer(&config_status_packet, sizeof(config_status_packet)));
- udp_transport->recv(boost::asio::buffer(x300_data_in_mem), 1);
- const x300_fpga_update_data_t *config_status_data_in = reinterpret_cast<const x300_fpga_update_data_t *>(x300_data_in_mem);
-
- if((ntohl(config_status_data_in->flags) & X300_FPGA_PROG_FLAGS_ERROR) != X300_FPGA_PROG_FLAGS_ERROR
- and udp_transport->get_recv_addr() == ip_addr){
- successful = true;
- break;
- }
- successful = false; //If it worked, the break would skip this
- }
- }
- return successful;
-}
-
-int UHD_SAFE_MAIN(int argc, char *argv[]){
- memset(intermediary_packet_data, 0, X300_PACKET_SIZE_BYTES);
+int main(int argc, char *argv[]){
std::string ip_addr, resource, fpga_path, image_type, rpc_port;
po::options_description desc("Allowed options");
desc.add_options()
("help", "Display this help message.")
- ("addr", po::value<std::string>(&ip_addr), "Specify an IP address.")
+ ("addr", po::value<std::string>(&ip_addr)->default_value("1.2.3.4"), "Specify an IP address.")
("resource", po::value<std::string>(&resource), "Specify an NI-RIO resource.")
("rpc-port", po::value<std::string>(&rpc_port)->default_value("5444"), "Specify a port to communicate with the RPC server.")
- ("type", po::value<std::string>(&image_type), "Specify an image type (1G, HGS, XGS), leave blank for current type.")
- ("fpga-path", po::value<std::string>(&fpga_path), "Specify an FPGA path (overrides --type option).")
+ ("type", po::value<std::string>(&image_type)->default_value("HG"), "Specify an image type (1G, HG, XG), leave blank for current type.")
+ ("fpga-path", po::value<std::string>(&fpga_path)->default_value("/path/to/fpga-image.bit"), "Specify an FPGA path (overrides --type option).")
("configure", "Initialize FPGA with image currently burned to flash (Ethernet only).")
("verify", "Verify data downloaded to flash (Ethernet only, download will take much longer)")
("list", "List all available X3x0 devices.")
@@ -480,97 +102,8 @@ int UHD_SAFE_MAIN(int argc, char *argv[]){
po::store(po::parse_command_line(argc, argv, desc), vm);
po::notify(vm);
- //Print help message
- if(vm.count("help")){
- std::cout << "USRP X3x0 FPGA Burner" << std::endl << std::endl;
-
- std::cout << "Burns an FPGA image onto a USRP X300/X310. To burn the image" << std::endl
- << "over Ethernet, specify an IP address with the --addr option," << std::endl
- << "or to burn over PCIe, specify an NI-RIO resource (ex. RIO0)" << std::endl
- << "with the --resource option." << std::endl << std::endl;
-
- std::cout << desc << std::endl;
- return EXIT_SUCCESS;
- }
-
- //List all available devices
- if(vm.count("list")){
- list_usrps();
- return EXIT_SUCCESS;
- }
-
- /*
- * The user must specify whether to burn the image over Ethernet or PCI-e.
- */
- if(not (vm.count("addr") xor vm.count("resource"))){
- throw std::runtime_error("You must specify addr OR resource!");
- }
-
- /*
- * With settings validated, find X3x0 with specified arguments.
- */
- device_addr_t dev = (vm.count("addr")) ? find_usrp_with_ethernet(ip_addr, true)
- : find_usrp_with_pcie(resource, true);
-
- /*
- * If custom FPGA path is given, ignore specified type and let FPGA
- * figure it out.
- */
- if(vm.count("fpga-path")){
- //Expand tilde usage if applicable
- #ifndef UHD_PLATFORM_WIN32
- if(fpga_path.find("~/") == 0) fpga_path.replace(0,1,getenv("HOME"));
- #endif
- }
- else{
- if(vm.count("type")){
- //Make sure the specified type is 1G, HGS, or XGS
- if((image_type != "1G") and (image_type != "HGS") and (image_type != "XGS")){
- throw std::runtime_error("--type must be 1G, HGS, or XGS!");
- }
- else fpga_path = get_default_image_path(dev["product"], image_type);
- }
- else{
- //Use default image of currently present FPGA type
- fpga_path = get_default_image_path(dev["product"], dev["fpga"]);
- }
- }
-
- /*
- * Check validity of image through extension
- */
- std::string ext = fs::extension(fpga_path.c_str());
- if(ext != ".bin" and ext != ".bit" and ext != ".lvbitx"){
- throw std::runtime_error("The image filename must end in .bin, .bit, or .lvbitx.");
- }
-
print_image_loader_warning(fpga_path, vm);
- std::signal(SIGINT, &sig_int_handler);
- if(vm.count("addr")){
- udp_simple::sptr udp_transport = udp_simple::make_connected(ip_addr, BOOST_STRINGIZE(X300_FPGA_PROG_UDP_PORT));
-
- ethernet_burn(udp_transport, fpga_path, (vm.count("verify") > 0));
-
- if(vm.count("configure")){
- if(configure_fpga(udp_transport, ip_addr)) std::cout << "Successfully configured FPGA!" << std::endl;
- else throw std::runtime_error("FPGA configuring failed!");
- }
- }
- else pcie_burn(resource, rpc_port, fpga_path);
-
- /*
- * Attempt to find USRP after burning
- */
- std::cout << std::endl << "Attempting to find device..." << std::flush;
- boost::this_thread::sleep(boost::posix_time::milliseconds(2000)); //Sometimes needed for Ethernet to reconnect
- device_addr_t found_usrp = (vm.count("addr")) ? find_usrp_with_ethernet(ip_addr, false)
- : find_usrp_with_pcie(resource, false);
- std::cout << "found!" << std::endl; //If unsuccessful, runtime error would occur in find functions
- std::cout << "Successfully burned FPGA image!" << std::endl << std::endl;
-
- if(vm.count("addr")) std::cout << str(boost::format("Power-cycle the USRP %s to use the new image.") % found_usrp["product"]) << std::endl;
- else std::cout << str(boost::format("Power-cycle the USRP %s and reboot your machine to use the new image.") % found_usrp["product"]) << std::endl;
-
- return EXIT_SUCCESS;
+ return EXIT_FAILURE;
}
+
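For readers following the message format: the hunk at the top of this file's diff only appends the `addr`, `configure`, and `verify` pieces to the suggested `uhd_image_loader` device-args string; the `type=x300`/`fpga-path` base is assembled earlier in the file and is assumed below. A minimal Python sketch of the resulting command line:

```python
# Hypothetical mirror of the string building in print_image_loader_warning().
# Only the addr/configure/verify suffixes are visible in the hunk above; the
# "type=x300,fpga-path=..." base is an assumption about the earlier code.
def image_loader_command(fpga_path, addr=None, configure=False, verify=False):
    args = "type=x300,fpga-path={0}".format(fpga_path)  # assumed base
    if addr:
        args += ",addr={0}".format(addr)
    if configure:
        args += ",configure"
    if verify:
        args += ",verify"
    return 'uhd_image_loader --args="{0}"'.format(args)

print(image_loader_command("usrp_x310_fpga_HG.bit", addr="192.168.10.2", verify=True))
# uhd_image_loader --args="type=x300,fpga-path=usrp_x310_fpga_HG.bit,addr=192.168.10.2,verify"
```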
diff --git a/tools/dissectors/make-dissector-reg.py b/tools/dissectors/make-dissector-reg.py
index 44972909b..37170a84f 100755
--- a/tools/dissectors/make-dissector-reg.py
+++ b/tools/dissectors/make-dissector-reg.py
@@ -59,7 +59,7 @@ elif registertype == "dissectors":
*/
"""
else:
- print "Unknown output type '%s'" % registertype
+ print("Unknown output type '%s'" % registertype)
sys.exit(1)
@@ -77,7 +77,7 @@ for file in files:
filenames.append(os.path.join(srcdir, file))
if len(filenames) < 1:
- print "No files found"
+ print("No files found")
sys.exit(1)
@@ -118,7 +118,7 @@ if cache_filename:
cache_file = open(cache_filename, 'rb')
cache = pickle.load(cache_file)
cache_file.close()
- if not cache.has_key(VERSION_KEY) or cache[VERSION_KEY] != CUR_VERSION:
+ if (VERSION_KEY not in cache) or cache[VERSION_KEY] != CUR_VERSION:
cache = {VERSION_KEY: CUR_VERSION}
except:
cache = {VERSION_KEY: CUR_VERSION}
@@ -127,10 +127,10 @@ if cache_filename:
for filename in filenames:
file = open(filename)
cur_mtime = os.fstat(file.fileno())[ST_MTIME]
- if cache and cache.has_key(filename):
+ if cache and (filename in cache):
cdict = cache[filename]
if cur_mtime == cdict['mtime']:
-# print "Pulling %s from cache" % (filename)
+# print("Pulling %s from cache" % (filename))
regs['proto_reg'].extend(cdict['proto_reg'])
regs['handoff_reg'].extend(cdict['handoff_reg'])
regs['wtap_register'].extend(cdict['wtap_register'])
@@ -144,7 +144,7 @@ for filename in filenames:
'handoff_reg': [],
'wtap_register': [],
}
-# print "Searching %s" % (filename)
+# print("Searching %s" % (filename))
for line in file.readlines():
for action in patterns:
regex = action[1]
@@ -154,7 +154,7 @@ for filename in filenames:
sym_type = action[0]
regs[sym_type].append(symbol)
if cache is not None:
-# print "Caching %s for %s: %s" % (sym_type, filename, symbol)
+# print("Caching %s for %s: %s" % (sym_type, filename, symbol))
cache[filename][sym_type].append(symbol)
file.close()
@@ -165,7 +165,7 @@ if cache is not None and cache_filename is not None:
# Make sure we actually processed something
if len(regs['proto_reg']) < 1:
- print "No protocol registrations found"
+ print("No protocol registrations found")
sys.exit(1)
# Sort the lists to make them pretty
@@ -252,7 +252,7 @@ register_wtap_module(void)
reg_code.write(line)
reg_code.write("}\n");
- reg_code.write("#endif\n");
+ reg_code.write("#endif\n");
else:
reg_code.write("""
static gulong proto_reg_count(void)
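The hunks above are plain Python-2-to-3 compatibility fixes: `print` statements become `print()` calls, and `dict.has_key(key)`, which was removed in Python 3, becomes `key in dict`. A two-line illustration of the replaced idiom:

```python
cache = {'VERSION': 2}
# Works on both Python 2 and 3, unlike the removed cache.has_key('VERSION'):
print('VERSION' in cache)  # True
```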
diff --git a/tools/gr-usrptest/CMakeLists.txt b/tools/gr-usrptest/CMakeLists.txt
new file mode 100644
index 000000000..3eeea5f75
--- /dev/null
+++ b/tools/gr-usrptest/CMakeLists.txt
@@ -0,0 +1,198 @@
+# Copyright 2011,2012,2014,2016 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Project setup
+########################################################################
+cmake_minimum_required(VERSION 2.6)
+project(gr-usrptest CXX C)
+enable_testing()
+
+#install to PyBOMBS target prefix if defined
+if(DEFINED ENV{PYBOMBS_PREFIX})
+ set(CMAKE_INSTALL_PREFIX $ENV{PYBOMBS_PREFIX})
+ message(STATUS "PyBOMBS installed GNU Radio. Setting CMAKE_INSTALL_PREFIX to $ENV{PYBOMBS_PREFIX}")
+endif()
+
+#select the release build type by default to get optimization flags
+if(NOT CMAKE_BUILD_TYPE)
+ set(CMAKE_BUILD_TYPE "Release")
+ message(STATUS "Build type not specified: defaulting to release.")
+endif(NOT CMAKE_BUILD_TYPE)
+set(CMAKE_BUILD_TYPE ${CMAKE_BUILD_TYPE} CACHE STRING "")
+
+#make sure our local CMake Modules path comes first
+list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_SOURCE_DIR}/cmake/Modules)
+
+# Set the version information here
+set(VERSION_INFO_MAJOR_VERSION 1)
+set(VERSION_INFO_API_COMPAT 0)
+set(VERSION_INFO_MINOR_VERSION 0)
+set(VERSION_INFO_MAINT_VERSION git)
+
+########################################################################
+# Compiler specific setup
+########################################################################
+if(CMAKE_COMPILER_IS_GNUCXX AND NOT WIN32)
+ #http://gcc.gnu.org/wiki/Visibility
+ add_definitions(-fvisibility=hidden)
+endif()
+
+########################################################################
+# Find boost
+########################################################################
+if(UNIX AND EXISTS "/usr/lib64")
+ list(APPEND BOOST_LIBRARYDIR "/usr/lib64") #fedora 64-bit fix
+endif(UNIX AND EXISTS "/usr/lib64")
+set(Boost_ADDITIONAL_VERSIONS
+ "1.35.0" "1.35" "1.36.0" "1.36" "1.37.0" "1.37" "1.38.0" "1.38" "1.39.0" "1.39"
+ "1.40.0" "1.40" "1.41.0" "1.41" "1.42.0" "1.42" "1.43.0" "1.43" "1.44.0" "1.44"
+ "1.45.0" "1.45" "1.46.0" "1.46" "1.47.0" "1.47" "1.48.0" "1.48" "1.49.0" "1.49"
+ "1.50.0" "1.50" "1.51.0" "1.51" "1.52.0" "1.52" "1.53.0" "1.53" "1.54.0" "1.54"
+ "1.55.0" "1.55" "1.56.0" "1.56" "1.57.0" "1.57" "1.58.0" "1.58" "1.59.0" "1.59"
+ "1.60.0" "1.60" "1.61.0" "1.61" "1.62.0" "1.62" "1.63.0" "1.63" "1.64.0" "1.64"
+ "1.65.0" "1.65" "1.66.0" "1.66" "1.67.0" "1.67" "1.68.0" "1.68" "1.69.0" "1.69"
+)
+find_package(Boost "1.35" COMPONENTS filesystem system)
+
+if(NOT Boost_FOUND)
+ message(FATAL_ERROR "Boost required to compile usrptest")
+endif()
+
+########################################################################
+# Install directories
+########################################################################
+include(GrPlatform) #define LIB_SUFFIX
+set(GR_RUNTIME_DIR bin)
+set(GR_LIBRARY_DIR lib${LIB_SUFFIX})
+set(GR_INCLUDE_DIR include/usrptest)
+set(GR_DATA_DIR share)
+set(GR_PKG_DATA_DIR ${GR_DATA_DIR}/${CMAKE_PROJECT_NAME})
+set(GR_DOC_DIR ${GR_DATA_DIR}/doc)
+set(GR_PKG_DOC_DIR ${GR_DOC_DIR}/${CMAKE_PROJECT_NAME})
+set(GR_CONF_DIR etc)
+set(GR_PKG_CONF_DIR ${GR_CONF_DIR}/${CMAKE_PROJECT_NAME}/conf.d)
+set(GR_LIBEXEC_DIR libexec)
+set(GR_PKG_LIBEXEC_DIR ${GR_LIBEXEC_DIR}/${CMAKE_PROJECT_NAME})
+set(GRC_BLOCKS_DIR ${GR_PKG_DATA_DIR}/grc/blocks)
+
+########################################################################
+# On Apple only, set install name and use rpath correctly, if not already set
+########################################################################
+if(APPLE)
+ if(NOT CMAKE_INSTALL_NAME_DIR)
+ set(CMAKE_INSTALL_NAME_DIR
+ ${CMAKE_INSTALL_PREFIX}/${GR_LIBRARY_DIR} CACHE
+ PATH "Library Install Name Destination Directory" FORCE)
+ endif(NOT CMAKE_INSTALL_NAME_DIR)
+ if(NOT CMAKE_INSTALL_RPATH)
+ set(CMAKE_INSTALL_RPATH
+ ${CMAKE_INSTALL_PREFIX}/${GR_LIBRARY_DIR} CACHE
+ PATH "Library Install RPath" FORCE)
+ endif(NOT CMAKE_INSTALL_RPATH)
+ if(NOT CMAKE_BUILD_WITH_INSTALL_RPATH)
+ set(CMAKE_BUILD_WITH_INSTALL_RPATH ON CACHE
+ BOOL "Do Build Using Library Install RPath" FORCE)
+ endif(NOT CMAKE_BUILD_WITH_INSTALL_RPATH)
+endif(APPLE)
+
+########################################################################
+# Find gnuradio build dependencies
+########################################################################
+find_package(CppUnit)
+find_package(Doxygen)
+
+# Search for GNU Radio and its components and versions. Add any
+# components required to the list of GR_REQUIRED_COMPONENTS (in all
+# caps such as FILTER or FFT) and change the version to the minimum
+# API compatible version required.
+set(GR_REQUIRED_COMPONENTS RUNTIME)
+find_package(Gnuradio "3.7.2" REQUIRED)
+list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_SOURCE_DIR}/cmake/Modules)
+include(GrVersion)
+
+if(NOT CPPUNIT_FOUND)
+ message(FATAL_ERROR "CppUnit required to compile usrptest")
+endif()
+
+########################################################################
+# Setup doxygen option
+########################################################################
+if(DOXYGEN_FOUND)
+ option(ENABLE_DOXYGEN "Build docs using Doxygen" ON)
+else(DOXYGEN_FOUND)
+ option(ENABLE_DOXYGEN "Build docs using Doxygen" OFF)
+endif(DOXYGEN_FOUND)
+
+########################################################################
+# Setup the include and linker paths
+########################################################################
+include_directories(
+ ${CMAKE_SOURCE_DIR}/lib
+ ${CMAKE_SOURCE_DIR}/include
+ ${CMAKE_BINARY_DIR}/lib
+ ${CMAKE_BINARY_DIR}/include
+ ${Boost_INCLUDE_DIRS}
+ ${CPPUNIT_INCLUDE_DIRS}
+ ${GNURADIO_ALL_INCLUDE_DIRS}
+)
+
+link_directories(
+ ${Boost_LIBRARY_DIRS}
+ ${CPPUNIT_LIBRARY_DIRS}
+ ${GNURADIO_RUNTIME_LIBRARY_DIRS}
+)
+
+# Set component parameters
+set(GR_USRPTEST_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/include CACHE INTERNAL "" FORCE)
+set(GR_USRPTEST_SWIG_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/swig CACHE INTERNAL "" FORCE)
+
+########################################################################
+# Create uninstall target
+########################################################################
+configure_file(
+ ${CMAKE_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in
+ ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake
+@ONLY)
+
+add_custom_target(uninstall
+ ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake
+)
+
+########################################################################
+# Add subdirectories
+########################################################################
+add_subdirectory(include/usrptest)
+add_subdirectory(lib)
+add_subdirectory(swig)
+add_subdirectory(python)
+add_subdirectory(grc)
+add_subdirectory(apps)
+add_subdirectory(docs)
+
+########################################################################
+# Install cmake search helper for this library
+########################################################################
+if(NOT CMAKE_MODULES_DIR)
+ set(CMAKE_MODULES_DIR lib${LIB_SUFFIX}/cmake)
+endif(NOT CMAKE_MODULES_DIR)
+
+install(FILES cmake/Modules/usrptestConfig.cmake
+ DESTINATION ${CMAKE_MODULES_DIR}/usrptest
+)
diff --git a/tools/gr-usrptest/MANIFEST.md b/tools/gr-usrptest/MANIFEST.md
new file mode 100644
index 000000000..83cce8174
--- /dev/null
+++ b/tools/gr-usrptest/MANIFEST.md
@@ -0,0 +1,16 @@
+title: The USRPTEST OOT Module
+brief: Short description of gr-usrptest
+tags: # Tags are arbitrary, but look at CGRAN to see what other authors are using
+ - sdr
+author:
+ - Andrej Rode <andrej.rode@ettus.com>
+copyright_owner:
+ - Ettus Research LLC
+license:
+#repo: # Put the URL of the repository here, or leave blank for default
+#website: <module_website> # If you have a separate project website, put it here
+#icon: <icon_url> # Put a URL to a square image here that will be used as an icon on CGRAN
+---
+A longer, multi-line description of gr-usrptest.
+You may use some *basic* Markdown here.
+If left empty, it will try to find a README file instead.
diff --git a/tools/gr-usrptest/README.md b/tools/gr-usrptest/README.md
new file mode 100644
index 000000000..bc8a70c1d
--- /dev/null
+++ b/tools/gr-usrptest/README.md
@@ -0,0 +1,18 @@
+# gr-usrptest OOT-Module
+
+## Usage
+This OOT module is used to run GNU Radio-based tests on various USRPs and daughterboards to catch regressions.
+
+## Structure
+gr-usrptest follows the structure of a regular OOT-Module with some additions.
+The python directory contains a couple of additional submodules:
+ - flowgraphs
+ - Contains dynamically configured GNU Radio flowgraphs.
+ - rts_tests
+    - Contains tests which can be run unsupervised and which store their results
+ - labview_control
+ - Contains classes and functions to control a remote LabVIEW instance with python_labview_automation
+
+## Applications
+ - usrp_phasealignment.py
+    - calculates phase differences between an arbitrary number of USRP devices. Runs the phase-difference measurement a specified number of times and retunes the USRP daughterboards to a random frequency between measurements. Prints the average phase difference and standard deviation for every measurement in a human-readable format.
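The phase-difference measurement described in the application bullet above ultimately compares the phase of two synchronously received complex streams. As a rough illustration of that computation, here is a minimal NumPy sketch using the conjugate-product method; this is a common approach and an assumption for illustration only, not necessarily what `phasealignment_fg` does internally:

```python
import numpy as np

def phase_diff_stats(rx_a, rx_b):
    """Average phase difference (degrees) and stddev between two complex streams."""
    # The conjugate product gives the per-sample phase difference and avoids the
    # 2*pi wrapping issues of subtracting np.angle() values directly.
    diff = np.angle(rx_a * np.conj(rx_b), deg=True)
    return np.mean(diff), np.std(diff)

# Example with two noisy tones 30 degrees apart:
n = np.arange(10000)
a = np.exp(1j * 0.1 * n) + 0.01 * (np.random.randn(n.size) + 1j * np.random.randn(n.size))
b = np.exp(1j * (0.1 * n - np.deg2rad(30)))
print(phase_diff_stats(a, b))  # roughly (30.0, small stddev)
```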
diff --git a/tools/gr-usrptest/apps/CMakeLists.txt b/tools/gr-usrptest/apps/CMakeLists.txt
new file mode 100644
index 000000000..a40192924
--- /dev/null
+++ b/tools/gr-usrptest/apps/CMakeLists.txt
@@ -0,0 +1,26 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+include(GrPython)
+
+GR_PYTHON_INSTALL(
+ PROGRAMS
+ usrp_phasealignment.py
+ DESTINATION bin
+)
diff --git a/tools/gr-usrptest/apps/usrp_phasealignment.py b/tools/gr-usrptest/apps/usrp_phasealignment.py
new file mode 100755
index 000000000..928788be4
--- /dev/null
+++ b/tools/gr-usrptest/apps/usrp_phasealignment.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+from gnuradio.uhd.uhd_app import UHDApp
+from usrptest.flowgraphs import phasealignment_fg
+from usrptest.functions import run_test, setup_phase_alignment_parser, setup_tx_phase_alignment_parser, setup_manual_phase_alignment_parser
+import time
+import argparse
+
+
+def plot_results(results):
+ import matplotlib.pyplot as plt
+ ax = plt.axes()
+ ax.set_ylim(-180, 180)
+ for result in results:
+ plt.errorbar(
+ range(len(result['avg'])),
+ result["avg"],
+ result["stddev"],
+ label="{} - {}".format(result["first"], result["second"]),
+ axes=ax)
+ ax.legend(loc='upper left', bbox_to_anchor=(0.0, 0.0))
+ plt.show()
+
+
+def print_results(results):
+ for result in results:
+ print('Results for: {first} - {second}'.format(
+ first=result['first'], second=result['second']))
+ for i, (avg,
+ stddev) in enumerate(zip(result['avg'], result['stddev'])):
+ print('\t {}. run avg: {}, stddev: {}'.format(i + 1, avg, stddev))
+
+
+def main():
+ parser = argparse.ArgumentParser(conflict_handler='resolve')
+ UHDApp.setup_argparser(parser=parser)
+ parser = setup_phase_alignment_parser(parser)
+ parser = setup_tx_phase_alignment_parser(parser)
+ parser = setup_manual_phase_alignment_parser(parser)
+ args = parser.parse_args()
+ test_app = UHDApp(args=args)
+ if args.auto and args.start_freq and args.stop_freq:
+ from random import uniform
+ bw = (args.stop_freq - args.start_freq) / args.freq_bands
+ for nband in range(args.freq_bands):
+ freq1 = args.start_freq + nband * bw
+ new_freq = uniform(freq1, freq1 + bw)
+ test_app.args.freq = new_freq
+ raw_input(
+ "New test frequency: {:f} MHz. Adjust your signal generator and press ENTER to start measurement.".
+ format(new_freq / 1e6))
+ fg = phasealignment_fg.phasealignment_fg(test_app)
+ fg.start()
+ results = run_test(fg, args.runs)
+ fg.stop()
+ fg.wait()
+ if args.plot:
+ plot_results(results)
+ print_results(results)
+ else:
+ fg = phasealignment_fg.phasealignment_fg(test_app)
+ fg.start()
+ results = run_test(fg, args.runs)
+ fg.stop()
+ fg.wait()
+ if args.plot:
+ plot_results(results)
+ print_results(results)
+
+
+if __name__ == '__main__':
+ main()
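The `results` object passed to `print_results()` and `plot_results()` above is a list of dicts with `first`, `second`, `avg`, and `stddev` entries (one value per run). A short sketch of further post-processing of that structure, e.g. flagging device pairs whose per-run averages spread too far apart; the 5-degree threshold is an arbitrary illustration:

```python
def summarize(results, max_spread_deg=5.0):
    """Report the spread of per-run average phase differences for each device pair."""
    for result in results:
        pair = "{0} - {1}".format(result['first'], result['second'])
        spread = max(result['avg']) - min(result['avg'])
        status = "OK" if spread <= max_spread_deg else "DRIFTING"
        print("{0}: spread over runs = {1:.2f} deg [{2}]".format(pair, spread, status))

# Example using the same dict layout consumed by print_results() above:
summarize([{'first': 'A:0', 'second': 'B:0',
            'avg': [29.8, 30.2, 30.1], 'stddev': [0.4, 0.3, 0.5]}])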
diff --git a/tools/gr-usrptest/apps/usrp_selftest.py b/tools/gr-usrptest/apps/usrp_selftest.py
new file mode 100755
index 000000000..3dcd4e6ca
--- /dev/null
+++ b/tools/gr-usrptest/apps/usrp_selftest.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+import argparse
+from usrptest import parsers
+from usrptest.flowgraphs import selftest_fg
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser = parsers.add_core_args(parser)
+ parser = parsers.add_selftest_args(parser)
+ args = parser.parse_args()
+ my_flowgraph = selftest_fg.selftest_fg(args.frequency, args.samp_rate, args.dphase ,args.devices)
+ results = my_flowgraph.run()
+ print(results)
+if __name__ == '__main__':
+ main()
diff --git a/tools/gr-usrptest/cmake/Modules/CMakeParseArgumentsCopy.cmake b/tools/gr-usrptest/cmake/Modules/CMakeParseArgumentsCopy.cmake
new file mode 100644
index 000000000..7ce4c49ae
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/CMakeParseArgumentsCopy.cmake
@@ -0,0 +1,138 @@
+# CMAKE_PARSE_ARGUMENTS(<prefix> <options> <one_value_keywords> <multi_value_keywords> args...)
+#
+# CMAKE_PARSE_ARGUMENTS() is intended to be used in macros or functions for
+# parsing the arguments given to that macro or function.
+# It processes the arguments and defines a set of variables which hold the
+# values of the respective options.
+#
+# The <options> argument contains all options for the respective macro,
+# i.e. keywords which can be used when calling the macro without any value
+# following, like e.g. the OPTIONAL keyword of the install() command.
+#
+# The <one_value_keywords> argument contains all keywords for this macro
+# which are followed by one value, like e.g. DESTINATION keyword of the
+# install() command.
+#
+# The <multi_value_keywords> argument contains all keywords for this macro
+# which can be followed by more than one value, like e.g. the TARGETS or
+# FILES keywords of the install() command.
+#
+# When done, CMAKE_PARSE_ARGUMENTS() will have defined for each of the
+# keywords listed in <options>, <one_value_keywords> and
+# <multi_value_keywords> a variable composed of the given <prefix>
+# followed by "_" and the name of the respective keyword.
+# These variables will then hold the respective value from the argument list.
+# For the <options> keywords this will be TRUE or FALSE.
+#
+# All remaining arguments are collected in a variable
+# <prefix>_UNPARSED_ARGUMENTS, this can be checked afterwards to see whether
+# your macro was called with unrecognized parameters.
+#
+# As an example here a my_install() macro, which takes similar arguments as the
+# real install() command:
+#
+# function(MY_INSTALL)
+# set(options OPTIONAL FAST)
+# set(oneValueArgs DESTINATION RENAME)
+# set(multiValueArgs TARGETS CONFIGURATIONS)
+# cmake_parse_arguments(MY_INSTALL "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} )
+# ...
+#
+# Assume my_install() has been called like this:
+# my_install(TARGETS foo bar DESTINATION bin OPTIONAL blub)
+#
+# After the cmake_parse_arguments() call the macro will have set the following
+# variables:
+# MY_INSTALL_OPTIONAL = TRUE
+#    MY_INSTALL_FAST = FALSE (this option was not used when calling my_install())
+# MY_INSTALL_DESTINATION = "bin"
+# MY_INSTALL_RENAME = "" (was not used)
+# MY_INSTALL_TARGETS = "foo;bar"
+# MY_INSTALL_CONFIGURATIONS = "" (was not used)
+#    MY_INSTALL_UNPARSED_ARGUMENTS = "blub" (no value expected after "OPTIONAL")
+#
+# You can then continue and process these variables.
+#
+# Keywords terminate lists of values, e.g. if directly after a one_value_keyword
+# another recognized keyword follows, this is interpreted as the beginning of
+# the new option.
+# E.g. my_install(TARGETS foo DESTINATION OPTIONAL) would NOT result in
+# MY_INSTALL_DESTINATION being set to "OPTIONAL"; instead, MY_INSTALL_DESTINATION
+# would remain empty and MY_INSTALL_OPTIONAL would be set to TRUE.
+
+#=============================================================================
+# Copyright 2010 Alexander Neundorf <neundorf@kde.org>
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+
+if(__CMAKE_PARSE_ARGUMENTS_INCLUDED)
+ return()
+endif()
+set(__CMAKE_PARSE_ARGUMENTS_INCLUDED TRUE)
+
+
+function(CMAKE_PARSE_ARGUMENTS prefix _optionNames _singleArgNames _multiArgNames)
+ # first set all result variables to empty/FALSE
+ foreach(arg_name ${_singleArgNames} ${_multiArgNames})
+ set(${prefix}_${arg_name})
+ endforeach(arg_name)
+
+ foreach(option ${_optionNames})
+ set(${prefix}_${option} FALSE)
+ endforeach(option)
+
+ set(${prefix}_UNPARSED_ARGUMENTS)
+
+ set(insideValues FALSE)
+ set(currentArgName)
+
+ # now iterate over all arguments and fill the result variables
+ foreach(currentArg ${ARGN})
+ list(FIND _optionNames "${currentArg}" optionIndex) # ... then this marks the end of the arguments belonging to this keyword
+ list(FIND _singleArgNames "${currentArg}" singleArgIndex) # ... then this marks the end of the arguments belonging to this keyword
+ list(FIND _multiArgNames "${currentArg}" multiArgIndex) # ... then this marks the end of the arguments belonging to this keyword
+
+ if(${optionIndex} EQUAL -1 AND ${singleArgIndex} EQUAL -1 AND ${multiArgIndex} EQUAL -1)
+ if(insideValues)
+ if("${insideValues}" STREQUAL "SINGLE")
+ set(${prefix}_${currentArgName} ${currentArg})
+ set(insideValues FALSE)
+ elseif("${insideValues}" STREQUAL "MULTI")
+ list(APPEND ${prefix}_${currentArgName} ${currentArg})
+ endif()
+ else(insideValues)
+ list(APPEND ${prefix}_UNPARSED_ARGUMENTS ${currentArg})
+ endif(insideValues)
+ else()
+ if(NOT ${optionIndex} EQUAL -1)
+ set(${prefix}_${currentArg} TRUE)
+ set(insideValues FALSE)
+ elseif(NOT ${singleArgIndex} EQUAL -1)
+ set(currentArgName ${currentArg})
+ set(${prefix}_${currentArgName})
+ set(insideValues "SINGLE")
+ elseif(NOT ${multiArgIndex} EQUAL -1)
+ set(currentArgName ${currentArg})
+ set(${prefix}_${currentArgName})
+ set(insideValues "MULTI")
+ endif()
+ endif()
+
+ endforeach(currentArg)
+
+ # propagate the result variables to the caller:
+ foreach(arg_name ${_singleArgNames} ${_multiArgNames} ${_optionNames})
+ set(${prefix}_${arg_name} ${${prefix}_${arg_name}} PARENT_SCOPE)
+ endforeach(arg_name)
+ set(${prefix}_UNPARSED_ARGUMENTS ${${prefix}_UNPARSED_ARGUMENTS} PARENT_SCOPE)
+
+endfunction(CMAKE_PARSE_ARGUMENTS _options _singleArgs _multiArgs)
diff --git a/tools/gr-usrptest/cmake/Modules/FindCppUnit.cmake b/tools/gr-usrptest/cmake/Modules/FindCppUnit.cmake
new file mode 100644
index 000000000..f93ade341
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/FindCppUnit.cmake
@@ -0,0 +1,39 @@
+# http://www.cmake.org/pipermail/cmake/2006-October/011446.html
+# Modified to use pkg config and use standard var names
+
+#
+# Find the CppUnit includes and library
+#
+# This module defines
+# CPPUNIT_INCLUDE_DIRS, where to find cppunit/TestCase.h, etc.
+# CPPUNIT_LIBRARIES, the libraries to link against to use CppUnit.
+# CPPUNIT_FOUND, If false, do not try to use CppUnit.
+
+INCLUDE(FindPkgConfig)
+PKG_CHECK_MODULES(PC_CPPUNIT "cppunit")
+
+FIND_PATH(CPPUNIT_INCLUDE_DIRS
+ NAMES cppunit/TestCase.h
+ HINTS ${PC_CPPUNIT_INCLUDE_DIR}
+ ${CMAKE_INSTALL_PREFIX}/include
+ PATHS
+ /usr/local/include
+ /usr/include
+)
+
+FIND_LIBRARY(CPPUNIT_LIBRARIES
+ NAMES cppunit
+ HINTS ${PC_CPPUNIT_LIBDIR}
+ ${CMAKE_INSTALL_PREFIX}/lib
+ ${CMAKE_INSTALL_PREFIX}/lib64
+ PATHS
+ ${CPPUNIT_INCLUDE_DIRS}/../lib
+ /usr/local/lib
+ /usr/lib
+)
+
+LIST(APPEND CPPUNIT_LIBRARIES ${CMAKE_DL_LIBS})
+
+INCLUDE(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(CPPUNIT DEFAULT_MSG CPPUNIT_LIBRARIES CPPUNIT_INCLUDE_DIRS)
+MARK_AS_ADVANCED(CPPUNIT_LIBRARIES CPPUNIT_INCLUDE_DIRS)
diff --git a/tools/gr-usrptest/cmake/Modules/FindGnuradioRuntime.cmake b/tools/gr-usrptest/cmake/Modules/FindGnuradioRuntime.cmake
new file mode 100644
index 000000000..afed684a5
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/FindGnuradioRuntime.cmake
@@ -0,0 +1,36 @@
+INCLUDE(FindPkgConfig)
+PKG_CHECK_MODULES(PC_GNURADIO_RUNTIME gnuradio-runtime)
+
+if(PC_GNURADIO_RUNTIME_FOUND)
+ # look for include files
+ FIND_PATH(
+ GNURADIO_RUNTIME_INCLUDE_DIRS
+ NAMES gnuradio/top_block.h
+ HINTS $ENV{GNURADIO_RUNTIME_DIR}/include
+ ${PC_GNURADIO_RUNTIME_INCLUDE_DIRS}
+ ${CMAKE_INSTALL_PREFIX}/include
+ PATHS /usr/local/include
+ /usr/include
+ )
+
+ # look for libs
+ FIND_LIBRARY(
+ GNURADIO_RUNTIME_LIBRARIES
+ NAMES gnuradio-runtime
+ HINTS $ENV{GNURADIO_RUNTIME_DIR}/lib
+ ${PC_GNURADIO_RUNTIME_LIBDIR}
+ ${CMAKE_INSTALL_PREFIX}/lib/
+ ${CMAKE_INSTALL_PREFIX}/lib64/
+ PATHS /usr/local/lib
+ /usr/local/lib64
+ /usr/lib
+ /usr/lib64
+ )
+
+ set(GNURADIO_RUNTIME_FOUND ${PC_GNURADIO_RUNTIME_FOUND})
+endif(PC_GNURADIO_RUNTIME_FOUND)
+
+INCLUDE(FindPackageHandleStandardArgs)
+# do not check GNURADIO_RUNTIME_INCLUDE_DIRS; it is not set when the default include path is used.
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(GNURADIO_RUNTIME DEFAULT_MSG GNURADIO_RUNTIME_LIBRARIES)
+MARK_AS_ADVANCED(GNURADIO_RUNTIME_LIBRARIES GNURADIO_RUNTIME_INCLUDE_DIRS)
diff --git a/tools/gr-usrptest/cmake/Modules/GrMiscUtils.cmake b/tools/gr-usrptest/cmake/Modules/GrMiscUtils.cmake
new file mode 100644
index 000000000..5bad57c51
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/GrMiscUtils.cmake
@@ -0,0 +1,528 @@
+# Copyright 2010-2011,2014 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+if(DEFINED __INCLUDED_GR_MISC_UTILS_CMAKE)
+ return()
+endif()
+set(__INCLUDED_GR_MISC_UTILS_CMAKE TRUE)
+
+########################################################################
+# Set global variable macro.
+# Used for subdirectories to export settings.
+# Example: include and library paths.
+########################################################################
+function(GR_SET_GLOBAL var)
+ set(${var} ${ARGN} CACHE INTERNAL "" FORCE)
+endfunction(GR_SET_GLOBAL)
+
+########################################################################
+# Set the pre-processor definition if the condition is true.
+# - def the pre-processor definition to set and condition name
+########################################################################
+function(GR_ADD_COND_DEF def)
+ if(${def})
+ add_definitions(-D${def})
+ endif(${def})
+endfunction(GR_ADD_COND_DEF)
+
+########################################################################
+# Check for a header and conditionally set a compile define.
+# - hdr the relative path to the header file
+# - def the pre-processor definition to set
+########################################################################
+function(GR_CHECK_HDR_N_DEF hdr def)
+ include(CheckIncludeFileCXX)
+ CHECK_INCLUDE_FILE_CXX(${hdr} ${def})
+ GR_ADD_COND_DEF(${def})
+endfunction(GR_CHECK_HDR_N_DEF)
+
+########################################################################
+# Include subdirectory macro.
+# Sets the CMake directory variables,
+# includes the subdirectory CMakeLists.txt,
+# resets the CMake directory variables.
+#
+# This macro includes subdirectories rather than adding them
+# so that the subdirectory can affect variables in the level above.
+# This provides a work-around for the lack of convenience libraries.
+# This way a subdirectory can append to the list of library sources.
+########################################################################
+macro(GR_INCLUDE_SUBDIRECTORY subdir)
+ #insert the current directories on the front of the list
+ list(INSERT _cmake_source_dirs 0 ${CMAKE_CURRENT_SOURCE_DIR})
+ list(INSERT _cmake_binary_dirs 0 ${CMAKE_CURRENT_BINARY_DIR})
+
+ #set the current directories to the names of the subdirs
+ set(CMAKE_CURRENT_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/${subdir})
+ set(CMAKE_CURRENT_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/${subdir})
+
+ #include the subdirectory CMakeLists to run it
+ file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
+ include(${CMAKE_CURRENT_SOURCE_DIR}/CMakeLists.txt)
+
+ #reset the value of the current directories
+ list(GET _cmake_source_dirs 0 CMAKE_CURRENT_SOURCE_DIR)
+ list(GET _cmake_binary_dirs 0 CMAKE_CURRENT_BINARY_DIR)
+
+    #pop the subdir names off the front of the list
+ list(REMOVE_AT _cmake_source_dirs 0)
+ list(REMOVE_AT _cmake_binary_dirs 0)
+endmacro(GR_INCLUDE_SUBDIRECTORY)
+
+########################################################################
+# Check if a compiler flag works and conditionally set a compile define.
+# - flag the compiler flag to check for
+# - have the variable to set with result
+########################################################################
+macro(GR_ADD_CXX_COMPILER_FLAG_IF_AVAILABLE flag have)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(${flag} ${have})
+ if(${have})
+ if(${CMAKE_VERSION} VERSION_GREATER "2.8.4")
+ STRING(FIND "${CMAKE_CXX_FLAGS}" "${flag}" flag_dup)
+ if(${flag_dup} EQUAL -1)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${flag}")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${flag}")
+ endif(${flag_dup} EQUAL -1)
+ endif(${CMAKE_VERSION} VERSION_GREATER "2.8.4")
+ endif(${have})
+endmacro(GR_ADD_CXX_COMPILER_FLAG_IF_AVAILABLE)
+
+########################################################################
+# Generates the .la libtool file
+# This appears to generate libtool files that cannot be used by auto*.
+# Usage GR_LIBTOOL(TARGET [target] DESTINATION [dest])
+# Notice: there is no COMPONENT option; these will not get distributed.
+########################################################################
+function(GR_LIBTOOL)
+ if(NOT DEFINED GENERATE_LIBTOOL)
+ set(GENERATE_LIBTOOL OFF) #disabled by default
+ endif()
+
+ if(GENERATE_LIBTOOL)
+ include(CMakeParseArgumentsCopy)
+ CMAKE_PARSE_ARGUMENTS(GR_LIBTOOL "" "TARGET;DESTINATION" "" ${ARGN})
+
+ find_program(LIBTOOL libtool)
+ if(LIBTOOL)
+ include(CMakeMacroLibtoolFile)
+ CREATE_LIBTOOL_FILE(${GR_LIBTOOL_TARGET} /${GR_LIBTOOL_DESTINATION})
+ endif(LIBTOOL)
+ endif(GENERATE_LIBTOOL)
+
+endfunction(GR_LIBTOOL)
+
+########################################################################
+# Do standard things to the library target
+# - set target properties
+# - make install rules
+# Also handle gnuradio custom naming conventions w/ extras mode.
+########################################################################
+function(GR_LIBRARY_FOO target)
+ #parse the arguments for component names
+ include(CMakeParseArgumentsCopy)
+ CMAKE_PARSE_ARGUMENTS(GR_LIBRARY "" "RUNTIME_COMPONENT;DEVEL_COMPONENT" "" ${ARGN})
+
+ #set additional target properties
+ set_target_properties(${target} PROPERTIES SOVERSION ${LIBVER})
+
+ #install the generated files like so...
+ install(TARGETS ${target}
+ LIBRARY DESTINATION ${GR_LIBRARY_DIR} COMPONENT ${GR_LIBRARY_RUNTIME_COMPONENT} # .so/.dylib file
+ ARCHIVE DESTINATION ${GR_LIBRARY_DIR} COMPONENT ${GR_LIBRARY_DEVEL_COMPONENT} # .lib file
+ RUNTIME DESTINATION ${GR_RUNTIME_DIR} COMPONENT ${GR_LIBRARY_RUNTIME_COMPONENT} # .dll file
+ )
+
+ #extras mode enabled automatically on linux
+ if(NOT DEFINED LIBRARY_EXTRAS)
+ set(LIBRARY_EXTRAS ${LINUX})
+ endif()
+
+ #special extras mode to enable alternative naming conventions
+ if(LIBRARY_EXTRAS)
+
+ #create .la file before changing props
+ GR_LIBTOOL(TARGET ${target} DESTINATION ${GR_LIBRARY_DIR})
+
+ #give the library a special name with ultra-zero soversion
+ set_target_properties(${target} PROPERTIES OUTPUT_NAME ${target}-${LIBVER} SOVERSION "0.0.0")
+ set(target_name lib${target}-${LIBVER}.so.0.0.0)
+
+ #custom command to generate symlinks
+ add_custom_command(
+ TARGET ${target}
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E create_symlink ${target_name} ${CMAKE_CURRENT_BINARY_DIR}/lib${target}.so
+ COMMAND ${CMAKE_COMMAND} -E create_symlink ${target_name} ${CMAKE_CURRENT_BINARY_DIR}/lib${target}-${LIBVER}.so.0
+ COMMAND ${CMAKE_COMMAND} -E touch ${target_name} #so the symlinks point to something valid so cmake 2.6 will install
+ )
+
+ #and install the extra symlinks
+ install(
+ FILES
+ ${CMAKE_CURRENT_BINARY_DIR}/lib${target}.so
+ ${CMAKE_CURRENT_BINARY_DIR}/lib${target}-${LIBVER}.so.0
+ DESTINATION ${GR_LIBRARY_DIR} COMPONENT ${GR_LIBRARY_RUNTIME_COMPONENT}
+ )
+
+ endif(LIBRARY_EXTRAS)
+endfunction(GR_LIBRARY_FOO)
+
+########################################################################
+# Create a dummy custom command that depends on other targets.
+# Usage:
+# GR_GEN_TARGET_DEPS(unique_name target_deps <target1> <target2> ...)
+# ADD_CUSTOM_COMMAND(<the usual args> ${target_deps})
+#
+# Custom commands can't depend on targets, but they can depend on executables,
+# and executables can depend on targets, hence the dummy executable below:
+########################################################################
+function(GR_GEN_TARGET_DEPS name var)
+ file(
+ WRITE ${CMAKE_CURRENT_BINARY_DIR}/${name}.cpp.in
+ "int main(void){return 0;}\n"
+ )
+ execute_process(
+ COMMAND ${CMAKE_COMMAND} -E copy_if_different
+ ${CMAKE_CURRENT_BINARY_DIR}/${name}.cpp.in
+ ${CMAKE_CURRENT_BINARY_DIR}/${name}.cpp
+ )
+ add_executable(${name} ${CMAKE_CURRENT_BINARY_DIR}/${name}.cpp)
+ if(ARGN)
+ add_dependencies(${name} ${ARGN})
+ endif(ARGN)
+
+ if(CMAKE_CROSSCOMPILING)
+ set(${var} "DEPENDS;${name}" PARENT_SCOPE) #cant call command when cross
+ else()
+ set(${var} "DEPENDS;${name};COMMAND;${name}" PARENT_SCOPE)
+ endif()
+endfunction(GR_GEN_TARGET_DEPS)
+
+########################################################################
+# Control use of gr_logger
+# Usage:
+# GR_LOGGING()
+#
+# Will set ENABLE_GR_LOG to 1 by default.
+# Can manually set with -DENABLE_GR_LOG=0|1
+########################################################################
+function(GR_LOGGING)
+ find_package(Log4cpp)
+
+ OPTION(ENABLE_GR_LOG "Use gr_logger" ON)
+ if(ENABLE_GR_LOG)
+ # If gr_logger is enabled, make it usable
+ add_definitions( -DENABLE_GR_LOG )
+
+ # also test LOG4CPP; if we have it, use this version of the logger
+ # otherwise, default to the stdout/stderr model.
+ if(LOG4CPP_FOUND)
+ SET(HAVE_LOG4CPP True CACHE INTERNAL "" FORCE)
+ add_definitions( -DHAVE_LOG4CPP )
+ else(not LOG4CPP_FOUND)
+ SET(HAVE_LOG4CPP False CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_INCLUDE_DIRS "" CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_LIBRARY_DIRS "" CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_LIBRARIES "" CACHE INTERNAL "" FORCE)
+ endif(LOG4CPP_FOUND)
+
+ SET(ENABLE_GR_LOG ${ENABLE_GR_LOG} CACHE INTERNAL "" FORCE)
+
+ else(ENABLE_GR_LOG)
+ SET(HAVE_LOG4CPP False CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_INCLUDE_DIRS "" CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_LIBRARY_DIRS "" CACHE INTERNAL "" FORCE)
+ SET(LOG4CPP_LIBRARIES "" CACHE INTERNAL "" FORCE)
+ endif(ENABLE_GR_LOG)
+
+ message(STATUS "ENABLE_GR_LOG set to ${ENABLE_GR_LOG}.")
+ message(STATUS "HAVE_LOG4CPP set to ${HAVE_LOG4CPP}.")
+ message(STATUS "LOG4CPP_LIBRARIES set to ${LOG4CPP_LIBRARIES}.")
+
+endfunction(GR_LOGGING)
+
+########################################################################
+# Run GRCC to compile .grc files into .py files.
+#
+# Usage: GRCC(filename, directory)
+# - filenames: list of .grc file names to compile
+# - directory: directory for the generated .py files - usually
+#   ${CMAKE_CURRENT_BINARY_DIR}
+# - Sets PYFILES to the list of generated Python file names.
+########################################################################
+function(GRCC)
+ # Extract directory from list of args, remove it for the list of filenames.
+ list(GET ARGV -1 directory)
+ list(REMOVE_AT ARGV -1)
+ set(filenames ${ARGV})
+ file(MAKE_DIRECTORY ${directory})
+
+ SET(GRCC_COMMAND ${CMAKE_SOURCE_DIR}/gr-utils/python/grcc)
+
+ # GRCC uses some stuff in grc and gnuradio-runtime, so we force
+ # the known paths here
+ list(APPEND PYTHONPATHS
+ ${CMAKE_SOURCE_DIR}
+ ${CMAKE_SOURCE_DIR}/gnuradio-runtime/python
+ ${CMAKE_SOURCE_DIR}/gnuradio-runtime/lib/swig
+ ${CMAKE_BINARY_DIR}/gnuradio-runtime/lib/swig
+ )
+
+ if(WIN32)
+ #SWIG generates the python library files into a subdirectory.
+ #Therefore, we must append this subdirectory into PYTHONPATH.
+ #Only do this for the python directories matching the following:
+ foreach(pydir ${PYTHONPATHS})
+ get_filename_component(name ${pydir} NAME)
+ if(name MATCHES "^(swig|lib|src)$")
+ list(APPEND PYTHONPATHS ${pydir}/${CMAKE_BUILD_TYPE})
+ endif()
+ endforeach(pydir)
+ endif(WIN32)
+
+ file(TO_NATIVE_PATH "${PYTHONPATHS}" pypath)
+
+ if(UNIX)
+ list(APPEND pypath "$PYTHONPATH")
+ string(REPLACE ";" ":" pypath "${pypath}")
+ set(ENV{PYTHONPATH} ${pypath})
+ endif(UNIX)
+
+ if(WIN32)
+ list(APPEND pypath "%PYTHONPATH%")
+ string(REPLACE ";" "\\;" pypath "${pypath}")
+ #list(APPEND environs "PYTHONPATH=${pypath}")
+ set(ENV{PYTHONPATH} ${pypath})
+ endif(WIN32)
+
+ foreach(f ${filenames})
+ execute_process(
+ COMMAND ${GRCC_COMMAND} -d ${directory} ${f}
+ )
+ string(REPLACE ".grc" ".py" pyfile "${f}")
+ string(REPLACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_BINARY_DIR}" pyfile "${pyfile}")
+ list(APPEND pyfiles ${pyfile})
+ endforeach(f)
+
+ set(PYFILES ${pyfiles} PARENT_SCOPE)
+endfunction(GRCC)
+
+########################################################################
+# Check if HAVE_PTHREAD_SETSCHEDPARAM and HAVE_SCHED_SETSCHEDULER
+# should be defined
+########################################################################
+macro(GR_CHECK_LINUX_SCHED_AVAIL)
+set(CMAKE_REQUIRED_LIBRARIES -lpthread)
+ CHECK_CXX_SOURCE_COMPILES("
+ #include <pthread.h>
+ int main(){
+ pthread_t pthread;
+ pthread_setschedparam(pthread, 0, 0);
+ return 0;
+ } " HAVE_PTHREAD_SETSCHEDPARAM
+ )
+ GR_ADD_COND_DEF(HAVE_PTHREAD_SETSCHEDPARAM)
+
+ CHECK_CXX_SOURCE_COMPILES("
+ #include <sched.h>
+ int main(){
+ pid_t pid;
+ sched_setscheduler(pid, 0, 0);
+ return 0;
+ } " HAVE_SCHED_SETSCHEDULER
+ )
+ GR_ADD_COND_DEF(HAVE_SCHED_SETSCHEDULER)
+endmacro(GR_CHECK_LINUX_SCHED_AVAIL)
+
+########################################################################
+# Macros to generate source and header files from template
+########################################################################
+macro(GR_EXPAND_X_H component root)
+
+ include(GrPython)
+
+ file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+"#!${PYTHON_EXECUTABLE}
+
+import sys, os, re
+sys.path.append('${GR_RUNTIME_PYTHONPATH}')
+sys.path.append('${CMAKE_SOURCE_DIR}/python')
+os.environ['srcdir'] = '${CMAKE_CURRENT_SOURCE_DIR}'
+os.chdir('${CMAKE_CURRENT_BINARY_DIR}')
+
+if __name__ == '__main__':
+ import build_utils
+ root, inp = sys.argv[1:3]
+ for sig in sys.argv[3:]:
+ name = re.sub ('X+', sig, root)
+ d = build_utils.standard_dict2(name, sig, '${component}')
+ build_utils.expand_template(d, inp)
+")
+
+ #make a list of all the generated headers
+ unset(expanded_files_h)
+ foreach(sig ${ARGN})
+ string(REGEX REPLACE "X+" ${sig} name ${root})
+ list(APPEND expanded_files_h ${CMAKE_CURRENT_BINARY_DIR}/${name}.h)
+ endforeach(sig)
+ unset(name)
+
+ #create a command to generate the headers
+ add_custom_command(
+ OUTPUT ${expanded_files_h}
+ DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${root}.h.t
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+ ${root} ${root}.h.t ${ARGN}
+ )
+
+ #install rules for the generated headers
+ list(APPEND generated_includes ${expanded_files_h})
+
+endmacro(GR_EXPAND_X_H)
+
+macro(GR_EXPAND_X_CC_H component root)
+
+ include(GrPython)
+
+ file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+"#!${PYTHON_EXECUTABLE}
+
+import sys, os, re
+sys.path.append('${GR_RUNTIME_PYTHONPATH}')
+sys.path.append('${CMAKE_SOURCE_DIR}/python')
+os.environ['srcdir'] = '${CMAKE_CURRENT_SOURCE_DIR}'
+os.chdir('${CMAKE_CURRENT_BINARY_DIR}')
+
+if __name__ == '__main__':
+ import build_utils
+ root, inp = sys.argv[1:3]
+ for sig in sys.argv[3:]:
+ name = re.sub ('X+', sig, root)
+ d = build_utils.standard_impl_dict2(name, sig, '${component}')
+ build_utils.expand_template(d, inp)
+")
+
+ #make a list of all the generated files
+ unset(expanded_files_cc)
+ unset(expanded_files_h)
+ foreach(sig ${ARGN})
+ string(REGEX REPLACE "X+" ${sig} name ${root})
+ list(APPEND expanded_files_cc ${CMAKE_CURRENT_BINARY_DIR}/${name}.cc)
+ list(APPEND expanded_files_h ${CMAKE_CURRENT_BINARY_DIR}/${name}.h)
+ endforeach(sig)
+ unset(name)
+
+ #create a command to generate the source files
+ add_custom_command(
+ OUTPUT ${expanded_files_cc}
+ DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${root}.cc.t
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+ ${root} ${root}.cc.t ${ARGN}
+ )
+
+ #create a command to generate the header files
+ add_custom_command(
+ OUTPUT ${expanded_files_h}
+ DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${root}.h.t
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+ ${root} ${root}.h.t ${ARGN}
+ )
+
+ #make source files depends on headers to force generation
+ set_source_files_properties(${expanded_files_cc}
+ PROPERTIES OBJECT_DEPENDS "${expanded_files_h}"
+ )
+
+ #install rules for the generated files
+ list(APPEND generated_sources ${expanded_files_cc})
+ list(APPEND generated_headers ${expanded_files_h})
+
+endmacro(GR_EXPAND_X_CC_H)
+
+macro(GR_EXPAND_X_CC_H_IMPL component root)
+
+ include(GrPython)
+
+ file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+"#!${PYTHON_EXECUTABLE}
+
+import sys, os, re
+sys.path.append('${GR_RUNTIME_PYTHONPATH}')
+sys.path.append('${CMAKE_SOURCE_DIR}/python')
+os.environ['srcdir'] = '${CMAKE_CURRENT_SOURCE_DIR}'
+os.chdir('${CMAKE_CURRENT_BINARY_DIR}')
+
+if __name__ == '__main__':
+ import build_utils
+ root, inp = sys.argv[1:3]
+ for sig in sys.argv[3:]:
+ name = re.sub ('X+', sig, root)
+ d = build_utils.standard_dict(name, sig, '${component}')
+ build_utils.expand_template(d, inp, '_impl')
+")
+
+ #make a list of all the generated files
+ unset(expanded_files_cc_impl)
+ unset(expanded_files_h_impl)
+ unset(expanded_files_h)
+ foreach(sig ${ARGN})
+ string(REGEX REPLACE "X+" ${sig} name ${root})
+ list(APPEND expanded_files_cc_impl ${CMAKE_CURRENT_BINARY_DIR}/${name}_impl.cc)
+ list(APPEND expanded_files_h_impl ${CMAKE_CURRENT_BINARY_DIR}/${name}_impl.h)
+ list(APPEND expanded_files_h ${CMAKE_CURRENT_BINARY_DIR}/../include/gnuradio/${component}/${name}.h)
+ endforeach(sig)
+ unset(name)
+
+ #create a command to generate the _impl.cc files
+ add_custom_command(
+ OUTPUT ${expanded_files_cc_impl}
+ DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${root}_impl.cc.t
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+ ${root} ${root}_impl.cc.t ${ARGN}
+ )
+
+ #create a command to generate the _impl.h files
+ add_custom_command(
+ OUTPUT ${expanded_files_h_impl}
+ DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${root}_impl.h.t
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_CURRENT_BINARY_DIR}/generate_helper.py
+ ${root} ${root}_impl.h.t ${ARGN}
+ )
+
+ #make _impl.cc source files depend on _impl.h to force generation
+ set_source_files_properties(${expanded_files_cc_impl}
+ PROPERTIES OBJECT_DEPENDS "${expanded_files_h_impl}"
+ )
+
+ #make _impl.h source files depend on headers to force generation
+ set_source_files_properties(${expanded_files_h_impl}
+ PROPERTIES OBJECT_DEPENDS "${expanded_files_h}"
+ )
+
+ #install rules for the generated files
+ list(APPEND generated_sources ${expanded_files_cc_impl})
+ list(APPEND generated_headers ${expanded_files_h_impl})
+
+endmacro(GR_EXPAND_X_CC_H_IMPL)
diff --git a/tools/gr-usrptest/cmake/Modules/GrPlatform.cmake b/tools/gr-usrptest/cmake/Modules/GrPlatform.cmake
new file mode 100644
index 000000000..fbbea5fee
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/GrPlatform.cmake
@@ -0,0 +1,54 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+if(DEFINED __INCLUDED_GR_PLATFORM_CMAKE)
+ return()
+endif()
+set(__INCLUDED_GR_PLATFORM_CMAKE TRUE)
+
+########################################################################
+# Setup additional defines for OS types
+########################################################################
+if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
+ set(LINUX TRUE)
+endif()
+
+if(NOT CMAKE_CROSSCOMPILING AND LINUX AND EXISTS "/etc/debian_version")
+ set(DEBIAN TRUE)
+endif()
+
+if(NOT CMAKE_CROSSCOMPILING AND LINUX AND EXISTS "/etc/redhat-release")
+ set(REDHAT TRUE)
+endif()
+
+if(NOT CMAKE_CROSSCOMPILING AND LINUX AND EXISTS "/etc/slackware-version")
+ set(SLACKWARE TRUE)
+endif()
+
+########################################################################
+# when the library suffix should be 64 (applies to redhat linux family)
+########################################################################
+if (REDHAT OR SLACKWARE)
+ set(LIB64_CONVENTION TRUE)
+endif()
+
+if(NOT DEFINED LIB_SUFFIX AND LIB64_CONVENTION AND CMAKE_SYSTEM_PROCESSOR MATCHES "64$")
+ set(LIB_SUFFIX 64)
+endif()
+set(LIB_SUFFIX ${LIB_SUFFIX} CACHE STRING "lib directory suffix")
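The LIB_SUFFIX cache variable set above is meant to be appended to library install directories; a hedged sketch of how a consumer might use it (the target name is illustrative, not defined in this file):

    # installs into lib64/ on 64-bit RedHat/Slackware systems, lib/ elsewhere
    install(TARGETS gnuradio-usrptest
        LIBRARY DESTINATION lib${LIB_SUFFIX}
    )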
diff --git a/tools/gr-usrptest/cmake/Modules/GrPython.cmake b/tools/gr-usrptest/cmake/Modules/GrPython.cmake
new file mode 100644
index 000000000..06e061e21
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/GrPython.cmake
@@ -0,0 +1,241 @@
+# Copyright 2010-2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+if(DEFINED __INCLUDED_GR_PYTHON_CMAKE)
+ return()
+endif()
+set(__INCLUDED_GR_PYTHON_CMAKE TRUE)
+
+########################################################################
+# Setup the python interpreter:
+# This allows the user to specify a specific interpreter,
+# or finds the interpreter via the built-in cmake module.
+########################################################################
+#this allows the user to override PYTHON_EXECUTABLE
+if(PYTHON_EXECUTABLE)
+
+ set(PYTHONINTERP_FOUND TRUE)
+
+#otherwise if not set, try to automatically find it
+else(PYTHON_EXECUTABLE)
+
+ #use the built-in find script
+ find_package(PythonInterp 2)
+
+ #and if that fails use the find program routine
+ if(NOT PYTHONINTERP_FOUND)
+ find_program(PYTHON_EXECUTABLE NAMES python python2 python2.7 python2.6 python2.5)
+ if(PYTHON_EXECUTABLE)
+ set(PYTHONINTERP_FOUND TRUE)
+ endif(PYTHON_EXECUTABLE)
+ endif(NOT PYTHONINTERP_FOUND)
+
+endif(PYTHON_EXECUTABLE)
+
+if (CMAKE_CROSSCOMPILING)
+ set(QA_PYTHON_EXECUTABLE "/usr/bin/python")
+else (CMAKE_CROSSCOMPILING)
+ set(QA_PYTHON_EXECUTABLE ${PYTHON_EXECUTABLE})
+endif(CMAKE_CROSSCOMPILING)
+
+#make the path to the executable appear in the cmake gui
+set(PYTHON_EXECUTABLE ${PYTHON_EXECUTABLE} CACHE FILEPATH "python interpreter")
+set(QA_PYTHON_EXECUTABLE ${QA_PYTHON_EXECUTABLE} CACHE FILEPATH "python interpreter for QA tests")
+
+#make sure we can use -B with python (introduced in 2.6)
+if(PYTHON_EXECUTABLE)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -B -c ""
+ OUTPUT_QUIET ERROR_QUIET
+ RESULT_VARIABLE PYTHON_HAS_DASH_B_RESULT
+ )
+ if(PYTHON_HAS_DASH_B_RESULT EQUAL 0)
+ set(PYTHON_DASH_B "-B")
+ endif()
+endif(PYTHON_EXECUTABLE)
+
+########################################################################
+# Check for the existence of a python module:
+# - desc a string description of the check
+# - mod the name of the module to import
+# - cmd an additional command to run
+# - have the result variable to set
+########################################################################
+macro(GR_PYTHON_CHECK_MODULE desc mod cmd have)
+ message(STATUS "")
+ message(STATUS "Python checking for ${desc}")
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "
+#########################################
+try:
+ import ${mod}
+ assert ${cmd}
+except (ImportError, AssertionError): exit(-1)
+except: pass
+#########################################"
+ RESULT_VARIABLE ${have}
+ )
+ if(${have} EQUAL 0)
+ message(STATUS "Python checking for ${desc} - found")
+ set(${have} TRUE)
+ else(${have} EQUAL 0)
+ message(STATUS "Python checking for ${desc} - not found")
+ set(${have} FALSE)
+ endif(${have} EQUAL 0)
+endmacro(GR_PYTHON_CHECK_MODULE)
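A usage sketch for the check macro above (the module, test expression, and result variable are illustrative):

    # verify that numpy imports and exposes a version attribute
    GR_PYTHON_CHECK_MODULE("numpy" numpy "numpy.version.version" HAVE_PYTHON_NUMPY)
    if(NOT HAVE_PYTHON_NUMPY)
        message(STATUS "numpy not found; python QA tests may be skipped")
    endif()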
+
+########################################################################
+# Sets the python installation directory GR_PYTHON_DIR
+########################################################################
+if(NOT DEFINED GR_PYTHON_DIR)
+execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "
+from distutils import sysconfig
+print sysconfig.get_python_lib(plat_specific=True, prefix='')
+" OUTPUT_VARIABLE GR_PYTHON_DIR OUTPUT_STRIP_TRAILING_WHITESPACE
+)
+endif()
+file(TO_CMAKE_PATH ${GR_PYTHON_DIR} GR_PYTHON_DIR)
+
+########################################################################
+# Create an always-built target with a unique name
+# Usage: GR_UNIQUE_TARGET(<description> <dependencies list>)
+########################################################################
+function(GR_UNIQUE_TARGET desc)
+ file(RELATIVE_PATH reldir ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR})
+ execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import re, hashlib
+unique = hashlib.md5('${reldir}${ARGN}').hexdigest()[:5]
+print(re.sub('\\W', '_', '${desc} ${reldir} ' + unique))"
+ OUTPUT_VARIABLE _target OUTPUT_STRIP_TRAILING_WHITESPACE)
+ add_custom_target(${_target} ALL DEPENDS ${ARGN})
+endfunction(GR_UNIQUE_TARGET)
+
+########################################################################
+# Install python sources (also builds and installs byte-compiled python)
+########################################################################
+function(GR_PYTHON_INSTALL)
+ include(CMakeParseArgumentsCopy)
+ CMAKE_PARSE_ARGUMENTS(GR_PYTHON_INSTALL "" "DESTINATION;COMPONENT" "FILES;PROGRAMS" ${ARGN})
+
+ ####################################################################
+ if(GR_PYTHON_INSTALL_FILES)
+ ####################################################################
+ install(${ARGN}) #installs regular python files
+
+ #create a list of all generated files
+ unset(pysrcfiles)
+ unset(pycfiles)
+ unset(pyofiles)
+ foreach(pyfile ${GR_PYTHON_INSTALL_FILES})
+ get_filename_component(pyfile ${pyfile} ABSOLUTE)
+ list(APPEND pysrcfiles ${pyfile})
+
+ #determine if this file is in the source or binary directory
+ file(RELATIVE_PATH source_rel_path ${CMAKE_CURRENT_SOURCE_DIR} ${pyfile})
+ string(LENGTH "${source_rel_path}" source_rel_path_len)
+ file(RELATIVE_PATH binary_rel_path ${CMAKE_CURRENT_BINARY_DIR} ${pyfile})
+ string(LENGTH "${binary_rel_path}" binary_rel_path_len)
+
+ #and set the generated path appropriately
+ if(${source_rel_path_len} GREATER ${binary_rel_path_len})
+ set(pygenfile ${CMAKE_CURRENT_BINARY_DIR}/${binary_rel_path})
+ else()
+ set(pygenfile ${CMAKE_CURRENT_BINARY_DIR}/${source_rel_path})
+ endif()
+ list(APPEND pycfiles ${pygenfile}c)
+ list(APPEND pyofiles ${pygenfile}o)
+
+ #ensure generation path exists
+ get_filename_component(pygen_path ${pygenfile} PATH)
+ file(MAKE_DIRECTORY ${pygen_path})
+
+ endforeach(pyfile)
+
+ #the command to generate the pyc files
+ add_custom_command(
+ DEPENDS ${pysrcfiles} OUTPUT ${pycfiles}
+ COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/python_compile_helper.py ${pysrcfiles} ${pycfiles}
+ )
+
+ #the command to generate the pyo files
+ add_custom_command(
+ DEPENDS ${pysrcfiles} OUTPUT ${pyofiles}
+ COMMAND ${PYTHON_EXECUTABLE} -O ${CMAKE_BINARY_DIR}/python_compile_helper.py ${pysrcfiles} ${pyofiles}
+ )
+
+ #create install rule and add generated files to target list
+ set(python_install_gen_targets ${pycfiles} ${pyofiles})
+ install(FILES ${python_install_gen_targets}
+ DESTINATION ${GR_PYTHON_INSTALL_DESTINATION}
+ COMPONENT ${GR_PYTHON_INSTALL_COMPONENT}
+ )
+
+ ####################################################################
+ elseif(GR_PYTHON_INSTALL_PROGRAMS)
+ ####################################################################
+ file(TO_NATIVE_PATH ${PYTHON_EXECUTABLE} pyexe_native)
+
+ if (CMAKE_CROSSCOMPILING)
+ set(pyexe_native "/usr/bin/env python")
+ endif()
+
+ foreach(pyfile ${GR_PYTHON_INSTALL_PROGRAMS})
+ get_filename_component(pyfile_name ${pyfile} NAME)
+ get_filename_component(pyfile ${pyfile} ABSOLUTE)
+ string(REPLACE "${CMAKE_SOURCE_DIR}" "${CMAKE_BINARY_DIR}" pyexefile "${pyfile}.exe")
+ list(APPEND python_install_gen_targets ${pyexefile})
+
+ get_filename_component(pyexefile_path ${pyexefile} PATH)
+ file(MAKE_DIRECTORY ${pyexefile_path})
+
+ add_custom_command(
+ OUTPUT ${pyexefile} DEPENDS ${pyfile}
+ COMMAND ${PYTHON_EXECUTABLE} -c
+ "import re; R=re.compile('^\#!.*$\\n',flags=re.MULTILINE); open('${pyexefile}','w').write('\#!${pyexe_native}\\n'+R.sub('',open('${pyfile}','r').read()))"
+ COMMENT "Shebanging ${pyfile_name}"
+ VERBATIM
+ )
+
+ #on windows, python files need an extension to execute
+ get_filename_component(pyfile_ext ${pyfile} EXT)
+ if(WIN32 AND NOT pyfile_ext)
+ set(pyfile_name "${pyfile_name}.py")
+ endif()
+
+ install(PROGRAMS ${pyexefile} RENAME ${pyfile_name}
+ DESTINATION ${GR_PYTHON_INSTALL_DESTINATION}
+ COMPONENT ${GR_PYTHON_INSTALL_COMPONENT}
+ )
+ endforeach(pyfile)
+
+ endif()
+
+ GR_UNIQUE_TARGET("pygen" ${python_install_gen_targets})
+
+endfunction(GR_PYTHON_INSTALL)
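Sketches of the two call forms handled above, assuming GR_PYTHON_DIR from this file; the file and component names are hypothetical:

    # install importable modules; byte-compiled .pyc/.pyo files are generated and installed too
    GR_PYTHON_INSTALL(
        FILES __init__.py helpers.py
        DESTINATION ${GR_PYTHON_DIR}/usrptest
        COMPONENT pythonapi
    )
    # install an executable script; its shebang line is rewritten to the build-time python
    GR_PYTHON_INSTALL(
        PROGRAMS usrptest_runner.py
        DESTINATION bin
        COMPONENT runtime
    )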
+
+########################################################################
+# Write the python helper script that generates byte code files
+########################################################################
+file(WRITE ${CMAKE_BINARY_DIR}/python_compile_helper.py "
+import sys, py_compile
+files = sys.argv[1:]
+srcs, gens = files[:len(files)/2], files[len(files)/2:]
+for src, gen in zip(srcs, gens):
+ py_compile.compile(file=src, cfile=gen, doraise=True)
+")
diff --git a/tools/gr-usrptest/cmake/Modules/GrSwig.cmake b/tools/gr-usrptest/cmake/Modules/GrSwig.cmake
new file mode 100644
index 000000000..abf4dc461
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/GrSwig.cmake
@@ -0,0 +1,251 @@
+# Copyright 2010-2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+if(DEFINED __INCLUDED_GR_SWIG_CMAKE)
+ return()
+endif()
+set(__INCLUDED_GR_SWIG_CMAKE TRUE)
+
+include(GrPython)
+
+########################################################################
+# Builds a swig documentation file to be generated into python docstrings
+# Usage: GR_SWIG_MAKE_DOCS(output_file input_path input_path....)
+#
+# Set the following variable to specify extra dependent targets:
+# - GR_SWIG_DOCS_SOURCE_DEPS
+# - GR_SWIG_DOCS_TARGET_DEPS
+########################################################################
+function(GR_SWIG_MAKE_DOCS output_file)
+ if(ENABLE_DOXYGEN)
+
+ #setup the input files variable list, quote formatted
+ set(input_files)
+ unset(INPUT_PATHS)
+ foreach(input_path ${ARGN})
+ if(IS_DIRECTORY ${input_path}) #when input path is a directory
+ file(GLOB input_path_h_files ${input_path}/*.h)
+ else() #otherwise it's just a file, no glob
+ set(input_path_h_files ${input_path})
+ endif()
+ list(APPEND input_files ${input_path_h_files})
+ set(INPUT_PATHS "${INPUT_PATHS} \"${input_path}\"")
+ endforeach(input_path)
+
+ #determine the output directory
+ get_filename_component(name ${output_file} NAME_WE)
+ get_filename_component(OUTPUT_DIRECTORY ${output_file} PATH)
+ set(OUTPUT_DIRECTORY ${OUTPUT_DIRECTORY}/${name}_swig_docs)
+ make_directory(${OUTPUT_DIRECTORY})
+
+ #generate the Doxyfile used by doxygen
+ configure_file(
+ ${CMAKE_SOURCE_DIR}/docs/doxygen/Doxyfile.swig_doc.in
+ ${OUTPUT_DIRECTORY}/Doxyfile
+ @ONLY)
+
+ #Create a dummy custom command that depends on other targets
+ include(GrMiscUtils)
+ GR_GEN_TARGET_DEPS(_${name}_tag tag_deps ${GR_SWIG_DOCS_TARGET_DEPS})
+
+ #call doxygen on the Doxyfile + input headers
+ add_custom_command(
+ OUTPUT ${OUTPUT_DIRECTORY}/xml/index.xml
+ DEPENDS ${input_files} ${GR_SWIG_DOCS_SOURCE_DEPS} ${tag_deps}
+ COMMAND ${DOXYGEN_EXECUTABLE} ${OUTPUT_DIRECTORY}/Doxyfile
+ COMMENT "Generating doxygen xml for ${name} docs"
+ )
+
+ #call the swig_doc script on the xml files
+ add_custom_command(
+ OUTPUT ${output_file}
+ DEPENDS ${input_files} ${stamp-file} ${OUTPUT_DIRECTORY}/xml/index.xml
+ COMMAND ${PYTHON_EXECUTABLE} ${PYTHON_DASH_B}
+ ${CMAKE_SOURCE_DIR}/docs/doxygen/swig_doc.py
+ ${OUTPUT_DIRECTORY}/xml
+ ${output_file}
+ COMMENT "Generating python docstrings for ${name}"
+ WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/docs/doxygen
+ )
+
+ else(ENABLE_DOXYGEN)
+ file(WRITE ${output_file} "\n") #no doxygen -> empty file
+ endif(ENABLE_DOXYGEN)
+endfunction(GR_SWIG_MAKE_DOCS)
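GR_SWIG_MAKE (below) calls this function automatically when GR_SWIG_DOC_FILE is set, but it can also be invoked directly; a sketch with hypothetical paths:

    # harvest doxygen comments from the public headers into a swig docstring file
    GR_SWIG_MAKE_DOCS(
        ${CMAKE_CURRENT_BINARY_DIR}/usrptest_swig_doc.i
        ${CMAKE_SOURCE_DIR}/include/usrptest
    )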
+
+########################################################################
+# Build a swig target for the common gnuradio use case. Usage:
+# GR_SWIG_MAKE(target ifile ifile ifile...)
+#
+# Set the following variables before calling:
+# - GR_SWIG_FLAGS
+# - GR_SWIG_INCLUDE_DIRS
+# - GR_SWIG_LIBRARIES
+# - GR_SWIG_SOURCE_DEPS
+# - GR_SWIG_TARGET_DEPS
+# - GR_SWIG_DOC_FILE
+# - GR_SWIG_DOC_DIRS
+########################################################################
+macro(GR_SWIG_MAKE name)
+ set(ifiles ${ARGN})
+
+ # Shimming this in here to take care of a SWIG bug with handling
+ # vector<size_t> and vector<unsigned int> (on 32-bit machines) and
+ # vector<long unsigned int> (on 64-bit machines). Use this to test
+ # the size of size_t, then set SIZE_T_32 if it's a 32-bit machine
+ # and leave it unset if it's 64-bit. The logic in gr_type.i handles the rest.
+ INCLUDE(CheckTypeSize)
+ CHECK_TYPE_SIZE("size_t" SIZEOF_SIZE_T)
+ CHECK_TYPE_SIZE("unsigned int" SIZEOF_UINT)
+ if(${SIZEOF_SIZE_T} EQUAL ${SIZEOF_UINT})
+ list(APPEND GR_SWIG_FLAGS -DSIZE_T_32)
+ endif(${SIZEOF_SIZE_T} EQUAL ${SIZEOF_UINT})
+
+ #do swig doc generation if specified
+ if(GR_SWIG_DOC_FILE)
+ set(GR_SWIG_DOCS_SOURCE_DEPS ${GR_SWIG_SOURCE_DEPS})
+ list(APPEND GR_SWIG_DOCS_TARGET_DEPS ${GR_SWIG_TARGET_DEPS})
+ GR_SWIG_MAKE_DOCS(${GR_SWIG_DOC_FILE} ${GR_SWIG_DOC_DIRS})
+ add_custom_target(${name}_swig_doc DEPENDS ${GR_SWIG_DOC_FILE})
+ list(APPEND GR_SWIG_TARGET_DEPS ${name}_swig_doc ${GR_RUNTIME_SWIG_DOC_FILE})
+ endif()
+
+ #append additional include directories
+ find_package(PythonLibs 2)
+ list(APPEND GR_SWIG_INCLUDE_DIRS ${PYTHON_INCLUDE_PATH}) #deprecated name (now dirs)
+ list(APPEND GR_SWIG_INCLUDE_DIRS ${PYTHON_INCLUDE_DIRS})
+
+ #prepend local swig directories
+ list(INSERT GR_SWIG_INCLUDE_DIRS 0 ${CMAKE_CURRENT_SOURCE_DIR})
+ list(INSERT GR_SWIG_INCLUDE_DIRS 0 ${CMAKE_CURRENT_BINARY_DIR})
+
+ #determine include dependencies for swig file
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE}
+ ${CMAKE_BINARY_DIR}/get_swig_deps.py
+ "${ifiles}" "${GR_SWIG_INCLUDE_DIRS}"
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+ OUTPUT_VARIABLE SWIG_MODULE_${name}_EXTRA_DEPS
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ )
+
+ #Create a dummy custom command that depends on other targets
+ include(GrMiscUtils)
+ GR_GEN_TARGET_DEPS(_${name}_swig_tag tag_deps ${GR_SWIG_TARGET_DEPS})
+ set(tag_file ${CMAKE_CURRENT_BINARY_DIR}/${name}.tag)
+ add_custom_command(
+ OUTPUT ${tag_file}
+ DEPENDS ${GR_SWIG_SOURCE_DEPS} ${tag_deps}
+ COMMAND ${CMAKE_COMMAND} -E touch ${tag_file}
+ )
+
+ #append the specified include directories
+ include_directories(${GR_SWIG_INCLUDE_DIRS})
+ list(APPEND SWIG_MODULE_${name}_EXTRA_DEPS ${tag_file})
+
+ #setup the swig flags with flags and include directories
+ set(CMAKE_SWIG_FLAGS -fvirtual -modern -keyword -w511 -module ${name} ${GR_SWIG_FLAGS})
+ foreach(dir ${GR_SWIG_INCLUDE_DIRS})
+ list(APPEND CMAKE_SWIG_FLAGS "-I${dir}")
+ endforeach(dir)
+
+ #set the C++ property on the swig .i file so it builds
+ set_source_files_properties(${ifiles} PROPERTIES CPLUSPLUS ON)
+
+ #setup the actual swig library target to be built
+ include(UseSWIG)
+ SWIG_ADD_MODULE(${name} python ${ifiles})
+ SWIG_LINK_LIBRARIES(${name} ${PYTHON_LIBRARIES} ${GR_SWIG_LIBRARIES})
+ if(${name} STREQUAL "runtime_swig")
+ SET_TARGET_PROPERTIES(${SWIG_MODULE_runtime_swig_REAL_NAME} PROPERTIES DEFINE_SYMBOL "gnuradio_runtime_EXPORTS")
+ endif(${name} STREQUAL "runtime_swig")
+
+endmacro(GR_SWIG_MAKE)
+
+########################################################################
+# Install swig targets generated by GR_SWIG_MAKE. Usage:
+# GR_SWIG_INSTALL(
+# TARGETS target target target...
+# [DESTINATION destination]
+# [COMPONENT component]
+# )
+########################################################################
+macro(GR_SWIG_INSTALL)
+
+ include(CMakeParseArgumentsCopy)
+ CMAKE_PARSE_ARGUMENTS(GR_SWIG_INSTALL "" "DESTINATION;COMPONENT" "TARGETS" ${ARGN})
+
+ foreach(name ${GR_SWIG_INSTALL_TARGETS})
+ install(TARGETS ${SWIG_MODULE_${name}_REAL_NAME}
+ DESTINATION ${GR_SWIG_INSTALL_DESTINATION}
+ COMPONENT ${GR_SWIG_INSTALL_COMPONENT}
+ )
+
+ include(GrPython)
+ GR_PYTHON_INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/${name}.py
+ DESTINATION ${GR_SWIG_INSTALL_DESTINATION}
+ COMPONENT ${GR_SWIG_INSTALL_COMPONENT}
+ )
+
+ GR_LIBTOOL(
+ TARGET ${SWIG_MODULE_${name}_REAL_NAME}
+ DESTINATION ${GR_SWIG_INSTALL_DESTINATION}
+ )
+
+ endforeach(name)
+
+endmacro(GR_SWIG_INSTALL)
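A hedged sketch of how a swig/ subdirectory might drive GR_SWIG_MAKE and GR_SWIG_INSTALL together; the library, module, and path names mirror the usual gr_modtool layout and are not defined in this change:

    set(GR_SWIG_INCLUDE_DIRS ${CMAKE_SOURCE_DIR}/include)
    set(GR_SWIG_LIBRARIES gnuradio-usrptest)
    set(GR_SWIG_DOC_FILE ${CMAKE_CURRENT_BINARY_DIR}/usrptest_swig_doc.i)
    set(GR_SWIG_DOC_DIRS ${CMAKE_SOURCE_DIR}/include)

    GR_SWIG_MAKE(usrptest_swig usrptest_swig.i)

    GR_SWIG_INSTALL(
        TARGETS usrptest_swig
        DESTINATION ${GR_PYTHON_DIR}/usrptest
        COMPONENT pythonapi
    )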
+
+########################################################################
+# Generate a python file that can determine swig dependencies.
+# Used by the make macro above to determine extra dependencies.
+# When you build C++, CMake figures out the header dependencies.
+# This code essentially performs that logic for swig includes.
+########################################################################
+file(WRITE ${CMAKE_BINARY_DIR}/get_swig_deps.py "
+
+import os, sys, re
+
+i_include_matcher = re.compile('%(include|import)\\s*[<|\"](.*)[>|\"]')
+h_include_matcher = re.compile('#(include)\\s*[<|\"](.*)[>|\"]')
+include_dirs = sys.argv[2].split(';')
+
+def get_swig_incs(file_path):
+ if file_path.endswith('.i'): matcher = i_include_matcher
+ else: matcher = h_include_matcher
+ file_contents = open(file_path, 'r').read()
+ return matcher.findall(file_contents) #second positional arg of findall is 'pos', not flags
+
+def get_swig_deps(file_path, level):
+ deps = [file_path]
+ if level == 0: return deps
+ for keyword, inc_file in get_swig_incs(file_path):
+ for inc_dir in include_dirs:
+ inc_path = os.path.join(inc_dir, inc_file)
+ if not os.path.exists(inc_path): continue
+ deps.extend(get_swig_deps(inc_path, level-1))
+ break #found, we dont search in lower prio inc dirs
+ return deps
+
+if __name__ == '__main__':
+ ifiles = sys.argv[1].split(';')
+ deps = sum([get_swig_deps(ifile, 3) for ifile in ifiles], [])
+ #sys.stderr.write(';'.join(set(deps)) + '\\n\\n')
+ print(';'.join(set(deps)))
+")
diff --git a/tools/gr-usrptest/cmake/Modules/GrTest.cmake b/tools/gr-usrptest/cmake/Modules/GrTest.cmake
new file mode 100644
index 000000000..62caab4b5
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/GrTest.cmake
@@ -0,0 +1,143 @@
+# Copyright 2010-2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+if(DEFINED __INCLUDED_GR_TEST_CMAKE)
+ return()
+endif()
+set(__INCLUDED_GR_TEST_CMAKE TRUE)
+
+########################################################################
+# Add a unit test and setup the environment for a unit test.
+# Takes the same arguments as the ADD_TEST function.
+#
+# Before calling set the following variables:
+# GR_TEST_TARGET_DEPS - built targets for the library path
+# GR_TEST_LIBRARY_DIRS - directories for the library path
+# GR_TEST_PYTHON_DIRS - directories for the python path
+# GR_TEST_ENVIRONS - other environment key/value pairs
+########################################################################
+function(GR_ADD_TEST test_name)
+
+ #Ensure that the build exe also appears in the PATH.
+ list(APPEND GR_TEST_TARGET_DEPS ${ARGN})
+
+ #In the land of windows, all libraries must be in the PATH.
+ #Since the dependent libraries are not yet installed,
+ #we must manually set them in the PATH to run tests.
+ #The following appends the path of a target dependency.
+ foreach(target ${GR_TEST_TARGET_DEPS})
+ get_target_property(location ${target} LOCATION)
+ if(location)
+ get_filename_component(path ${location} PATH)
+ string(REGEX REPLACE "\\$\\(.*\\)" ${CMAKE_BUILD_TYPE} path ${path})
+ list(APPEND GR_TEST_LIBRARY_DIRS ${path})
+ endif(location)
+ endforeach(target)
+
+ if(WIN32)
+ #SWIG generates the python library files into a subdirectory.
+ #Therefore, we must append this subdirectory into PYTHONPATH.
+ #Only do this for the python directories matching the following:
+ foreach(pydir ${GR_TEST_PYTHON_DIRS})
+ get_filename_component(name ${pydir} NAME)
+ if(name MATCHES "^(swig|lib|src)$")
+ list(APPEND GR_TEST_PYTHON_DIRS ${pydir}/${CMAKE_BUILD_TYPE})
+ endif()
+ endforeach(pydir)
+ endif(WIN32)
+
+ file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR} srcdir)
+ file(TO_NATIVE_PATH "${GR_TEST_LIBRARY_DIRS}" libpath) #ok to use on dir list?
+ file(TO_NATIVE_PATH "${GR_TEST_PYTHON_DIRS}" pypath) #ok to use on dir list?
+
+ set(environs "VOLK_GENERIC=1" "GR_DONT_LOAD_PREFS=1" "srcdir=${srcdir}")
+ list(APPEND environs ${GR_TEST_ENVIRONS})
+
+ #http://www.cmake.org/pipermail/cmake/2009-May/029464.html
+ #Replaced this add test + set environs code with the shell script generation.
+ #It's nicer to be able to manually run the shell script to diagnose problems.
+ #ADD_TEST(${ARGV})
+ #SET_TESTS_PROPERTIES(${test_name} PROPERTIES ENVIRONMENT "${environs}")
+
+ if(UNIX)
+ set(LD_PATH_VAR "LD_LIBRARY_PATH")
+ if(APPLE)
+ set(LD_PATH_VAR "DYLD_LIBRARY_PATH")
+ endif()
+
+ set(binpath "${CMAKE_CURRENT_BINARY_DIR}:$PATH")
+ list(APPEND libpath "$${LD_PATH_VAR}")
+ list(APPEND pypath "$PYTHONPATH")
+
+ #replace list separator with the path separator
+ string(REPLACE ";" ":" libpath "${libpath}")
+ string(REPLACE ";" ":" pypath "${pypath}")
+ list(APPEND environs "PATH=${binpath}" "${LD_PATH_VAR}=${libpath}" "PYTHONPATH=${pypath}")
+
+ #generate a shell script that sets the environment and runs the test
+ if (CMAKE_CROSSCOMPILING)
+ set(SHELL "/bin/sh")
+ else(CMAKE_CROSSCOMPILING)
+ find_program(SHELL sh)
+ endif(CMAKE_CROSSCOMPILING)
+ set(sh_file ${CMAKE_CURRENT_BINARY_DIR}/${test_name}_test.sh)
+ file(WRITE ${sh_file} "#!${SHELL}\n")
+ #each line sets an environment variable
+ foreach(environ ${environs})
+ file(APPEND ${sh_file} "export ${environ}\n")
+ endforeach(environ)
+ #load the command to run with its arguments
+ foreach(arg ${ARGN})
+ file(APPEND ${sh_file} "${arg} ")
+ endforeach(arg)
+ file(APPEND ${sh_file} "\n")
+
+ #make the shell file executable
+ execute_process(COMMAND chmod +x ${sh_file})
+
+ add_test(${test_name} ${SHELL} ${sh_file})
+
+ endif(UNIX)
+
+ if(WIN32)
+ list(APPEND libpath ${DLL_PATHS} "%PATH%")
+ list(APPEND pypath "%PYTHONPATH%")
+
+ #replace list separator with the path separator (escaped)
+ string(REPLACE ";" "\\;" libpath "${libpath}")
+ string(REPLACE ";" "\\;" pypath "${pypath}")
+ list(APPEND environs "PATH=${libpath}" "PYTHONPATH=${pypath}")
+
+ #generate a bat file that sets the environment and runs the test
+ set(bat_file ${CMAKE_CURRENT_BINARY_DIR}/${test_name}_test.bat)
+ file(WRITE ${bat_file} "@echo off\n")
+ #each line sets an environment variable
+ foreach(environ ${environs})
+ file(APPEND ${bat_file} "SET ${environ}\n")
+ endforeach(environ)
+ #load the command to run with its arguments
+ foreach(arg ${ARGN})
+ file(APPEND ${bat_file} "${arg} ")
+ endforeach(arg)
+ file(APPEND ${bat_file} "\n")
+
+ add_test(${test_name} ${bat_file})
+ endif(WIN32)
+
+endfunction(GR_ADD_TEST)
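A sketch of registering a python QA test with the environment handling above (the target, directory, and test names are illustrative):

    # expose the freshly built library and generated python bindings to the test
    set(GR_TEST_TARGET_DEPS gnuradio-usrptest)
    set(GR_TEST_PYTHON_DIRS ${CMAKE_BINARY_DIR}/swig ${CMAKE_BINARY_DIR}/python)
    set(GR_TEST_ENVIRONS "USRPTEST_QA=1")
    GR_ADD_TEST(qa_flow_control ${QA_PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/qa_flow_control.py)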
diff --git a/tools/gr-usrptest/cmake/Modules/UseSWIG.cmake b/tools/gr-usrptest/cmake/Modules/UseSWIG.cmake
new file mode 100644
index 000000000..c0f172870
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/UseSWIG.cmake
@@ -0,0 +1,304 @@
+# - SWIG module for CMake
+# Defines the following macros:
+# SWIG_ADD_MODULE(name language [ files ])
+# - Define swig module with given name and specified language
+# SWIG_LINK_LIBRARIES(name [ libraries ])
+# - Link libraries to swig module
+# All other macros are for internal use only.
+# To get the actual name of the swig module,
+# use: ${SWIG_MODULE_${name}_REAL_NAME}.
+# Set Source files properties such as CPLUSPLUS and SWIG_FLAGS to specify
+# special behavior of SWIG. Also global CMAKE_SWIG_FLAGS can be used to add
+# special flags to all swig calls.
+# Another special variable is CMAKE_SWIG_OUTDIR, it allows one to specify
+# where to write all the swig generated modules (swig -outdir option)
+# The name-specific variable SWIG_MODULE_<name>_EXTRA_DEPS may be used
+# to specify extra dependencies for the generated modules.
+# If the source file generated by swig needs some special flag you can use
+# set_source_files_properties( ${swig_generated_file_fullname}
+# PROPERTIES COMPILE_FLAGS "-bla")
+
+
+#=============================================================================
+# Copyright 2004-2009 Kitware, Inc.
+# Copyright 2009 Mathieu Malaterre <mathieu.malaterre@gmail.com>
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+set(SWIG_CXX_EXTENSION "cxx")
+set(SWIG_EXTRA_LIBRARIES "")
+
+set(SWIG_PYTHON_EXTRA_FILE_EXTENSION "py")
+
+#
+# For given swig module initialize variables associated with it
+#
+macro(SWIG_MODULE_INITIALIZE name language)
+ string(TOUPPER "${language}" swig_uppercase_language)
+ string(TOLOWER "${language}" swig_lowercase_language)
+ set(SWIG_MODULE_${name}_LANGUAGE "${swig_uppercase_language}")
+ set(SWIG_MODULE_${name}_SWIG_LANGUAGE_FLAG "${swig_lowercase_language}")
+
+ set(SWIG_MODULE_${name}_REAL_NAME "${name}")
+ if("${SWIG_MODULE_${name}_LANGUAGE}" STREQUAL "UNKNOWN")
+ message(FATAL_ERROR "SWIG Error: Language \"${language}\" not found")
+ elseif("${SWIG_MODULE_${name}_LANGUAGE}" STREQUAL "PYTHON")
+ # when swig is used without the -interface option it will produce an
+ # 'import _modulename' statement in module.py, which implies having a
+ # corresponding _modulename.so (*NIX) or _modulename.pyd (Win32).
+ set(SWIG_MODULE_${name}_REAL_NAME "_${name}")
+ elseif("${SWIG_MODULE_${name}_LANGUAGE}" STREQUAL "PERL")
+ set(SWIG_MODULE_${name}_EXTRA_FLAGS "-shadow")
+ endif()
+endmacro()
+
+#
+# For a given language, input file, and output file, determine extra files that
+# will be generated. This is internal swig macro.
+#
+
+macro(SWIG_GET_EXTRA_OUTPUT_FILES language outfiles generatedpath infile)
+ set(${outfiles} "")
+ get_source_file_property(SWIG_GET_EXTRA_OUTPUT_FILES_module_basename
+ ${infile} SWIG_MODULE_NAME)
+ if(SWIG_GET_EXTRA_OUTPUT_FILES_module_basename STREQUAL "NOTFOUND")
+ get_filename_component(SWIG_GET_EXTRA_OUTPUT_FILES_module_basename "${infile}" NAME_WE)
+ endif()
+ foreach(it ${SWIG_${language}_EXTRA_FILE_EXTENSION})
+ set(${outfiles} ${${outfiles}}
+ "${generatedpath}/${SWIG_GET_EXTRA_OUTPUT_FILES_module_basename}.${it}")
+ endforeach()
+endmacro()
+
+#
+# Take swig (*.i) file and add proper custom commands for it
+#
+macro(SWIG_ADD_SOURCE_TO_MODULE name outfiles infile)
+ set(swig_full_infile ${infile})
+ get_filename_component(swig_source_file_path "${infile}" PATH)
+ get_filename_component(swig_source_file_name_we "${infile}" NAME_WE)
+ get_source_file_property(swig_source_file_generated ${infile} GENERATED)
+ get_source_file_property(swig_source_file_cplusplus ${infile} CPLUSPLUS)
+ get_source_file_property(swig_source_file_flags ${infile} SWIG_FLAGS)
+ if("${swig_source_file_flags}" STREQUAL "NOTFOUND")
+ set(swig_source_file_flags "")
+ endif()
+ set(swig_source_file_fullname "${infile}")
+ if(${swig_source_file_path} MATCHES "^${CMAKE_CURRENT_SOURCE_DIR}")
+ string(REGEX REPLACE
+ "^${CMAKE_CURRENT_SOURCE_DIR}" ""
+ swig_source_file_relative_path
+ "${swig_source_file_path}")
+ else()
+ if(${swig_source_file_path} MATCHES "^${CMAKE_CURRENT_BINARY_DIR}")
+ string(REGEX REPLACE
+ "^${CMAKE_CURRENT_BINARY_DIR}" ""
+ swig_source_file_relative_path
+ "${swig_source_file_path}")
+ set(swig_source_file_generated 1)
+ else()
+ set(swig_source_file_relative_path "${swig_source_file_path}")
+ if(swig_source_file_generated)
+ set(swig_source_file_fullname "${CMAKE_CURRENT_BINARY_DIR}/${infile}")
+ else()
+ set(swig_source_file_fullname "${CMAKE_CURRENT_SOURCE_DIR}/${infile}")
+ endif()
+ endif()
+ endif()
+
+ set(swig_generated_file_fullname
+ "${CMAKE_CURRENT_BINARY_DIR}")
+ if(swig_source_file_relative_path)
+ set(swig_generated_file_fullname
+ "${swig_generated_file_fullname}/${swig_source_file_relative_path}")
+ endif()
+ # If CMAKE_SWIG_OUTDIR was specified then pass it to -outdir
+ if(CMAKE_SWIG_OUTDIR)
+ set(swig_outdir ${CMAKE_SWIG_OUTDIR})
+ else()
+ set(swig_outdir ${CMAKE_CURRENT_BINARY_DIR})
+ endif()
+ SWIG_GET_EXTRA_OUTPUT_FILES(${SWIG_MODULE_${name}_LANGUAGE}
+ swig_extra_generated_files
+ "${swig_outdir}"
+ "${infile}")
+ set(swig_generated_file_fullname
+ "${swig_generated_file_fullname}/${swig_source_file_name_we}")
+ # add the language into the name of the file (i.e. TCL_wrap)
+ # this allows for the same .i file to be wrapped into different languages
+ set(swig_generated_file_fullname
+ "${swig_generated_file_fullname}${SWIG_MODULE_${name}_LANGUAGE}_wrap")
+
+ if(swig_source_file_cplusplus)
+ set(swig_generated_file_fullname
+ "${swig_generated_file_fullname}.${SWIG_CXX_EXTENSION}")
+ else()
+ set(swig_generated_file_fullname
+ "${swig_generated_file_fullname}.c")
+ endif()
+
+ # Shut up some warnings from poor SWIG code generation that we
+ # can do nothing about, when this flag is available
+ include(CheckCXXCompilerFlag)
+ check_cxx_compiler_flag("-Wno-unused-but-set-variable" HAVE_WNO_UNUSED_BUT_SET_VARIABLE)
+ if(HAVE_WNO_UNUSED_BUT_SET_VARIABLE)
+ set_source_files_properties(${swig_generated_file_fullname}
+ PROPERTIES COMPILE_FLAGS "-Wno-unused-but-set-variable")
+ endif(HAVE_WNO_UNUSED_BUT_SET_VARIABLE)
+
+ get_directory_property(cmake_include_directories INCLUDE_DIRECTORIES)
+ set(swig_include_dirs)
+ foreach(it ${cmake_include_directories})
+ set(swig_include_dirs ${swig_include_dirs} "-I${it}")
+ endforeach()
+
+ set(swig_special_flags)
+ # default is c, so add c++ flag if it is c++
+ if(swig_source_file_cplusplus)
+ set(swig_special_flags ${swig_special_flags} "-c++")
+ endif()
+ set(swig_extra_flags)
+ if(SWIG_MODULE_${name}_EXTRA_FLAGS)
+ set(swig_extra_flags ${swig_extra_flags} ${SWIG_MODULE_${name}_EXTRA_FLAGS})
+ endif()
+
+ # hack to work around CMake bug in add_custom_command with multiple OUTPUT files
+
+ file(RELATIVE_PATH reldir ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR})
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import re, hashlib
+unique = hashlib.md5('${reldir}${ARGN}').hexdigest()[:5]
+print(re.sub('\\W', '_', '${name} ${reldir} ' + unique))"
+ OUTPUT_VARIABLE _target OUTPUT_STRIP_TRAILING_WHITESPACE
+ )
+
+ file(
+ WRITE ${CMAKE_CURRENT_BINARY_DIR}/${_target}.cpp.in
+ "int main(void){return 0;}\n"
+ )
+
+ # create dummy dependencies
+ add_custom_command(
+ OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${_target}.cpp
+ COMMAND ${CMAKE_COMMAND} -E copy
+ ${CMAKE_CURRENT_BINARY_DIR}/${_target}.cpp.in
+ ${CMAKE_CURRENT_BINARY_DIR}/${_target}.cpp
+ DEPENDS "${swig_source_file_fullname}" ${SWIG_MODULE_${name}_EXTRA_DEPS}
+ COMMENT ""
+ )
+
+ # create the dummy target
+ add_executable(${_target} ${CMAKE_CURRENT_BINARY_DIR}/${_target}.cpp)
+
+ # add a custom command to the dummy target
+ add_custom_command(
+ TARGET ${_target}
+ # Let's create the ${swig_outdir} at execution time, in case dir contains $(OutDir)
+ COMMAND ${CMAKE_COMMAND} -E make_directory ${swig_outdir}
+ COMMAND "${SWIG_EXECUTABLE}"
+ ARGS "-${SWIG_MODULE_${name}_SWIG_LANGUAGE_FLAG}"
+ ${swig_source_file_flags}
+ ${CMAKE_SWIG_FLAGS}
+ -outdir ${swig_outdir}
+ ${swig_special_flags}
+ ${swig_extra_flags}
+ ${swig_include_dirs}
+ -o "${swig_generated_file_fullname}"
+ "${swig_source_file_fullname}"
+ COMMENT "Swig source"
+ )
+
+ #add dummy independent dependencies from the _target to each file
+ #that will be generated by the SWIG command above
+
+ set(${outfiles} "${swig_generated_file_fullname}" ${swig_extra_generated_files})
+
+ foreach(swig_gen_file ${${outfiles}})
+ add_custom_command(
+ OUTPUT ${swig_gen_file}
+ COMMAND ""
+ DEPENDS ${_target}
+ COMMENT ""
+ )
+ endforeach()
+
+ set_source_files_properties(
+ ${outfiles} PROPERTIES GENERATED 1
+ )
+
+endmacro()
+
+#
+# Create Swig module
+#
+macro(SWIG_ADD_MODULE name language)
+ SWIG_MODULE_INITIALIZE(${name} ${language})
+ set(swig_dot_i_sources)
+ set(swig_other_sources)
+ foreach(it ${ARGN})
+ if(${it} MATCHES ".*\\.i$")
+ set(swig_dot_i_sources ${swig_dot_i_sources} "${it}")
+ else()
+ set(swig_other_sources ${swig_other_sources} "${it}")
+ endif()
+ endforeach()
+
+ set(swig_generated_sources)
+ foreach(it ${swig_dot_i_sources})
+ SWIG_ADD_SOURCE_TO_MODULE(${name} swig_generated_source ${it})
+ set(swig_generated_sources ${swig_generated_sources} "${swig_generated_source}")
+ endforeach()
+ get_directory_property(swig_extra_clean_files ADDITIONAL_MAKE_CLEAN_FILES)
+ set_directory_properties(PROPERTIES
+ ADDITIONAL_MAKE_CLEAN_FILES "${swig_extra_clean_files};${swig_generated_sources}")
+ add_library(${SWIG_MODULE_${name}_REAL_NAME}
+ MODULE
+ ${swig_generated_sources}
+ ${swig_other_sources})
+ string(TOLOWER "${language}" swig_lowercase_language)
+ if ("${swig_lowercase_language}" STREQUAL "java")
+ if (APPLE)
+ # In java you want:
+ # System.loadLibrary("LIBRARY");
+ # then JNI will look for a library whose name is platform dependent, namely
+ # MacOS : libLIBRARY.jnilib
+ # Windows: LIBRARY.dll
+ # Linux : libLIBRARY.so
+ set_target_properties (${SWIG_MODULE_${name}_REAL_NAME} PROPERTIES SUFFIX ".jnilib")
+ endif ()
+ endif ()
+ if ("${swig_lowercase_language}" STREQUAL "python")
+ # this is only needed for the python case where a _modulename.so is generated
+ set_target_properties(${SWIG_MODULE_${name}_REAL_NAME} PROPERTIES PREFIX "")
+ # Python extension modules on Windows must have the extension ".pyd"
+ # instead of ".dll" as of Python 2.5. Older python versions also support
+ # this suffix.
+ # http://docs.python.org/whatsnew/ports.html#SECTION0001510000000000000000
+ # <quote>
+ # Windows: .dll is no longer supported as a filename extension for extension modules.
+ # .pyd is now the only filename extension that will be searched for.
+ # </quote>
+ if(WIN32 AND NOT CYGWIN)
+ set_target_properties(${SWIG_MODULE_${name}_REAL_NAME} PROPERTIES SUFFIX ".pyd")
+ endif()
+ endif ()
+endmacro()
+
+#
+# Like TARGET_LINK_LIBRARIES but for swig modules
+#
+macro(SWIG_LINK_LIBRARIES name)
+ if(SWIG_MODULE_${name}_REAL_NAME)
+ target_link_libraries(${SWIG_MODULE_${name}_REAL_NAME} ${ARGN})
+ else()
+ message(SEND_ERROR "Cannot find Swig library \"${name}\".")
+ endif()
+endmacro()
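Outside of the GR_SWIG_MAKE wrapper, the two public macros in this file can be used on their own; a minimal sketch with a hypothetical interface file:

    find_package(PythonLibs 2)
    include_directories(${PYTHON_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR})
    set_source_files_properties(example.i PROPERTIES CPLUSPLUS ON)
    SWIG_ADD_MODULE(example python example.i)
    SWIG_LINK_LIBRARIES(example ${PYTHON_LIBRARIES})
    # the resulting target is ${SWIG_MODULE_example_REAL_NAME}, i.e. "_example"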
diff --git a/tools/gr-usrptest/cmake/Modules/usrptestConfig.cmake b/tools/gr-usrptest/cmake/Modules/usrptestConfig.cmake
new file mode 100644
index 000000000..5244d466e
--- /dev/null
+++ b/tools/gr-usrptest/cmake/Modules/usrptestConfig.cmake
@@ -0,0 +1,30 @@
+INCLUDE(FindPkgConfig)
+PKG_CHECK_MODULES(PC_USRPTEST usrptest)
+
+FIND_PATH(
+ USRPTEST_INCLUDE_DIRS
+ NAMES usrptest/api.h
+ HINTS $ENV{USRPTEST_DIR}/include
+ ${PC_USRPTEST_INCLUDEDIR}
+ PATHS ${CMAKE_INSTALL_PREFIX}/include
+ /usr/local/include
+ /usr/include
+)
+
+FIND_LIBRARY(
+ USRPTEST_LIBRARIES
+ NAMES gnuradio-usrptest
+ HINTS $ENV{USRPTEST_DIR}/lib
+ ${PC_USRPTEST_LIBDIR}
+ PATHS ${CMAKE_INSTALL_PREFIX}/lib
+ ${CMAKE_INSTALL_PREFIX}/lib64
+ /usr/local/lib
+ /usr/local/lib64
+ /usr/lib
+ /usr/lib64
+)
+
+INCLUDE(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(USRPTEST DEFAULT_MSG USRPTEST_LIBRARIES USRPTEST_INCLUDE_DIRS)
+MARK_AS_ADVANCED(USRPTEST_LIBRARIES USRPTEST_INCLUDE_DIRS)
+
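A sketch of how a downstream out-of-tree project might consume this find module once installed (the module path and application target are placeholders):

    list(APPEND CMAKE_MODULE_PATH "${CMAKE_INSTALL_PREFIX}/lib/cmake/usrptest")
    find_package(usrptest REQUIRED)
    include_directories(${USRPTEST_INCLUDE_DIRS})
    target_link_libraries(my_app ${USRPTEST_LIBRARIES})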
diff --git a/tools/gr-usrptest/cmake/cmake_uninstall.cmake.in b/tools/gr-usrptest/cmake/cmake_uninstall.cmake.in
new file mode 100644
index 000000000..9ae1ae4bd
--- /dev/null
+++ b/tools/gr-usrptest/cmake/cmake_uninstall.cmake.in
@@ -0,0 +1,32 @@
+# http://www.vtk.org/Wiki/CMake_FAQ#Can_I_do_.22make_uninstall.22_with_CMake.3F
+
+IF(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+ MESSAGE(FATAL_ERROR "Cannot find install manifest: \"@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt\"")
+ENDIF(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+
+FILE(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
+STRING(REGEX REPLACE "\n" ";" files "${files}")
+FOREACH(file ${files})
+ MESSAGE(STATUS "Uninstalling \"$ENV{DESTDIR}${file}\"")
+ IF(EXISTS "$ENV{DESTDIR}${file}")
+ EXEC_PROGRAM(
+ "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+ OUTPUT_VARIABLE rm_out
+ RETURN_VALUE rm_retval
+ )
+ IF(NOT "${rm_retval}" STREQUAL 0)
+ MESSAGE(FATAL_ERROR "Problem when removing \"$ENV{DESTDIR}${file}\"")
+ ENDIF(NOT "${rm_retval}" STREQUAL 0)
+ ELSEIF(IS_SYMLINK "$ENV{DESTDIR}${file}")
+ EXEC_PROGRAM(
+ "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+ OUTPUT_VARIABLE rm_out
+ RETURN_VALUE rm_retval
+ )
+ IF(NOT "${rm_retval}" STREQUAL 0)
+ MESSAGE(FATAL_ERROR "Problem when removing \"$ENV{DESTDIR}${file}\"")
+ ENDIF(NOT "${rm_retval}" STREQUAL 0)
+ ELSE(EXISTS "$ENV{DESTDIR}${file}")
+ MESSAGE(STATUS "File \"$ENV{DESTDIR}${file}\" does not exist.")
+ ENDIF(EXISTS "$ENV{DESTDIR}${file}")
+ENDFOREACH(file)
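This template only takes effect once the top-level CMakeLists.txt configures it and adds an uninstall target; the conventional hookup (per the CMake FAQ linked above) looks roughly like:

    configure_file(
        ${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in
        ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake
        @ONLY)
    add_custom_target(uninstall
        ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)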
diff --git a/tools/gr-usrptest/docs/CMakeLists.txt b/tools/gr-usrptest/docs/CMakeLists.txt
new file mode 100644
index 000000000..f16fbf6db
--- /dev/null
+++ b/tools/gr-usrptest/docs/CMakeLists.txt
@@ -0,0 +1,35 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Setup dependencies
+########################################################################
+find_package(Doxygen)
+
+########################################################################
+# Begin conditional configuration
+########################################################################
+if(ENABLE_DOXYGEN)
+
+########################################################################
+# Add subdirectories
+########################################################################
+add_subdirectory(doxygen)
+
+endif(ENABLE_DOXYGEN)
diff --git a/tools/gr-usrptest/docs/README.usrptest b/tools/gr-usrptest/docs/README.usrptest
new file mode 100644
index 000000000..512e3334b
--- /dev/null
+++ b/tools/gr-usrptest/docs/README.usrptest
@@ -0,0 +1,11 @@
+This is the usrptest-write-a-block package meant as a guide to building
+out-of-tree packages. To use the usrptest blocks, the Python namespace
+is 'usrptest', which is imported as:
+
+ import usrptest
+
+See the Doxygen documentation for details about the blocks available
+in this package. A quick listing of the details can be found in Python
+after importing by using:
+
+ help(usrptest)
diff --git a/tools/gr-usrptest/docs/doxygen/CMakeLists.txt b/tools/gr-usrptest/docs/doxygen/CMakeLists.txt
new file mode 100644
index 000000000..1b4479929
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/CMakeLists.txt
@@ -0,0 +1,52 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Create the doxygen configuration file
+########################################################################
+file(TO_NATIVE_PATH ${CMAKE_SOURCE_DIR} top_srcdir)
+file(TO_NATIVE_PATH ${CMAKE_BINARY_DIR} top_builddir)
+file(TO_NATIVE_PATH ${CMAKE_SOURCE_DIR} abs_top_srcdir)
+file(TO_NATIVE_PATH ${CMAKE_BINARY_DIR} abs_top_builddir)
+
+set(HAVE_DOT ${DOXYGEN_DOT_FOUND})
+set(enable_html_docs YES)
+set(enable_latex_docs NO)
+set(enable_xml_docs YES)
+
+configure_file(
+ ${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in
+ ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
+@ONLY)
+
+set(BUILT_DIRS ${CMAKE_CURRENT_BINARY_DIR}/xml ${CMAKE_CURRENT_BINARY_DIR}/html)
+
+########################################################################
+# Make and install doxygen docs
+########################################################################
+add_custom_command(
+ OUTPUT ${BUILT_DIRS}
+ COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMENT "Generating documentation with doxygen"
+)
+
+add_custom_target(doxygen_target ALL DEPENDS ${BUILT_DIRS})
+
+install(DIRECTORY ${BUILT_DIRS} DESTINATION ${GR_PKG_DOC_DIR})
diff --git a/tools/gr-usrptest/docs/doxygen/Doxyfile.in b/tools/gr-usrptest/docs/doxygen/Doxyfile.in
new file mode 100644
index 000000000..2becf4b28
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/Doxyfile.in
@@ -0,0 +1,1922 @@
+# Doxyfile 1.8.4
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed
+# in front of the TAG it is preceding.
+# All text after a hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (" ").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# http://www.gnu.org/software/libiconv for the list of possible encodings.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or sequence of words) that should
+# identify the project. Note that if you do not use Doxywizard you need
+# to put quotes around the project name if it contains spaces.
+
+PROJECT_NAME = "GNU Radio's USRPTEST Package"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number.
+# This could be handy for archiving the generated documentation or
+# if some version control system is used.
+
+PROJECT_NUMBER =
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give the viewer
+# a quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF =
+
+# With the PROJECT_LOGO tag one can specify a logo or icon that is
+# included in the documentation. The maximum height of the logo should not
+# exceed 55 pixels and the maximum width should not exceed 200 pixels.
+# Doxygen will copy the logo to the output directory.
+
+PROJECT_LOGO =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
+# base path where the generated documentation will be put.
+# If a relative path is entered, it will be relative to the location
+# where doxygen was started. If left blank the current directory will be used.
+
+OUTPUT_DIRECTORY =
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
+# 4096 sub-directories (in 2 levels) under the output directory of each output
+# format and will distribute the generated files over these directories.
+# Enabling this option can be useful when feeding doxygen a huge amount of
+# source files, where putting all generated files in the same directory would
+# otherwise cause performance problems for the file system.
+
+CREATE_SUBDIRS = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are:
+# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
+# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
+# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
+# messages), Korean, Korean-en, Latvian, Lithuanian, Norwegian, Macedonian,
+# Persian, Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic,
+# Slovak, Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
+
+OUTPUT_LANGUAGE = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
+# include brief member descriptions after the members that are listed in
+# the file and class documentation (similar to JavaDoc).
+# Set to NO to disable this.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
+# the brief description of a member or function before the detailed description.
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator
+# that is used to form the text in various listings. Each string
+# in this list, if found as the leading text of the brief description, will be
+# stripped from the text and the result after processing the whole list, is
+# used as the annotated text. Otherwise, the brief description is used as-is.
+# If left blank, the following values are used ("$name" is automatically
+# replaced with the name of the entity): "The $name class" "The $name widget"
+# "The $name file" "is" "provides" "specifies" "contains"
+# "represents" "a" "an" "the"
+
+ABBREVIATE_BRIEF =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# Doxygen will generate a detailed section even if there is only a brief
+# description.
+
+ALWAYS_DETAILED_SEC = YES
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
+# path before files name in the file list and in the header files. If set
+# to NO the shortest path that makes the file name unique will be used.
+
+FULL_PATH_NAMES = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
+# can be used to strip a user-defined part of the path. Stripping is
+# only done if one of the specified strings matches the left-hand part of
+# the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the
+# path to strip. Note that you specify absolute paths here, but also
+# relative paths, which will be relative from the directory where doxygen is
+# started.
+
+STRIP_FROM_PATH =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
+# the path mentioned in the documentation of a class, which tells
+# the reader which header file to include in order to use a class.
+# If left blank only the name of the header file containing the class
+# definition is used. Otherwise one should specify the include paths that
+# are normally passed to the compiler using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
+# (but less readable) file names. This can be useful if your file system
+# doesn't support long names like on DOS, Mac, or CD-ROM.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
+# will interpret the first line (until the first dot) of a JavaDoc-style
+# comment as the brief description. If set to NO, the JavaDoc
+# comments will behave just like regular Qt-style comments
+# (thus requiring an explicit @brief command for a brief description.)
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
+# interpret the first line (until the first dot) of a Qt-style
+# comment as the brief description. If set to NO, the comments
+# will behave just like regular Qt-style comments (thus requiring
+# an explicit \brief command for a brief description.)
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
+# treat a multi-line C++ special comment block (i.e. a block of //! or ///
+# comments) as a brief description. This used to be the default behaviour.
+# The new default is to treat a multi-line C++ comment block as a detailed
+# description. Set this tag to YES if you prefer the old behaviour instead.
+
+MULTILINE_CPP_IS_BRIEF = YES
+
+# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
+# member inherits the documentation from any documented member that it
+# re-implements.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
+# a new page for each member. If set to NO, the documentation of a member will
+# be part of the file/class/namespace that contains it.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab.
+# Doxygen uses this value to replace tabs by spaces in code fragments.
+
+TAB_SIZE = 8
+
+# This tag can be used to specify a number of aliases that acts
+# as commands in the documentation. An alias has the form "name=value".
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to
+# put the command \sideeffect (or @sideeffect) in the documentation, which
+# will result in a user-defined paragraph with heading "Side Effects:".
+# You can put \n's in the value part of an alias to insert newlines.
+
+ALIASES =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding
+# "class=itcl::class" will allow you to use the command class in the
+# itcl::class meaning.
+
+TCL_SUBST =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
+# sources only. Doxygen will then generate output that is more tailored for C.
+# For instance, some of the names that are used will be different. The list
+# of all members will be omitted, etc.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
+# sources only. Doxygen will then generate output that is more tailored for
+# Java. For instance, namespaces will be presented as packages, qualified
+# scopes will look different, etc.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources only. Doxygen will then generate output that is more tailored for
+# Fortran.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for
+# VHDL.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension,
+# and language is one of the parsers supported by doxygen: IDL, Java,
+# Javascript, CSharp, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL, C,
+# C++. For instance to make doxygen treat .inc files as Fortran files (default
+# is PHP), and .f files as C (default is Fortran), use: inc=Fortran f=C. Note
+# that for custom extensions you also need to set FILE_PATTERNS otherwise the
+# files are not read by doxygen.
+
+EXTENSION_MAPPING =
+
+# If MARKDOWN_SUPPORT is enabled (the default) then doxygen pre-processes all
+# comments according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you
+# can mix doxygen, HTML, and XML commands with Markdown formatting.
+# Disable only in case of backward compatibilities issues.
+
+MARKDOWN_SUPPORT = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should
+# set this tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string); v.s.
+# func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+
+BUILTIN_STL_SUPPORT = YES
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
+# Doxygen will parse them like normal C++ but will assume all classes use public
+# instead of private inheritance when no explicit protection keyword is present.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES (the
+# default) will make doxygen replace the get and set methods by a property in
+# the documentation. This will only work if the methods are indeed getting or
+# setting a simple type. If this is not the case, or you want to show the
+# methods anyway, you should set this option to NO.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
+# the same type (for instance a group of public functions) to be put as a
+# subgroup of that type (e.g. under the Public Functions section). Set it to
+# NO to prevent subgrouping. Alternatively, this can be done per class using
+# the \nosubgrouping command.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and
+# unions are shown inside the group in which they are included (e.g. using
+# @ingroup) instead of on a separate page (for HTML and Man pages) or
+# section (for LaTeX and RTF).
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and
+# unions with only public data fields or simple typedef fields will be shown
+# inline in the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO (the default), structs, classes, and unions are shown on a separate
+# page (for HTML and Man pages) or section (for LaTeX and RTF).
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
+# is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically
+# be useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can
+# be an expensive process and often the same symbol appears multiple times in
+# the code, doxygen keeps a cache of pre-resolved symbols. If the cache is too
+# small doxygen will become slower. If the cache is too large, memory is wasted.
+# The cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid
+# range is 0..9, the default is 0, corresponding to a cache size of 2^16 = 65536
+# symbols.
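+# As a worked example (illustrative only, not the value used below): a setting
+# of 3 would give a cache of 2^(16+3) = 2^19 = 524288 symbols:
+#   LOOKUP_CACHE_SIZE = 3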
+
+LOOKUP_CACHE_SIZE = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available.
+# Private class members and static file members will be hidden unless
+# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES, respectively.
+
+EXTRACT_ALL = YES
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
+# will be included in the documentation.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# scope will be included in the documentation.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file
+# will be included in the documentation.
+
+EXTRACT_STATIC = YES
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
+# defined locally in source files will be included in the documentation.
+# If set to NO only classes defined in header files are included.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. When set to YES local
+# methods, which are defined in the implementation section but not in
+# the interface, are included in the documentation.
+# If set to NO (the default) only methods in the interface are included.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base
+# name of the file that contains the anonymous namespace. By default
+# anonymous namespaces are hidden.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
+# undocumented members of documented classes, files or namespaces.
+# If set to NO (the default) these members will be included in the
+# various overviews, but no documentation section is generated.
+# This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy.
+# If set to NO (the default) these classes will be included in the various
+# overviews. This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
+# friend (class|struct|union) declarations.
+# If set to NO (the default) these declarations will be included in the
+# documentation.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
+# documentation blocks found inside the body of a function.
+# If set to NO (the default) these blocks will be appended to the
+# function's detailed documentation block.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation
+# that is typed after a \internal command is included. If the tag is set
+# to NO (the default) then the documentation will be excluded.
+# Set it to YES to include the internal documentation.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
+# file names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
+# will show members with their full class and namespace scopes in the
+# documentation. If set to YES the scope will be hidden.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
+# will put a list of the files that are included by a file in the documentation
+# of that file.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen
+# will list include files with double quotes in the documentation
+# rather than with sharp brackets.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
+# is inserted in the documentation for inline members.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
+# will sort the (detailed) documentation of file and class members
+# alphabetically by member name. If set to NO the members will appear in
+# declaration order.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
+# brief documentation of file, namespace and class members alphabetically
+# by member name. If set to NO (the default) the members will appear in
+# declaration order.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen
+# will sort the (brief and detailed) documentation of class members so that
+# constructors and destructors are listed first. If set to NO (the default)
+# the constructors will appear in the respective orders defined by
+# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS.
+# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO
+# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
+# hierarchy of group names into alphabetical order. If set to NO (the default)
+# the group names will appear in their defined order.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
+# sorted by fully-qualified names, including namespaces. If set to
+# NO (the default), the class list will be sorted only by class name,
+# not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the
+# alphabetical list.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to
+# do proper type resolution of all parameters of a function it will reject a
+# match between the prototype and the implementation of a member function even
+# if there is only one candidate or it is obvious which candidate to choose
+# by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen
+# will still accept a match between prototype and implementation in such cases.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or
+# disable (NO) the todo list. This list is created by putting \todo
+# commands in the documentation.
+
+GENERATE_TODOLIST = NO
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or
+# disable (NO) the test list. This list is created by putting \test
+# commands in the documentation.
+
+GENERATE_TESTLIST = NO
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or
+# disable (NO) the bug list. This list is created by putting \bug
+# commands in the documentation.
+
+GENERATE_BUGLIST = NO
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= NO
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if section-label ... \endif
+# and \cond section-label ... \endcond blocks.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or macro consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and macros in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES = NO
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
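+# A hypothetical example (not used here): print the abbreviated hash of the
+# last git commit that touched each input file as its version string:
+#   FILE_VERSION_FILTER = "git log --pretty=format:%h -1"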
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option.
+# You can optionally specify a file name after the option; if omitted,
+# DoxygenLayout.xml will be used as the name of the layout file.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files
+# containing the references data. This must be a list of .bib files. The
+# .bib extension is automatically appended if omitted. Using this command
+# requires the bibtex tool to be installed. See also
+# http://en.wikipedia.org/wiki/BibTeX for more info. For LaTeX the style
+# of the bibliography can be controlled using LATEX_BIB_STYLE. To use this
+# feature you need bibtex and perl available in the search path. Do not use
+# file names with spaces, bibtex cannot handle them.
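+# For illustration only (references.bib is a made-up name, not part of this
+# tree), a single bibliography file would be listed as:
+#   CITE_BIB_FILES = references.bib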
+
+CITE_BIB_FILES =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = YES
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR = YES
+
+# The WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
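+# For illustration, a variant that also prints the file version (only
+# meaningful when FILE_VERSION_FILTER is set) could look like:
+#   WARN_FORMAT = "$file:$line: $text ($version)"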
+
+WARN_FORMAT = "$file:$line: $text "
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT = @top_srcdir@ \
+ @top_builddir@
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh
+# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py
+# *.f90 *.f *.for *.vhd *.vhdl
+
+FILE_PATTERNS = *.h \
+ *.dox
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE = @abs_top_builddir@/docs/doxygen/html \
+ @abs_top_builddir@/docs/doxygen/xml \
+ @abs_top_builddir@/docs/doxygen/other/doxypy.py \
+ @abs_top_builddir@/_CPack_Packages \
+ @abs_top_srcdir@/cmake
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS = */.deps/* \
+ */.libs/* \
+ */.svn/* \
+ */CVS/* \
+ */__init__.py \
+ */qa_*.cc \
+ */qa_*.h \
+ */qa_*.py
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS = ad9862 \
+ numpy \
+ *swig* \
+ *Swig* \
+ *my_top_block* \
+ *my_graph* \
+ *app_top_block* \
+ *am_rx_graph* \
+ *_queue_watcher_thread* \
+ *parse* \
+ *MyFrame* \
+ *MyApp* \
+ *PyObject* \
+ *wfm_rx_block* \
+ *_sptr* \
+ *debug* \
+ *wfm_rx_sca_block* \
+ *tv_rx_block* \
+ *wxapt_rx_block* \
+ *example_signal*
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be ignored.
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty or if
+# none of the patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS = *.py=@top_srcdir@/doc/doxygen/other/doxypy.py
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any)
+# and it is also possible to disable source filtering for a specific pattern
+# using *.ext= (so without naming a filter). This option only has effect when
+# FILTER_SOURCE_FILES is enabled.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on, for instance,
+# GitHub and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C, C++ and Fortran comments will always remain visible.
+
+STRIP_CODE_COMMENTS = NO
+
+# If the REFERENCED_BY_RELATION tag is set to YES
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION = YES
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
+# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
+# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
+# link to the source code.
+# Otherwise they will link to the documentation.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated. Enable this if the project
+# contains a lot of classes, structs, unions or interfaces.
+
+ALPHABETICAL_INDEX = YES
+
+# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
+# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
+# in which this list will be split (can be a number in the range [1..20])
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all
+# classes will be put under the same header in the alphabetical index.
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
+# should be ignored while generating the index headers.
+
+IGNORE_PREFIX =
+
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
+# generate HTML output.
+
+GENERATE_HTML = @enable_html_docs@
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `html' will be used as the default path.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
+# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
+# doxygen will generate files with .html extension.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a personal HTML header for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard header. Note that when using a custom header you are responsible
+# for the proper inclusion of any scripts and style sheets that doxygen
+# needs, which is dependent on the configuration options used.
+# It is advised to generate a default header using "doxygen -w html
+# header.html footer.html stylesheet.css YourConfigFile" and then modify
+# that header. Note that the header is subject to change so you typically
+# have to redo this when upgrading to a newer version of doxygen or when
+# changing the value of configuration settings such as GENERATE_TREEVIEW!
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a personal HTML footer for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard footer.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
+# style sheet that is used by each HTML page. It can be used to
+# fine-tune the look of the HTML output. If left blank doxygen will
+# generate a default style sheet. Note that it is recommended to use
+# HTML_EXTRA_STYLESHEET instead of this one, as it is more robust and this
+# tag will in the future become obsolete.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional
+# user-defined cascading style sheet that is included after the standard
+# style sheets created by doxygen. Using this option one can overrule
+# certain style aspects. This is preferred over using HTML_STYLESHEET
+# since it does not replace the standard style sheet and is therefore more
+# robust against future updates. Doxygen will copy the style sheet file to
+# the output directory.
+
+HTML_EXTRA_STYLESHEET =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that
+# the files will be copied as-is; there are no commands or markers available.
+
+HTML_EXTRA_FILES =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output.
+# Doxygen will adjust the colors in the style sheet and background images
+# according to this color. Hue is specified as an angle on a colorwheel,
+# see http://en.wikipedia.org/wiki/Hue for more information.
+# For instance the value 0 represents red, 60 is yellow, 120 is green,
+# 180 is cyan, 240 is blue, 300 purple, and 360 is red again.
+# The allowed range is 0 to 359.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of
+# the colors in the HTML output. For a value of 0 the output will use
+# grayscales only. A value of 255 will produce the most vivid colors.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to
+# the luminance component of the colors in the HTML output. Values below
+# 100 gradually make the output lighter, whereas values above 100 make
+# the output darker. The value divided by 100 is the actual gamma applied,
+# so 80 represents a gamma of 0.8. The value 220 represents a gamma of 2.2,
+# and 100 does not change the gamma.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting
+# this to NO can help when comparing the output of multiple runs.
+
+HTML_TIMESTAMP = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of
+# entries shown in the various tree structured indices initially; the user
+# can expand and collapse entries dynamically later on. Doxygen will expand
+# the tree to such a level that at most the specified number of entries are
+# visible (unless a fully collapsed tree already exceeds this amount).
+# So setting the number of entries to 1 will produce a fully collapsed tree by
+# default. 0 is a special value representing an infinite number of entries
+# and will result in a fully expanded tree by default.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files
+# will be generated that can be used as input for Apple's Xcode 3
+# integrated development environment, introduced with OSX 10.5 (Leopard).
+# To create a documentation set, doxygen will generate a Makefile in the
+# HTML output directory. Running make will produce the docset in that
+# directory and running "make install" will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
+# it at startup.
+# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+
+GENERATE_DOCSET = NO
+
+# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
+# feed. A documentation feed provides an umbrella under which multiple
+# documentation sets from a single provider (such as a company or product suite)
+# can be grouped.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
+# should uniquely identify the documentation set bundle. This should be a
+# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
+# will append .docset to the name.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely
+# identify the documentation publisher. This should be a reverse domain-name
+# style string, e.g. com.mycompany.MyDocSet.documentation.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls if a separate .chi index file is generated (YES) or that
+# it should be included in the master .chm file (NO).
+
+GENERATE_CHI = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
+# is used to encode HtmlHelp index (hhk), content (hhc) and project file
+# content.
+
+CHM_INDEX_ENCODING =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
+# controls whether a binary table of contents is generated (YES) or a
+# normal table of contents (NO) in the .chm file.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members
+# to the contents of the HTML help documentation and to the tree view.
+
+TOC_EXPAND = YES
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated
+# that can be used as input for Qt's qhelpgenerator to generate a
+# Qt Compressed Help (.qch) of the generated HTML documentation.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can
+# be used to specify the file name of the resulting .qch file.
+# The path specified is relative to the HTML output folder.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#namespace
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#virtual-folders
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to
+# add. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#custom-filters
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters">
+# Qt Help Project / Custom Filters</a>.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes">
+# Qt Help Project / Filter Attributes</a>.
+
+QHP_SECT_FILTER_ATTRS =
+
+# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can
+# be used to specify the location of Qt's qhelpgenerator.
+# If non-empty doxygen will try to run qhelpgenerator on the generated
+# .qhp file.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files
+# will be generated, which together with the HTML files, form an Eclipse help
+# plugin. To install this plugin and make it available under the help contents
+# menu in Eclipse, the contents of the directory containing the HTML and XML
+# files need to be copied into the plugins directory of Eclipse. The name of
+# the directory within the plugins directory should be the same as
+# the ECLIPSE_DOC_ID value. After copying, Eclipse needs to be restarted before
+# the help appears.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have
+# this name.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# The DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs)
+# at top of each HTML page. The value NO (the default) enables the index and
+# the value YES disables it. Since the tabs have the same information as the
+# navigation tree, you can set this option to YES if you already set
+# GENERATE_TREEVIEW to YES.
+
+DISABLE_INDEX = YES
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information.
+# If the tag value is set to YES, a side panel will be generated
+# containing a tree-like index structure (just like the one that
+# is generated for HTML Help). For this to work a browser that supports
+# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser).
+# Windows users are probably better off using the HTML help feature.
+# Since the tree basically has the same information as the tab index, you
+# could consider setting DISABLE_INDEX to YES when enabling this option.
+
+GENERATE_TREEVIEW = YES
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values
+# (range [0,1..20]) that doxygen will group on one line in the generated HTML
+# documentation. Note that a value of 0 will completely suppress the enum
+# values from appearing in the overview section.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH = 180
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open
+# links to external symbols imported via tag files in a separate window.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included
+# as images in the HTML documentation. The default is 10. Note that
+# when you change the font size after a successful doxygen run you need
+# to manually remove any form_*.png images from the HTML output directory
+# to force them to be regenerated.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are
+# not supported properly for IE 6.0, but are supported on all modern browsers.
+# Note that when changing this option you need to delete any form_*.png files
+# in the HTML output before the changes have effect.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax
+# (see http://www.mathjax.org) which uses client side Javascript for the
+# rendering instead of using prerendered bitmaps. Use this if you do not
+# have LaTeX installed or if you want the formulas to look prettier in the HTML
+# output. When enabled you may also need to install MathJax separately and
+# configure the path to it using the MATHJAX_RELPATH option.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. Supported types are HTML-CSS, NativeMML (i.e. MathML) and
+# SVG. The default value is HTML-CSS, which is slower, but has the best
+# compatibility.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the
+# HTML output directory using the MATHJAX_RELPATH option. The destination
+# directory should contain the MathJax.js script. For instance, if the mathjax
+# directory is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to
+# the MathJax Content Delivery Network so you can quickly see the result without
+# installing MathJax.
+# However, it is strongly recommended to install a local
+# copy of MathJax from http://www.mathjax.org before deployment.
+
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax extension
+# names that should be enabled during MathJax rendering.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript
+# pieces of code that will be used on startup of the MathJax code.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box
+# for the HTML output. The underlying search engine uses javascript
+# and DHTML and should work on any modern browser. Note that when using
+# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets
+# (GENERATE_DOCSET) there is already a search function so this one should
+# typically be disabled. For large projects the javascript based search engine
+# can be slow; in that case, enabling SERVER_BASED_SEARCH may provide a better
+# solution.
+
+SEARCHENGINE = NO
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript.
+# There are two flavours of web server based search depending on the
+# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
+# searching and an index file used by the script. When EXTERNAL_SEARCH is
+# enabled the indexing and searching needs to be provided by external tools.
+# See the manual for details.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain
+# the search results. Doxygen ships with an example indexer (doxyindexer) and
+# search engine (doxysearch.cgi) which are based on the open source search
+# engine library Xapian. See the manual for configuration details.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+# Doxygen ships with an example search engine (doxysearch) which is based on
+# the open source search engine library Xapian. See the manual for configuration
+# details.
+
+SEARCHENGINE_URL =
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found.
+# The format is: EXTRA_SEARCH_MAPPINGS = id1=loc1 id2=loc2 ...
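+# A hypothetical example (the id and location are made up):
+#   EXTRA_SEARCH_MAPPINGS = other_docs=../other_docs/html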
+
+EXTRA_SEARCH_MAPPINGS =
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
+# generate Latex output.
+
+GENERATE_LATEX = @enable_latex_docs@
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `latex' will be used as the default path.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked. If left blank `latex' will be used as the default command name.
+# Note that when enabling USE_PDFLATEX this option is only used for
+# generating bitmaps for formulas in the HTML output, but not in the
+# Makefile that is written to the output directory.
+
+LATEX_CMD_NAME = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
+# generate index for LaTeX. If left blank `makeindex' will be used as the
+# default command name.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
+# LaTeX documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used
+# by the printer. Possible values are: a4, letter, legal and
+# executive. If left blank a4 will be used.
+
+PAPER_TYPE = letter
+
+# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
+# packages that should be included in the LaTeX output.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
+# the generated latex document. The header should contain everything until
+# the first chapter. If it is left blank doxygen will generate a
+# standard header. Notice: only use this tag if you know what you are doing!
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for
+# the generated latex document. The footer should contain everything after
+# the last chapter. If it is left blank doxygen will generate a
+# standard footer. Notice: only use this tag if you know what you are doing!
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images
+# or other source files which should be copied to the LaTeX output directory.
+# Note that the files will be copied as-is; there are no commands or markers
+# available.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
+# is prepared for conversion to pdf (using ps2pdf). The pdf file will
+# contain links (just like the HTML output) instead of page references
+# This makes the output suitable for online browsing using a pdf viewer.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
+# plain latex in the generated Makefile. Set this option to YES to get a
+# higher quality PDF documentation.
+
+USE_PDFLATEX = NO
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep
+# running if errors occur, instead of asking the user for help.
+# This option is also used when generating formulas in HTML.
+
+LATEX_BATCHMODE = NO
+
+# If LATEX_HIDE_INDICES is set to YES then doxygen will not
+# include the index chapters (such as File Index, Compound Index, etc.)
+# in the output.
+
+LATEX_HIDE_INDICES = NO
+
+# If LATEX_SOURCE_CODE is set to YES then doxygen will include
+# source code with syntax highlighting in the LaTeX output.
+# Note that which sources are shown also depends on other settings
+# such as SOURCE_BROWSER.
+
+LATEX_SOURCE_CODE = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. The default style is "plain". See
+# http://en.wikipedia.org/wiki/BibTeX for more info.
+
+LATEX_BIB_STYLE = plain
+
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
+# The RTF output is optimized for Word 97 and may not look very pretty with
+# other RTF readers or editors.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `rtf' will be used as the default path.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
+# RTF documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
+# will contain hyperlink fields. The RTF file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using WORD or other
+# programs which support those fields.
+# Note: wordpad (write) and others do not support links.
+
+RTF_HYPERLINKS = NO
+
+# Load style sheet definitions from file. Syntax is similar to doxygen's
+# config file, i.e. a series of assignments. You only have to provide
+# replacements; missing definitions are set to their default value.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an rtf document.
+# Syntax is similar to doxygen's config file.
+
+RTF_EXTENSIONS_FILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
+# generate man pages
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `man' will be used as the default path.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to
+# the generated man pages (default is the subroutine's section .3)
+
+MAN_EXTENSION = .3
+
+# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
+# then it will generate one additional man file for each entity
+# documented in the real man page(s). These additional files
+# only source the real man page, but without them the man command
+# would be unable to find the correct page. The default is NO.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES Doxygen will
+# generate an XML file that captures the structure of
+# the code including all documentation.
+
+GENERATE_XML = @enable_xml_docs@
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `xml' will be used as the default path.
+
+XML_OUTPUT = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_SCHEMA =
+
+# The XML_DTD tag can be used to specify an XML DTD,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_DTD =
+
+# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
+# dump the program listings (including syntax highlighting
+# and cross-referencing information) to the XML output. Note that
+# enabling this will significantly increase the size of the XML output.
+
+XML_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES Doxygen will generate DOCBOOK files
+# that can be used to generate PDF.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the DOCBOOK pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it. If left blank docbook will be used as the default path.
+
+DOCBOOK_OUTPUT = docbook
+
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
+# generate an AutoGen Definitions (see autogen.sf.net) file
+# that captures the structure of the code including all
+# documentation. Note that this feature is still experimental
+# and incomplete at the moment.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES Doxygen will
+# generate a Perl module file that captures the structure of
+# the code including all documentation. Note that this
+# feature is still experimental and incomplete at the
+# moment.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
+# the necessary Makefile rules, Perl scripts and LaTeX code to be able
+# to generate PDF and DVI output from the Perl module output.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
+# nicely formatted so it can be parsed by a human reader. This is useful
+# if you want to understand what is going on. On the other hand, if this
+# tag is set to NO the size of the Perl module output will be much smaller
+# and Perl will parse it just the same.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file
+# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
+# This is useful so different doxyrules.make files included by the same
+# Makefile don't overwrite each other's variables.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
+# evaluate all C-preprocessor directives found in the sources and include
+# files.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
+# names in the source code. If set to NO (the default) only conditional
+# compilation will be performed. Macro expansion can be done in a controlled
+# way by setting EXPAND_ONLY_PREDEF to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
+# then the macro expansion is limited to the macros specified with the
+# PREDEFINED and EXPAND_AS_DEFINED tags.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
+# pointed to by INCLUDE_PATH will be searched when a #include is found.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by
+# the preprocessor.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will
+# be used.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that
+# are defined before the preprocessor is started (similar to the -D option of
+# gcc). The argument of the tag is a list of macros of the form: name
+# or name=definition (no spaces). If the definition and the = are
+# omitted =1 is assumed. To prevent a macro definition from being
+# undefined via #undef or recursively expanded use the := operator
+# instead of the = operator.
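+# A hypothetical example (the macro names are made up): MY_FLAG is defined as 1
+# (no explicit definition), and MY_VALUE uses := so it cannot be undefined via
+# #undef or recursively expanded:
+#   PREDEFINED = MY_FLAG MY_VALUE:=42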
+
+PREDEFINED =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
+# this tag can be used to specify a list of macro names that should be expanded.
+# The macro definition that is found in the sources will be used.
+# Use the PREDEFINED tag if you want to use a different macro definition that
+# overrules the definition found in the source code.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
+# doxygen's preprocessor will remove all references to function-like macros
+# that are alone on a line, have an all uppercase name, and do not end with a
+# semicolon, because these will confuse the parser if not removed.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES option can be used to specify one or more tagfiles. For each
+# tag file the location of the external documentation should be added. The
+# format of a tag file without this location is as follows:
+#
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+#
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where "loc1" and "loc2" can be relative or absolute paths
+# or URLs. Note that each tag file must have a unique name (where the name does
+# NOT include the path). If a tag file is not located in the directory in which
+# doxygen is run, you must also specify the path to the tagfile here.
+
+TAGFILES =
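+#
+# For example, to cross-link against another project's documentation through a
+# tag file (the file name and URL below are placeholders), one could write:
+#   TAGFILES = other_project.tag=http://example.com/other_project/html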
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create
+# a tag file that is based on the input files it reads.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed
+# in the class index. If set to NO only the inherited external classes
+# will be listed.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will
+# be listed.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed
+# in the related pages index. If set to NO, only the current project's
+# pages will be listed.
+
+EXTERNAL_PAGES = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of `which perl').
+
+PERL_PATH = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# or super classes. Setting the tag to NO turns the diagrams off. Note that
+# this option also works with HAVE_DOT disabled, but it is recommended to
+# install and use dot, since it yields more powerful graphs.
+
+CLASS_DIAGRAMS = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH =
+
+# If set to YES, the inheritance and collaboration graphs will hide
+# inheritance and usage relations if the target is undocumented
+# or is not a class.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz, a graph visualization
+# toolkit from AT&T and Lucent Bell Labs. The other options in this section
+# have no effect if this option is set to NO (the default)
+
+HAVE_DOT = @HAVE_DOT@
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is
+# allowed to run in parallel. When set to 0 (the default) doxygen will
+# base this on the number of processors available in the system. You can set it
+# explicitly to a value larger than 0 to get control over the balance
+# between CPU load and processing speed.
+
+DOT_NUM_THREADS = 0
+
+# By default doxygen will use the Helvetica font for all dot files that
+# doxygen generates. If you want a different font, you can specify
+# the font name using DOT_FONTNAME. You need to make sure dot is able to find
+# the font, which can be done by putting it in a standard location or by setting
+# the DOTFONTPATH environment variable or by setting DOT_FONTPATH to the
+# directory containing the font.
+
+DOT_FONTNAME = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs.
+# The default size is 10pt.
+
+DOT_FONTSIZE = 10
+
+# By default doxygen will tell dot to use the Helvetica font.
+# If you specify a different font using DOT_FONTNAME you can use DOT_FONTPATH to
+# set the path where dot can find it.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force the
+# CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class reference variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH = NO
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct groups dependencies
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside
+# the class node. If there are many fields or methods and many nodes the
+# graph may become too big to be useful. The UML_LIMIT_NUM_FIELDS
+# threshold limits the number of items for each type to make the size more
+# manageable. Set this to 0 for no limit. Note that the threshold may be
+# exceeded by 50% before the limit is enforced.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS = NO
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH and HAVE_DOT options are set to YES then
+# doxygen will generate a call dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will generate a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are svg, png, jpg, or gif.
+# If left blank png will be used. If you choose svg you need to set
+# HTML_FILE_EXTENSION to xhtml in order to make the SVG files
+# visible in IE 9+ (other browsers do not have this requirement).
+
+DOT_IMAGE_FORMAT = png
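+#
+# For example, to generate SVG graphs instead (see the note above about setting
+# HTML_FILE_EXTENSION for IE 9+), one could write:
+#   DOT_IMAGE_FORMAT = svg
+#   HTML_FILE_EXTENSION = .xhtml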
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+# Note that this requires a modern browser other than Internet Explorer.
+# Tested and working are Firefox, Chrome, Safari, and Opera. For IE 9+ you
+# need to set HTML_FILE_EXTENSION to xhtml in order to make the SVG files
+# visible. Older versions of IE do not have SVG support.
+
+INTERACTIVE_SVG = NO
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the
+# \mscfile command).
+
+MSCFILE_DIRS =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph. If the number of nodes in a graph
+# becomes larger than this value, doxygen will truncate the graph, which is
+# visualized by representing a node as a red box. Note that if the number of
+# direct children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, the graph will not be shown at all. Also note
+# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lay further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not
+# seem to support this out of the box. Warning: Depending on the platform used,
+# enabling this option may lead to badly anti-aliased labels on the edges of
+# a graph (i.e. they become hard to read).
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+
+DOT_MULTI_TARGETS = YES
+
+# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
+# generate a legend page explaining the meaning of the various boxes and
+# arrows in the dot generated graphs.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
+# remove the intermediate dot files that are used to generate
+# the various graphs.
+
+DOT_CLEANUP = YES
diff --git a/tools/gr-usrptest/docs/doxygen/Doxyfile.swig_doc.in b/tools/gr-usrptest/docs/doxygen/Doxyfile.swig_doc.in
new file mode 100644
index 000000000..57736d7d0
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/Doxyfile.swig_doc.in
@@ -0,0 +1,1890 @@
+# Doxyfile 1.8.4
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed
+# in front of the TAG it is preceding.
+# All text after a hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (" ").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# http://www.gnu.org/software/libiconv for the list of possible encodings.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or sequence of words) that should
+# identify the project. Note that if you do not use Doxywizard you need
+# to put quotes around the project name if it contains spaces.
+
+PROJECT_NAME = @CPACK_PACKAGE_NAME@
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number.
+# This could be handy for archiving the generated documentation or
+# if some version control system is used.
+
+PROJECT_NUMBER = @CPACK_PACKAGE_VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give the viewer
+# a quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF =
+
+# With the PROJECT_LOGO tag one can specify a logo or icon that is
+# included in the documentation. The maximum height of the logo should not
+# exceed 55 pixels and the maximum width should not exceed 200 pixels.
+# Doxygen will copy the logo to the output directory.
+
+PROJECT_LOGO =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
+# base path where the generated documentation will be put.
+# If a relative path is entered, it will be relative to the location
+# where doxygen was started. If left blank the current directory will be used.
+
+OUTPUT_DIRECTORY = @OUTPUT_DIRECTORY@
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
+# 4096 sub-directories (in 2 levels) under the output directory of each output
+# format and will distribute the generated files over these directories.
+# Enabling this option can be useful when feeding doxygen a huge amount of
+# source files, where putting all generated files in the same directory would
+# otherwise cause performance problems for the file system.
+
+CREATE_SUBDIRS = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are:
+# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
+# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
+# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
+# messages), Korean, Korean-en, Latvian, Lithuanian, Norwegian, Macedonian,
+# Persian, Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic,
+# Slovak, Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
+
+OUTPUT_LANGUAGE = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
+# include brief member descriptions after the members that are listed in
+# the file and class documentation (similar to JavaDoc).
+# Set to NO to disable this.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
+# the brief description of a member or function before the detailed description.
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator
+# that is used to form the text in various listings. Each string
+# in this list, if found as the leading text of the brief description, will be
+# stripped from the text, and the result, after processing the whole list, is
+# used as the annotated text. Otherwise, the brief description is used as-is.
+# If left blank, the following values are used ("$name" is automatically
+# replaced with the name of the entity): "The $name class" "The $name widget"
+# "The $name file" "is" "provides" "specifies" "contains"
+# "represents" "a" "an" "the"
+
+ABBREVIATE_BRIEF =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# Doxygen will generate a detailed section even if there is only a brief
+# description.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
+# path before each file's name in the file list and in the header files. If set
+# to NO the shortest path that makes the file name unique will be used.
+
+FULL_PATH_NAMES = YES
+
+# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
+# can be used to strip a user-defined part of the path. Stripping is
+# only done if one of the specified strings matches the left-hand part of
+# the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the
+# path to strip. Note that you can specify absolute paths here, as well as
+# relative paths, which will be relative to the directory where doxygen is
+# started.
+
+STRIP_FROM_PATH =
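+#
+# For example, to strip a hypothetical checkout location from the displayed
+# paths, one could write:
+#   STRIP_FROM_PATH = /home/user/myproject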
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
+# the path mentioned in the documentation of a class, which tells
+# the reader which header file to include in order to use a class.
+# If left blank only the name of the header file containing the class
+# definition is used. Otherwise one should specify the include paths that
+# are normally passed to the compiler using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
+# (but less readable) file names. This can be useful if your file system
+# doesn't support long names like on DOS, Mac, or CD-ROM.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
+# will interpret the first line (until the first dot) of a JavaDoc-style
+# comment as the brief description. If set to NO, the JavaDoc
+# comments will behave just like regular Qt-style comments
+# (thus requiring an explicit @brief command for a brief description.)
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
+# interpret the first line (until the first dot) of a Qt-style
+# comment as the brief description. If set to NO, the comments
+# will behave just like regular Qt-style comments (thus requiring
+# an explicit \brief command for a brief description.)
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
+# treat a multi-line C++ special comment block (i.e. a block of //! or ///
+# comments) as a brief description. This used to be the default behaviour.
+# The new default is to treat a multi-line C++ comment block as a detailed
+# description. Set this tag to YES if you prefer the old behaviour instead.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
+# member inherits the documentation from any documented member that it
+# re-implements.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
+# a new page for each member. If set to NO, the documentation of a member will
+# be part of the file/class/namespace that contains it.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab.
+# Doxygen uses this value to replace tabs by spaces in code fragments.
+
+TAB_SIZE = 8
+
+# This tag can be used to specify a number of aliases that acts
+# as commands in the documentation. An alias has the form "name=value".
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to
+# put the command \sideeffect (or @sideeffect) in the documentation, which
+# will result in a user-defined paragraph with heading "Side Effects:".
+# You can put \n's in the value part of an alias to insert newlines.
+
+ALIASES =
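+#
+# For example, the \sideeffect command described above could be defined with:
+#   ALIASES = "sideeffect=\par Side Effects:\n"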
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding
+# "class=itcl::class" will allow you to use the command class in the
+# itcl::class meaning.
+
+TCL_SUBST =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
+# sources only. Doxygen will then generate output that is more tailored for C.
+# For instance, some of the names that are used will be different. The list
+# of all members will be omitted, etc.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
+# sources only. Doxygen will then generate output that is more tailored for
+# Java. For instance, namespaces will be presented as packages, qualified
+# scopes will look different, etc.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources only. Doxygen will then generate output that is more tailored for
+# Fortran.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for
+# VHDL.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension,
+# and language is one of the parsers supported by doxygen: IDL, Java,
+# Javascript, CSharp, C, C++, D, PHP, Objective-C, Python, Fortran, and VHDL.
+# For instance, to make doxygen treat .inc files as Fortran files (default
+# is PHP), and .f files as C (default is Fortran), use: inc=Fortran f=C. Note
+# that for custom extensions you also need to set FILE_PATTERNS otherwise the
+# files are not read by doxygen.
+
+EXTENSION_MAPPING =
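+#
+# For example, the inc=Fortran f=C case described above would be written as:
+#   EXTENSION_MAPPING = inc=Fortran f=C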
+
+# If MARKDOWN_SUPPORT is enabled (the default) then doxygen pre-processes all
+# comments according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you
+# can mix doxygen, HTML, and XML commands with Markdown formatting.
+# Disable only in case of backward compatibility issues.
+
+MARKDOWN_SUPPORT = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should
+# set this tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string); vs.
+# func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+
+BUILTIN_STL_SUPPORT = YES
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
+# Doxygen will parse them like normal C++ but will assume all classes use public
+# instead of private inheritance when no explicit protection keyword is present.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES (the
+# default) will make doxygen replace the get and set methods by a property in
+# the documentation. This will only work if the methods are indeed getting or
+# setting a simple type. If this is not the case, or you want to show the
+# methods anyway, you should set this option to NO.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
+# the same type (for instance a group of public functions) to be put as a
+# subgroup of that type (e.g. under the Public Functions section). Set it to
+# NO to prevent subgrouping. Alternatively, this can be done per class using
+# the \nosubgrouping command.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and
+# unions are shown inside the group in which they are included (e.g. using
+# @ingroup) instead of on a separate page (for HTML and Man pages) or
+# section (for LaTeX and RTF).
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and
+# unions with only public data fields or simple typedef fields will be shown
+# inline in the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO (the default), structs, classes, and unions are shown on a separate
+# page (for HTML and Man pages) or section (for LaTeX and RTF).
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
+# is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically
+# be useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can
+# be an expensive process and often the same symbol appears multiple times in
+# the code, doxygen keeps a cache of pre-resolved symbols. If the cache is too
+# small doxygen will become slower. If the cache is too large, memory is wasted.
+# The cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid
+# range is 0..9, the default is 0, corresponding to a cache size of 2^16 = 65536
+# symbols.
+
+LOOKUP_CACHE_SIZE = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available.
+# Private class members and static file members will be hidden unless
+# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES, respectively.
+
+EXTRACT_ALL = YES
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
+# will be included in the documentation.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# scope will be included in the documentation.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file
+# will be included in the documentation.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
+# defined locally in source files will be included in the documentation.
+# If set to NO only classes defined in header files are included.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. When set to YES, local
+# methods, which are defined in the implementation section but not in
+# the interface, are included in the documentation.
+# If set to NO (the default) only methods in the interface are included.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base
+# name of the file that contains the anonymous namespace. By default
+# anonymous namespaces are hidden.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
+# undocumented members of documented classes, files or namespaces.
+# If set to NO (the default) these members will be included in the
+# various overviews, but no documentation section is generated.
+# This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy.
+# If set to NO (the default) these classes will be included in the various
+# overviews. This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
+# friend (class|struct|union) declarations.
+# If set to NO (the default) these declarations will be included in the
+# documentation.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
+# documentation blocks found inside the body of a function.
+# If set to NO (the default) these blocks will be appended to the
+# function's detailed documentation block.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation
+# that is typed after a \internal command is included. If the tag is set
+# to NO (the default) then the documentation will be excluded.
+# Set it to YES to include the internal documentation.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
+# file names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
+# will show members with their full class and namespace scopes in the
+# documentation. If set to YES the scope will be hidden.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
+# will put a list of the files that are included by a file in the documentation
+# of that file.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen
+# will list include files with double quotes in the documentation
+# rather than with sharp brackets.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
+# is inserted in the documentation for inline members.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
+# will sort the (detailed) documentation of file and class members
+# alphabetically by member name. If set to NO the members will appear in
+# declaration order.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
+# brief documentation of file, namespace and class members alphabetically
+# by member name. If set to NO (the default) the members will appear in
+# declaration order.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen
+# will sort the (brief and detailed) documentation of class members so that
+# constructors and destructors are listed first. If set to NO (the default)
+# the constructors will appear in the respective orders defined by
+# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS.
+# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO
+# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
+# hierarchy of group names into alphabetical order. If set to NO (the default)
+# the group names will appear in their defined order.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
+# sorted by fully-qualified names, including namespaces. If set to
+# NO (the default), the class list will be sorted only by class name,
+# not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the
+# alphabetical list.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to
+# do proper type resolution of all parameters of a function it will reject a
+# match between the prototype and the implementation of a member function even
+# if there is only one candidate or it is obvious which candidate to choose
+# by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen
+# will still accept a match between prototype and implementation in such cases.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or
+# disable (NO) the todo list. This list is created by putting \todo
+# commands in the documentation.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or
+# disable (NO) the test list. This list is created by putting \test
+# commands in the documentation.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or
+# disable (NO) the bug list. This list is created by putting \bug
+# commands in the documentation.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if section-label ... \endif
+# and \cond section-label ... \endcond blocks.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or macro consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and macros in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option.
+# You can optionally specify a file name after the option, if omitted
+# DoxygenLayout.xml will be used as the name of the layout file.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files
+# containing the references data. This must be a list of .bib files. The
+# .bib extension is automatically appended if omitted. Using this command
+# requires the bibtex tool to be installed. See also
+# http://en.wikipedia.org/wiki/BibTeX for more info. For LaTeX the style
+# of the bibliography can be controlled using LATEX_BIB_STYLE. To use this
+# feature you need bibtex and perl available in the search path. Do not use
+# file names with spaces, bibtex cannot handle them.
+
+CITE_BIB_FILES =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = YES
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR = YES
+
+# The WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT = @INPUT_PATHS@
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh
+# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py
+# *.f90 *.f *.for *.vhd *.vhdl
+
+FILE_PATTERNS = *.h
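+#
+# For example, to also scan .hpp headers in addition to the .h pattern set
+# above, one could write:
+#   FILE_PATTERNS = *.h *.hpp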
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file's absolute path, so to exclude all test directories,
+# for example, use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be ignored.
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty or if
+# none of the patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS =
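+#
+# For example, to run a filter program only on C++ sources (using the
+# pattern=filter form and the placeholder filter name from above), one could
+# write:
+#   FILTER_PATTERNS = *.cpp=my_cpp_filter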
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any)
+# and it is also possible to disable source filtering for a specific pattern
+# using *.ext= (so without naming a filter). This option only has effect when
+# FILTER_SOURCE_FILES is enabled.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on, for instance,
+# GitHub and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
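+#
+# For example, assuming a README.md is among the INPUT files, it could be used
+# as the main page with:
+#   USE_MDFILE_AS_MAINPAGE = README.md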
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C, C++ and Fortran comments will always remain visible.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
+# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
+# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
+# link to the source code.
+# Otherwise they will link to the documentation.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen's
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated. Enable this if the project
+# contains a lot of classes, structs, unions or interfaces.
+
+ALPHABETICAL_INDEX = NO
+
+# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
+# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
+# in which this list will be split (can be a number in the range [1..20])
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all
+# classes will be put under the same header in the alphabetical index.
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
+# should be ignored while generating the index headers.
+
+IGNORE_PREFIX =
+
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
+# generate HTML output.
+
+GENERATE_HTML = NO
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `html' will be used as the default path.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
+# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
+# doxygen will generate files with .html extension.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a personal HTML header for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard header. Note that when using a custom header you are responsible
+# for the proper inclusion of any scripts and style sheets that doxygen
+# needs, which is dependent on the configuration options used.
+# It is advised to generate a default header using "doxygen -w html
+# header.html footer.html stylesheet.css YourConfigFile" and then modify
+# that header. Note that the header is subject to change so you typically
+# have to redo this when upgrading to a newer version of doxygen or when
+# changing the value of configuration settings such as GENERATE_TREEVIEW!
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a personal HTML footer for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard footer.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
+# style sheet that is used by each HTML page. It can be used to
+# fine-tune the look of the HTML output. If left blank doxygen will
+# generate a default style sheet. Note that it is recommended to use
+# HTML_EXTRA_STYLESHEET instead of this one, as it is more robust and this
+# tag will in the future become obsolete.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional
+# user-defined cascading style sheet that is included after the standard
+# style sheets created by doxygen. Using this option one can overrule
+# certain style aspects. This is preferred over using HTML_STYLESHEET
+# since it does not replace the standard style sheet and is therefore more
+# robust against future updates. Doxygen will copy the style sheet file to
+# the output directory.
+
+HTML_EXTRA_STYLESHEET =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that
+# the files will be copied as-is; there are no commands or markers available.
+
+HTML_EXTRA_FILES =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output.
+# Doxygen will adjust the colors in the style sheet and background images
+# according to this color. Hue is specified as an angle on a colorwheel,
+# see http://en.wikipedia.org/wiki/Hue for more information.
+# For instance the value 0 represents red, 60 is yellow, 120 is green,
+# 180 is cyan, 240 is blue, 300 purple, and 360 is red again.
+# The allowed range is 0 to 359.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of
+# the colors in the HTML output. For a value of 0 the output will use
+# grayscales only. A value of 255 will produce the most vivid colors.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to
+# the luminance component of the colors in the HTML output. Values below
+# 100 gradually make the output lighter, whereas values above 100 make
+# the output darker. The value divided by 100 is the actual gamma applied,
+# so 80 represents a gamma of 0.8, the value 220 represents a gamma of 2.2,
+# and 100 does not change the gamma.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting
+# this to NO can help when comparing the output of multiple runs.
+
+HTML_TIMESTAMP = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of
+# entries shown in the various tree structured indices initially; the user
+# can expand and collapse entries dynamically later on. Doxygen will expand
+# the tree to such a level that at most the specified number of entries are
+# visible (unless a fully collapsed tree already exceeds this amount).
+# So setting the number of entries to 1 will produce a fully collapsed tree by
+# default. 0 is a special value representing an infinite number of entries
+# and will result in a fully expanded tree by default.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files
+# will be generated that can be used as input for Apple's Xcode 3
+# integrated development environment, introduced with OSX 10.5 (Leopard).
+# To create a documentation set, doxygen will generate a Makefile in the
+# HTML output directory. Running make will produce the docset in that
+# directory and running "make install" will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
+# it at startup.
+# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+
+GENERATE_DOCSET = NO
+
+# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
+# feed. A documentation feed provides an umbrella under which multiple
+# documentation sets from a single provider (such as a company or product suite)
+# can be grouped.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
+# should uniquely identify the documentation set bundle. This should be a
+# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
+# will append .docset to the name.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely
+# identify the documentation publisher. This should be a reverse domain-name
+# style string, e.g. com.mycompany.MyDocSet.documentation.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls whether a separate .chi index file is generated (YES) or included
+# in the master .chm file (NO).
+
+GENERATE_CHI = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
+# is used to encode HtmlHelp index (hhk), content (hhc) and project file
+# content.
+
+CHM_INDEX_ENCODING =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
+# controls whether a binary table of contents is generated (YES) or a
+# normal table of contents (NO) in the .chm file.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members
+# to the contents of the HTML help documentation and to the tree view.
+
+TOC_EXPAND = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated
+# that can be used as input for Qt's qhelpgenerator to generate a
+# Qt Compressed Help (.qch) of the generated HTML documentation.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can
+# be used to specify the file name of the resulting .qch file.
+# The path specified is relative to the HTML output folder.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#namespace
+
+QHP_NAMESPACE =
+
+# The QHP_VIRTUAL_FOLDER tag specifies the virtual folder to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#virtual-folders
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to
+# add. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#custom-filters
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters">
+# Qt Help Project / Custom Filters</a>.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes">
+# Qt Help Project / Filter Attributes</a>.
+
+QHP_SECT_FILTER_ATTRS =
+
+# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can
+# be used to specify the location of Qt's qhelpgenerator.
+# If non-empty doxygen will try to run qhelpgenerator on the generated
+# .qhp file.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files
+# will be generated, which together with the HTML files, form an Eclipse help
+# plugin. To install this plugin and make it available under the help contents
+# menu in Eclipse, the contents of the directory containing the HTML and XML
+# files needs to be copied into the plugins directory of eclipse. The name of
+# the directory within the plugins directory should be the same as
+# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before
+# the help appears.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have
+# this name.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# The DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs)
+# at top of each HTML page. The value NO (the default) enables the index and
+# the value YES disables it. Since the tabs have the same information as the
+# navigation tree you can set this option to YES if you already set
+# GENERATE_TREEVIEW to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information.
+# If the tag value is set to YES, a side panel will be generated
+# containing a tree-like index structure (just like the one that
+# is generated for HTML Help). For this to work a browser that supports
+# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser).
+# Windows users are probably better off using the HTML help feature.
+# Since the tree basically has the same information as the tab index you
+# could consider setting DISABLE_INDEX to YES when enabling this option.
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values
+# (range [0,1..20]) that doxygen will group on one line in the generated HTML
+# documentation. Note that a value of 0 will completely suppress the enum
+# values from appearing in the overview section.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open
+# links to external symbols imported via tag files in a separate window.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of Latex formulas included
+# as images in the HTML documentation. The default is 10. Note that
+# when you change the font size after a successful doxygen run you need
+# to manually remove any form_*.png images from the HTML output directory
+# to force them to be regenerated.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are
+# not supported properly for IE 6.0, but are supported on all modern browsers.
+# Note that when changing this option you need to delete any form_*.png files
+# in the HTML output before the changes have effect.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax
+# (see http://www.mathjax.org) which uses client side Javascript for the
+# rendering instead of using prerendered bitmaps. Use this if you do not
+# have LaTeX installed or if you want the formulas to look prettier in the HTML
+# output. When enabled you may also need to install MathJax separately and
+# configure the path to it using the MATHJAX_RELPATH option.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. Supported types are HTML-CSS, NativeMML (i.e. MathML) and
+# SVG. The default value is HTML-CSS, which is slower, but has the best
+# compatibility.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the
+# HTML output directory using the MATHJAX_RELPATH option. The destination
+# directory should contain the MathJax.js script. For instance, if the mathjax
+# directory is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to
+# the MathJax Content Delivery Network so you can quickly see the result without
+# installing MathJax.
+# However, it is strongly recommended to install a local
+# copy of MathJax from http://www.mathjax.org before deployment.
+
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax extension
+# names that should be enabled during MathJax rendering.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript
+# pieces of code that will be used on startup of the MathJax code.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box
+# for the HTML output. The underlying search engine uses javascript
+# and DHTML and should work on any modern browser. Note that when using
+# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets
+# (GENERATE_DOCSET) there is already a search function so this one should
+# typically be disabled. For large projects the javascript based search engine
+# can be slow; enabling SERVER_BASED_SEARCH may then provide a better solution.
+
+SEARCHENGINE = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript.
+# There are two flavours of web server based search depending on the
+# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
+# searching and an index file used by the script. When EXTERNAL_SEARCH is
+# enabled the indexing and searching needs to be provided by external tools.
+# See the manual for details.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain
+# the search results. Doxygen ships with an example indexer (doxyindexer) and
+# search engine (doxysearch.cgi) which are based on the open source search
+# engine library Xapian. See the manual for configuration details.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+# Doxygen ships with an example search engine (doxysearch) which is based on
+# the open source search engine library Xapian. See the manual for configuration
+# details.
+
+SEARCHENGINE_URL =
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH AND EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found.
+# The format is: EXTRA_SEARCH_MAPPINGS = id1=loc1 id2=loc2 ...
+
+EXTRA_SEARCH_MAPPINGS =
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
+# generate Latex output.
+
+GENERATE_LATEX = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `latex' will be used as the default path.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked. If left blank `latex' will be used as the default command name.
+# Note that when enabling USE_PDFLATEX this option is only used for
+# generating bitmaps for formulas in the HTML output, but not in the
+# Makefile that is written to the output directory.
+
+LATEX_CMD_NAME = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
+# generate index for LaTeX. If left blank `makeindex' will be used as the
+# default command name.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
+# LaTeX documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used
+# by the printer. Possible values are: a4, letter, legal and
+# executive. If left blank a4 will be used.
+
+PAPER_TYPE = a4wide
+
+# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
+# packages that should be included in the LaTeX output.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
+# the generated latex document. The header should contain everything until
+# the first chapter. If it is left blank doxygen will generate a
+# standard header. Notice: only use this tag if you know what you are doing!
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for
+# the generated latex document. The footer should contain everything after
+# the last chapter. If it is left blank doxygen will generate a
+# standard footer. Notice: only use this tag if you know what you are doing!
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images
+# or other source files which should be copied to the LaTeX output directory.
+# Note that the files will be copied as-is; there are no commands or markers
+# available.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
+# is prepared for conversion to pdf (using ps2pdf). The pdf file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using a pdf viewer.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
+# plain latex in the generated Makefile. Set this option to YES to get a
+# higher quality PDF documentation.
+
+USE_PDFLATEX = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep
+# running if errors occur, instead of asking the user for help.
+# This option is also used when generating formulas in HTML.
+
+LATEX_BATCHMODE = NO
+
+# If LATEX_HIDE_INDICES is set to YES then doxygen will not
+# include the index chapters (such as File Index, Compound Index, etc.)
+# in the output.
+
+LATEX_HIDE_INDICES = NO
+
+# If LATEX_SOURCE_CODE is set to YES then doxygen will include
+# source code with syntax highlighting in the LaTeX output.
+# Note that which sources are shown also depends on other settings
+# such as SOURCE_BROWSER.
+
+LATEX_SOURCE_CODE = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. The default style is "plain". See
+# http://en.wikipedia.org/wiki/BibTeX for more info.
+
+LATEX_BIB_STYLE = plain
+
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output.
+# The RTF output is optimized for Word 97 and may not look very pretty with
+# other RTF readers or editors.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `rtf' will be used as the default path.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
+# RTF documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
+# will contain hyperlink fields. The RTF file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using WORD or other
+# programs which support those fields.
+# Note: wordpad (write) and others do not support links.
+
+RTF_HYPERLINKS = NO
+
+# Load style sheet definitions from file. Syntax is similar to doxygen's
+# config file, i.e. a series of assignments. You only have to provide
+# replacements, missing definitions are set to their default value.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an rtf document.
+# Syntax is similar to doxygen's config file.
+
+RTF_EXTENSIONS_FILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES Doxygen will
+# generate man pages.
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `man' will be used as the default path.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to
+# the generated man pages (default is the subroutine's section .3)
+
+MAN_EXTENSION = .3
+
+# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
+# then it will generate one additional man file for each entity
+# documented in the real man page(s). These additional files
+# only source the real man page, but without them the man command
+# would be unable to find the correct page. The default is NO.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES Doxygen will
+# generate an XML file that captures the structure of
+# the code including all documentation.
+
+GENERATE_XML = YES
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `xml' will be used as the default path.
+
+XML_OUTPUT = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_SCHEMA =
+
+# The XML_DTD tag can be used to specify an XML DTD,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_DTD =
+
+# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
+# dump the program listings (including syntax highlighting
+# and cross-referencing information) to the XML output. Note that
+# enabling this will significantly increase the size of the XML output.
+
+XML_PROGRAMLISTING = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES Doxygen will generate DOCBOOK files
+# that can be used to generate PDF.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the DOCBOOK pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it. If left blank docbook will be used as the default path.
+
+DOCBOOK_OUTPUT = docbook
+
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
+# generate an AutoGen Definitions (see autogen.sf.net) file
+# that captures the structure of the code including all
+# documentation. Note that this feature is still experimental
+# and incomplete at the moment.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES Doxygen will
+# generate a Perl module file that captures the structure of
+# the code including all documentation. Note that this
+# feature is still experimental and incomplete at the
+# moment.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
+# the necessary Makefile rules, Perl scripts and LaTeX code to be able
+# to generate PDF and DVI output from the Perl module output.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
+# nicely formatted so it can be parsed by a human reader. This is useful
+# if you want to understand what is going on. On the other hand, if this
+# tag is set to NO the size of the Perl module output will be much smaller
+# and Perl will parse it just the same.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file
+# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
+# This is useful so different doxyrules.make files included by the same
+# Makefile don't overwrite each other's variables.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
+# evaluate all C-preprocessor directives found in the sources and include
+# files.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
+# names in the source code. If set to NO (the default) only conditional
+# compilation will be performed. Macro expansion can be done in a controlled
+# way by setting EXPAND_ONLY_PREDEF to YES.
+
+MACRO_EXPANSION = YES
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
+# then the macro expansion is limited to the macros specified with the
+# PREDEFINED and EXPAND_AS_DEFINED tags.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES (the default) the include files
+# pointed to by INCLUDE_PATH will be searched when a #include is found.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by
+# the preprocessor.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will
+# be used.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that
+# are defined before the preprocessor is started (similar to the -D option of
+# gcc). The argument of the tag is a list of macros of the form: name
+# or name=definition (no spaces). If the definition and the = are
+# omitted =1 is assumed. To prevent a macro definition from being
+# undefined via #undef or recursively expanded use the := operator
+# instead of the = operator.
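+# For example (hypothetical macro names, not used by this project):
+# PREDEFINED = MY_EXPORT_API= ENABLE_FEATURE_X=1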
+
+PREDEFINED =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
+# this tag can be used to specify a list of macro names that should be expanded.
+# The macro definition that is found in the sources will be used.
+# Use the PREDEFINED tag if you want to use a different macro definition that
+# overrules the definition found in the source code.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
+# doxygen's preprocessor will remove all references to function-like macros
+# that are alone on a line, have an all uppercase name, and do not end with a
+# semicolon, because these will confuse the parser if not removed.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES option can be used to specify one or more tagfiles. For each
+# tag file the location of the external documentation should be added. The
+# format of a tag file without this location is as follows:
+#
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+#
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where "loc1" and "loc2" can be relative or absolute paths
+# or URLs. Note that each tag file must have a unique name (where the name does
+# NOT include the path). If a tag file is not located in the directory in which
+# doxygen is run, you must also specify the path to the tagfile here.
+
+TAGFILES =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create
+# a tag file that is based on the input files it reads.
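+# For example (hypothetical file name): GENERATE_TAGFILE = html/project.tag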
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed
+# in the class index. If set to NO only the inherited external classes
+# will be listed.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will
+# be listed.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed
+# in the related pages index. If set to NO, only the current project's
+# pages will be listed.
+
+EXTERNAL_PAGES = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of `which perl').
+
+PERL_PATH = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# or super classes. Setting the tag to NO turns the diagrams off. Note that
+# this option also works with HAVE_DOT disabled, but it is recommended to
+# install and use dot, since it yields more powerful graphs.
+
+CLASS_DIAGRAMS = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH =
+
+# If set to YES, the inheritance and collaboration graphs will hide
+# inheritance and usage relations if the target is undocumented
+# or is not a class.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz, a graph visualization
+# toolkit from AT&T and Lucent Bell Labs. The other options in this section
+# have no effect if this option is set to NO (the default).
+
+HAVE_DOT = NO
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is
+# allowed to run in parallel. When set to 0 (the default) doxygen will
+# base this on the number of processors available in the system. You can set it
+# explicitly to a value larger than 0 to get control over the balance
+# between CPU load and processing speed.
+
+DOT_NUM_THREADS = 0
+
+# By default doxygen will use the Helvetica font for all dot files that
+# doxygen generates. When you want a differently looking font you can specify
+# the font name using DOT_FONTNAME. You need to make sure dot is able to find
+# the font, which can be done by putting it in a standard location or by setting
+# the DOTFONTPATH environment variable or by setting DOT_FONTPATH to the
+# directory containing the font.
+
+DOT_FONTNAME = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs.
+# The default size is 10pt.
+
+DOT_FONTSIZE = 10
+
+# By default doxygen will tell dot to use the Helvetica font.
+# If you specify a different font using DOT_FONTNAME you can use DOT_FONTPATH to
+# set the path where dot can find it.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force the
+# CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class references variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct groups dependencies
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside
+# the class node. If there are many fields or methods and many nodes the
+# graph may become too big to be useful. The UML_LIMIT_NUM_FIELDS
+# threshold limits the number of items for each type to make the size more
+# manageable. Set this to 0 for no limit. Note that the threshold may be
+# exceeded by 50% before the limit is enforced.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS = NO
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH and HAVE_DOT options are set to YES then
+# doxygen will generate a call dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will generate a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are svg, png, jpg, or gif.
+# If left blank png will be used. If you choose svg you need to set
+# HTML_FILE_EXTENSION to xhtml in order to make the SVG files
+# visible in IE 9+ (other browsers do not have this requirement).
+
+DOT_IMAGE_FORMAT = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+# Note that this requires a modern browser other than Internet Explorer.
+# Tested and working are Firefox, Chrome, Safari, and Opera. For IE 9+ you
+# need to set HTML_FILE_EXTENSION to xhtml in order to make the SVG files
+# visible. Older versions of IE do not have SVG support.
+
+INTERACTIVE_SVG = NO
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the
+# \mscfile command).
+
+MSCFILE_DIRS =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph. If the number of nodes in a graph
+# becomes larger than this value, doxygen will truncate the graph, which is
+# visualized by representing a node as a red box. Note that if the
+# number of direct children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, the graph will not be shown at all. Also note
+# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lay further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not
+# seem to support this out of the box. Warning: Depending on the platform used,
+# enabling this option may lead to badly anti-aliased labels on the edges of
+# a graph (i.e. they become hard to read).
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+
+DOT_MULTI_TARGETS = YES
+
+# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
+# generate a legend page explaining the meaning of the various boxes and
+# arrows in the dot generated graphs.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
+# remove the intermediate dot files that are used to generate
+# the various graphs.
+
+DOT_CLEANUP = YES
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/__init__.py b/tools/gr-usrptest/docs/doxygen/doxyxml/__init__.py
new file mode 100644
index 000000000..5cd0b3c6c
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/__init__.py
@@ -0,0 +1,82 @@
+#
+# Copyright 2010 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Python interface to contents of doxygen xml documentation.
+
+Example use:
+See the contents of the example folder for the C++ and
+doxygen-generated xml used in this example.
+
+>>> # Parse the doxygen docs.
+>>> import os
+>>> this_dir = os.path.dirname(globals()['__file__'])
+>>> xml_path = this_dir + "/example/xml/"
+>>> di = DoxyIndex(xml_path)
+
+Get a list of all top-level objects.
+
+>>> print([mem.name() for mem in di.members()])
+[u'Aadvark', u'aadvarky_enough', u'main']
+
+Get all functions.
+
+>>> print([mem.name() for mem in di.in_category(DoxyFunction)])
+[u'aadvarky_enough', u'main']
+
+Check if an object is present.
+
+>>> di.has_member(u'Aadvark')
+True
+>>> di.has_member(u'Fish')
+False
+
+Get an item by name and check its properties.
+
+>>> aad = di.get_member(u'Aadvark')
+>>> print(aad.brief_description)
+Models the mammal Aadvark.
+>>> print(aad.detailed_description)
+Sadly the model is incomplete and cannot capture all aspects of an aadvark yet.
+<BLANKLINE>
+This line is uninformative and is only to test line breaks in the comments.
+>>> [mem.name() for mem in aad.members()]
+[u'aadvarkness', u'print', u'Aadvark', u'get_aadvarkness']
+>>> aad.get_member(u'print').brief_description
+u'Outputs the vital aadvark statistics.'
+
+"""
+
+from doxyindex import DoxyIndex, DoxyFunction, DoxyParam, DoxyClass, DoxyFile, DoxyNamespace, DoxyGroup, DoxyFriend, DoxyOther
+
+def _test():
+ import os
+ this_dir = os.path.dirname(globals()['__file__'])
+ xml_path = this_dir + "/example/xml/"
+ di = DoxyIndex(xml_path)
+ # Get the Aadvark class
+ aad = di.get_member('Aadvark')
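+    # Touching a property here forces the lazy parse before running the doctests.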
+ aad.brief_description
+ import doctest
+ return doctest.testmod()
+
+if __name__ == "__main__":
+ _test()
+
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/base.py b/tools/gr-usrptest/docs/doxygen/doxyxml/base.py
new file mode 100644
index 000000000..e8f026ab9
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/base.py
@@ -0,0 +1,219 @@
+#
+# Copyright 2010 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Provides the Base class.
+
+Classes built upon it give more user-friendly interfaces
+to the doxygen xml docs than the generated classes provide.
+"""
+
+import os
+import pdb
+
+from xml.parsers.expat import ExpatError
+
+from generated import compound
+
+
+class Base(object):
+
+ class Duplicate(StandardError):
+ pass
+
+ class NoSuchMember(StandardError):
+ pass
+
+ class ParsingError(StandardError):
+ pass
+
+ def __init__(self, parse_data, top=None):
+ self._parsed = False
+ self._error = False
+ self._parse_data = parse_data
+ self._members = []
+ self._dict_members = {}
+ self._in_category = {}
+ self._data = {}
+ if top is not None:
+ self._xml_path = top._xml_path
+ # Set up holder of references
+ else:
+ top = self
+ self._refs = {}
+ self._xml_path = parse_data
+ self.top = top
+
+ @classmethod
+ def from_refid(cls, refid, top=None):
+ """ Instantiate class from a refid rather than parsing object. """
+ # First check to see if its already been instantiated.
+ if top is not None and refid in top._refs:
+ return top._refs[refid]
+ # Otherwise create a new instance and set refid.
+ inst = cls(None, top=top)
+ inst.refid = refid
+ inst.add_ref(inst)
+ return inst
+
+ @classmethod
+ def from_parse_data(cls, parse_data, top=None):
+ refid = getattr(parse_data, 'refid', None)
+ if refid is not None and top is not None and refid in top._refs:
+ return top._refs[refid]
+ inst = cls(parse_data, top=top)
+ if refid is not None:
+ inst.refid = refid
+ inst.add_ref(inst)
+ return inst
+
+ def add_ref(self, obj):
+ if hasattr(obj, 'refid'):
+ self.top._refs[obj.refid] = obj
+
+ mem_classes = []
+
+ def get_cls(self, mem):
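+        # mem_classes is populated by the Doxy* wrapper classes registering
+        # themselves (see doxyindex.py); return the first one able to parse 'mem'.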
+ for cls in self.mem_classes:
+ if cls.can_parse(mem):
+ return cls
+ raise StandardError(("Did not find a class for object '%s'." \
+ % (mem.get_name())))
+
+ def convert_mem(self, mem):
+ try:
+ cls = self.get_cls(mem)
+ converted = cls.from_parse_data(mem, self.top)
+ if converted is None:
+ raise StandardError('No class matched this object.')
+ self.add_ref(converted)
+ return converted
+ except StandardError, e:
+ print e
+
+ @classmethod
+ def includes(cls, inst):
+ return isinstance(inst, cls)
+
+ @classmethod
+ def can_parse(cls, obj):
+ return False
+
+ def _parse(self):
+ self._parsed = True
+
+ def _get_dict_members(self, cat=None):
+ """
+        For a given category a dictionary is returned mapping member names to
+        members of that category. Names that occur more than once are mapped
+        to the Duplicate exception class.
+ """
+ self.confirm_no_error()
+ if cat not in self._dict_members:
+ new_dict = {}
+ for mem in self.in_category(cat):
+ if mem.name() not in new_dict:
+ new_dict[mem.name()] = mem
+ else:
+ new_dict[mem.name()] = self.Duplicate
+ self._dict_members[cat] = new_dict
+ return self._dict_members[cat]
+
+ def in_category(self, cat):
+ self.confirm_no_error()
+ if cat is None:
+ return self._members
+ if cat not in self._in_category:
+ self._in_category[cat] = [mem for mem in self._members
+ if cat.includes(mem)]
+ return self._in_category[cat]
+
+ def get_member(self, name, cat=None):
+ self.confirm_no_error()
+ # Check if it's in a namespace or class.
+ bits = name.split('::')
+ first = bits[0]
+ rest = '::'.join(bits[1:])
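+        # e.g. 'Aadvark::print' looks up 'Aadvark' among this object's members
+        # and then recurses into it with the remaining 'print' part below.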
+ member = self._get_dict_members(cat).get(first, self.NoSuchMember)
+ # Raise any errors that are returned.
+ if member in set([self.NoSuchMember, self.Duplicate]):
+ raise member()
+ if rest:
+ return member.get_member(rest, cat=cat)
+ return member
+
+ def has_member(self, name, cat=None):
+ try:
+ mem = self.get_member(name, cat=cat)
+ return True
+ except self.NoSuchMember:
+ return False
+
+ def data(self):
+ self.confirm_no_error()
+ return self._data
+
+ def members(self):
+ self.confirm_no_error()
+ return self._members
+
+ def process_memberdefs(self):
+ mdtss = []
+ for sec in self._retrieved_data.compounddef.sectiondef:
+ mdtss += sec.memberdef
+ # At the moment we lose all information associated with sections.
+ # Sometimes a memberdef is in several sectiondef.
+ # We make sure we don't get duplicates here.
+ uniques = set([])
+ for mem in mdtss:
+ converted = self.convert_mem(mem)
+ pair = (mem.name, mem.__class__)
+ if pair not in uniques:
+ uniques.add(pair)
+ self._members.append(converted)
+
+ def retrieve_data(self):
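+        # Each compound's full definition lives in '<refid>.xml' next to index.xml;
+        # XML parse failures are recorded in self._error rather than raised.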
+ filename = os.path.join(self._xml_path, self.refid + '.xml')
+ try:
+ self._retrieved_data = compound.parse(filename)
+ except ExpatError:
+ print('Error in xml in file %s' % filename)
+ self._error = True
+ self._retrieved_data = None
+
+ def check_parsed(self):
+ if not self._parsed:
+ self._parse()
+
+ def confirm_no_error(self):
+ self.check_parsed()
+ if self._error:
+ raise self.ParsingError()
+
+ def error(self):
+ self.check_parsed()
+ return self._error
+
+ def name(self):
+ # first see if we can do it without processing.
+ if self._parse_data is not None:
+ return self._parse_data.name
+ self.check_parsed()
+ return self._retrieved_data.compounddef.name
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/doxyindex.py b/tools/gr-usrptest/docs/doxygen/doxyxml/doxyindex.py
new file mode 100644
index 000000000..78e815376
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/doxyindex.py
@@ -0,0 +1,301 @@
+#
+# Copyright 2010 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Classes providing more user-friendly interfaces to the doxygen xml
+docs than the generated classes provide.
+"""
+
+import os
+
+from generated import index
+from base import Base
+from text import description
+
+class DoxyIndex(Base):
+ """
+ Parses a doxygen xml directory.
+ """
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyIndex, self)._parse()
+ self._root = index.parse(os.path.join(self._xml_path, 'index.xml'))
+ for mem in self._root.compound:
+ converted = self.convert_mem(mem)
+ # For files and namespaces we want the contents to be
+ # accessible directly from the parent rather than having
+ # to go through the file object.
+ if self.get_cls(mem) == DoxyFile:
+ if mem.name.endswith('.h'):
+ self._members += converted.members()
+ self._members.append(converted)
+ elif self.get_cls(mem) == DoxyNamespace:
+ self._members += converted.members()
+ self._members.append(converted)
+ else:
+ self._members.append(converted)
+
+
+def generate_swig_doc_i(self):
+ """
+ %feature("docstring") gr_make_align_on_samplenumbers_ss::align_state "
+ Wraps the C++: gr_align_on_samplenumbers_ss::align_state";
+ """
+ pass
+
+
+class DoxyCompMem(Base):
+
+
+ kind = None
+
+ def __init__(self, *args, **kwargs):
+ super(DoxyCompMem, self).__init__(*args, **kwargs)
+
+ @classmethod
+ def can_parse(cls, obj):
+ return obj.kind == cls.kind
+
+ def set_descriptions(self, parse_data):
+ bd = description(getattr(parse_data, 'briefdescription', None))
+ dd = description(getattr(parse_data, 'detaileddescription', None))
+ self._data['brief_description'] = bd
+ self._data['detailed_description'] = dd
+
+ def set_parameters(self, data):
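+        # Collect every Doxygen parameterlist found in the detailed description
+        # and wrap each parameter item in a DoxyParameterItem.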
+ vs = [ddc.value for ddc in data.detaileddescription.content_]
+ pls = []
+ for v in vs:
+ if hasattr(v, 'parameterlist'):
+ pls += v.parameterlist
+ pis = []
+ for pl in pls:
+ pis += pl.parameteritem
+ dpis = []
+ for pi in pis:
+ dpi = DoxyParameterItem(pi)
+ dpi._parse()
+ dpis.append(dpi)
+ self._data['params'] = dpis
+
+
+class DoxyCompound(DoxyCompMem):
+ pass
+
+class DoxyMember(DoxyCompMem):
+ pass
+
+class DoxyFunction(DoxyMember):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'function'
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyFunction, self)._parse()
+ self.set_descriptions(self._parse_data)
+ self.set_parameters(self._parse_data)
+ if not self._data['params']:
+ # If the params weren't set by a comment then just grab the names.
+ self._data['params'] = []
+ prms = self._parse_data.param
+ for prm in prms:
+ self._data['params'].append(DoxyParam(prm))
+
+ brief_description = property(lambda self: self.data()['brief_description'])
+ detailed_description = property(lambda self: self.data()['detailed_description'])
+ params = property(lambda self: self.data()['params'])
+
+Base.mem_classes.append(DoxyFunction)
+
+
+class DoxyParam(DoxyMember):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyParam, self)._parse()
+ self.set_descriptions(self._parse_data)
+ self._data['declname'] = self._parse_data.declname
+
+ @property
+ def description(self):
+ descriptions = []
+ if self.brief_description:
+ descriptions.append(self.brief_description)
+ if self.detailed_description:
+ descriptions.append(self.detailed_description)
+ return '\n\n'.join(descriptions)
+
+ brief_description = property(lambda self: self.data()['brief_description'])
+ detailed_description = property(lambda self: self.data()['detailed_description'])
+ name = property(lambda self: self.data()['declname'])
+
+class DoxyParameterItem(DoxyMember):
+ """A different representation of a parameter in Doxygen."""
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyParameterItem, self)._parse()
+ names = []
+ for nl in self._parse_data.parameternamelist:
+ for pn in nl.parametername:
+ names.append(description(pn))
+ # Just take first name
+ self._data['name'] = names[0]
+ # Get description
+ pd = description(self._parse_data.get_parameterdescription())
+ self._data['description'] = pd
+
+ description = property(lambda self: self.data()['description'])
+ name = property(lambda self: self.data()['name'])
+
+
+class DoxyClass(DoxyCompound):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'class'
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyClass, self)._parse()
+ self.retrieve_data()
+ if self._error:
+ return
+ self.set_descriptions(self._retrieved_data.compounddef)
+ self.set_parameters(self._retrieved_data.compounddef)
+ # Sectiondef.kind tells about whether private or public.
+ # We just ignore this for now.
+ self.process_memberdefs()
+
+ brief_description = property(lambda self: self.data()['brief_description'])
+ detailed_description = property(lambda self: self.data()['detailed_description'])
+ params = property(lambda self: self.data()['params'])
+
+Base.mem_classes.append(DoxyClass)
+
+
+class DoxyFile(DoxyCompound):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'file'
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyFile, self)._parse()
+ self.retrieve_data()
+ self.set_descriptions(self._retrieved_data.compounddef)
+ if self._error:
+ return
+ self.process_memberdefs()
+
+ brief_description = property(lambda self: self.data()['brief_description'])
+ detailed_description = property(lambda self: self.data()['detailed_description'])
+
+Base.mem_classes.append(DoxyFile)
+
+
+class DoxyNamespace(DoxyCompound):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'namespace'
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyNamespace, self)._parse()
+ self.retrieve_data()
+ self.set_descriptions(self._retrieved_data.compounddef)
+ if self._error:
+ return
+ self.process_memberdefs()
+
+Base.mem_classes.append(DoxyNamespace)
+
+
+class DoxyGroup(DoxyCompound):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'group'
+
+ def _parse(self):
+ if self._parsed:
+ return
+ super(DoxyGroup, self)._parse()
+ self.retrieve_data()
+ if self._error:
+ return
+ cdef = self._retrieved_data.compounddef
+ self._data['title'] = description(cdef.title)
+ # Process inner groups
+ grps = cdef.innergroup
+ for grp in grps:
+ converted = DoxyGroup.from_refid(grp.refid, top=self.top)
+ self._members.append(converted)
+ # Process inner classes
+ klasses = cdef.innerclass
+ for kls in klasses:
+ converted = DoxyClass.from_refid(kls.refid, top=self.top)
+ self._members.append(converted)
+ # Process normal members
+ self.process_memberdefs()
+
+ title = property(lambda self: self.data()['title'])
+
+
+Base.mem_classes.append(DoxyGroup)
+
+
+class DoxyFriend(DoxyMember):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kind = 'friend'
+
+Base.mem_classes.append(DoxyFriend)
+
+
+class DoxyOther(Base):
+
+ __module__ = "gnuradio.utils.doxyxml"
+
+ kinds = set(['variable', 'struct', 'union', 'define', 'typedef', 'enum',
+ 'dir', 'page', 'signal', 'slot', 'property'])
+
+ @classmethod
+ def can_parse(cls, obj):
+ return obj.kind in cls.kinds
+
+Base.mem_classes.append(DoxyOther)
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/generated/__init__.py b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/__init__.py
new file mode 100644
index 000000000..39823979f
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/__init__.py
@@ -0,0 +1,7 @@
+"""
+Contains generated files produced by generateDS.py.
+
+These do the real work of parsing the doxygen xml files but the
+resultant classes are not very friendly to navigate so the rest of the
+doxyxml module processes them further.
+"""
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compound.py b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compound.py
new file mode 100644
index 000000000..1522ac23f
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compound.py
@@ -0,0 +1,503 @@
+#!/usr/bin/env python
+
+"""
+Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
+"""
+
+from string import lower as str_lower
+from xml.dom import minidom
+from xml.dom import Node
+
+import sys
+
+import compoundsuper as supermod
+from compoundsuper import MixedContainer
+
+
+class DoxygenTypeSub(supermod.DoxygenType):
+ def __init__(self, version=None, compounddef=None):
+ supermod.DoxygenType.__init__(self, version, compounddef)
+
+ def find(self, details):
+
+ return self.compounddef.find(details)
+
+supermod.DoxygenType.subclass = DoxygenTypeSub
+# end class DoxygenTypeSub
+
+
+class compounddefTypeSub(supermod.compounddefType):
+ def __init__(self, kind=None, prot=None, id=None, compoundname='', title='', basecompoundref=None, derivedcompoundref=None, includes=None, includedby=None, incdepgraph=None, invincdepgraph=None, innerdir=None, innerfile=None, innerclass=None, innernamespace=None, innerpage=None, innergroup=None, templateparamlist=None, sectiondef=None, briefdescription=None, detaileddescription=None, inheritancegraph=None, collaborationgraph=None, programlisting=None, location=None, listofallmembers=None):
+ supermod.compounddefType.__init__(self, kind, prot, id, compoundname, title, basecompoundref, derivedcompoundref, includes, includedby, incdepgraph, invincdepgraph, innerdir, innerfile, innerclass, innernamespace, innerpage, innergroup, templateparamlist, sectiondef, briefdescription, detaileddescription, inheritancegraph, collaborationgraph, programlisting, location, listofallmembers)
+
+ def find(self, details):
+
+ if self.id == details.refid:
+ return self
+
+ for sectiondef in self.sectiondef:
+ result = sectiondef.find(details)
+ if result:
+ return result
+
+
+supermod.compounddefType.subclass = compounddefTypeSub
+# end class compounddefTypeSub
+
+
+class listofallmembersTypeSub(supermod.listofallmembersType):
+ def __init__(self, member=None):
+ supermod.listofallmembersType.__init__(self, member)
+supermod.listofallmembersType.subclass = listofallmembersTypeSub
+# end class listofallmembersTypeSub
+
+
+class memberRefTypeSub(supermod.memberRefType):
+ def __init__(self, virt=None, prot=None, refid=None, ambiguityscope=None, scope='', name=''):
+ supermod.memberRefType.__init__(self, virt, prot, refid, ambiguityscope, scope, name)
+supermod.memberRefType.subclass = memberRefTypeSub
+# end class memberRefTypeSub
+
+
+class compoundRefTypeSub(supermod.compoundRefType):
+ def __init__(self, virt=None, prot=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.compoundRefType.__init__(self, mixedclass_, content_)
+supermod.compoundRefType.subclass = compoundRefTypeSub
+# end class compoundRefTypeSub
+
+
+class reimplementTypeSub(supermod.reimplementType):
+ def __init__(self, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.reimplementType.__init__(self, mixedclass_, content_)
+supermod.reimplementType.subclass = reimplementTypeSub
+# end class reimplementTypeSub
+
+
+class incTypeSub(supermod.incType):
+ def __init__(self, local=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.incType.__init__(self, mixedclass_, content_)
+supermod.incType.subclass = incTypeSub
+# end class incTypeSub
+
+
+class refTypeSub(supermod.refType):
+ def __init__(self, prot=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.refType.__init__(self, mixedclass_, content_)
+supermod.refType.subclass = refTypeSub
+# end class refTypeSub
+
+
+
+class refTextTypeSub(supermod.refTextType):
+ def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.refTextType.__init__(self, mixedclass_, content_)
+
+supermod.refTextType.subclass = refTextTypeSub
+# end class refTextTypeSub
+
+class sectiondefTypeSub(supermod.sectiondefType):
+
+
+ def __init__(self, kind=None, header='', description=None, memberdef=None):
+ supermod.sectiondefType.__init__(self, kind, header, description, memberdef)
+
+ def find(self, details):
+
+ for memberdef in self.memberdef:
+ if memberdef.id == details.refid:
+ return memberdef
+
+ return None
+
+
+supermod.sectiondefType.subclass = sectiondefTypeSub
+# end class sectiondefTypeSub
+
+
+class memberdefTypeSub(supermod.memberdefType):
+ def __init__(self, initonly=None, kind=None, volatile=None, const=None, raise_=None, virt=None, readable=None, prot=None, explicit=None, new=None, final=None, writable=None, add=None, static=None, remove=None, sealed=None, mutable=None, gettable=None, inline=None, settable=None, id=None, templateparamlist=None, type_=None, definition='', argsstring='', name='', read='', write='', bitfield='', reimplements=None, reimplementedby=None, param=None, enumvalue=None, initializer=None, exceptions=None, briefdescription=None, detaileddescription=None, inbodydescription=None, location=None, references=None, referencedby=None):
+ supermod.memberdefType.__init__(self, initonly, kind, volatile, const, raise_, virt, readable, prot, explicit, new, final, writable, add, static, remove, sealed, mutable, gettable, inline, settable, id, templateparamlist, type_, definition, argsstring, name, read, write, bitfield, reimplements, reimplementedby, param, enumvalue, initializer, exceptions, briefdescription, detaileddescription, inbodydescription, location, references, referencedby)
+supermod.memberdefType.subclass = memberdefTypeSub
+# end class memberdefTypeSub
+
+
+class descriptionTypeSub(supermod.descriptionType):
+ def __init__(self, title='', para=None, sect1=None, internal=None, mixedclass_=None, content_=None):
+ supermod.descriptionType.__init__(self, mixedclass_, content_)
+supermod.descriptionType.subclass = descriptionTypeSub
+# end class descriptionTypeSub
+
+
+class enumvalueTypeSub(supermod.enumvalueType):
+ def __init__(self, prot=None, id=None, name='', initializer=None, briefdescription=None, detaileddescription=None, mixedclass_=None, content_=None):
+ supermod.enumvalueType.__init__(self, mixedclass_, content_)
+supermod.enumvalueType.subclass = enumvalueTypeSub
+# end class enumvalueTypeSub
+
+
+class templateparamlistTypeSub(supermod.templateparamlistType):
+ def __init__(self, param=None):
+ supermod.templateparamlistType.__init__(self, param)
+supermod.templateparamlistType.subclass = templateparamlistTypeSub
+# end class templateparamlistTypeSub
+
+
+class paramTypeSub(supermod.paramType):
+ def __init__(self, type_=None, declname='', defname='', array='', defval=None, briefdescription=None):
+ supermod.paramType.__init__(self, type_, declname, defname, array, defval, briefdescription)
+supermod.paramType.subclass = paramTypeSub
+# end class paramTypeSub
+
+
+class linkedTextTypeSub(supermod.linkedTextType):
+ def __init__(self, ref=None, mixedclass_=None, content_=None):
+ supermod.linkedTextType.__init__(self, mixedclass_, content_)
+supermod.linkedTextType.subclass = linkedTextTypeSub
+# end class linkedTextTypeSub
+
+
+class graphTypeSub(supermod.graphType):
+ def __init__(self, node=None):
+ supermod.graphType.__init__(self, node)
+supermod.graphType.subclass = graphTypeSub
+# end class graphTypeSub
+
+
+class nodeTypeSub(supermod.nodeType):
+ def __init__(self, id=None, label='', link=None, childnode=None):
+ supermod.nodeType.__init__(self, id, label, link, childnode)
+supermod.nodeType.subclass = nodeTypeSub
+# end class nodeTypeSub
+
+
+class childnodeTypeSub(supermod.childnodeType):
+ def __init__(self, relation=None, refid=None, edgelabel=None):
+ supermod.childnodeType.__init__(self, relation, refid, edgelabel)
+supermod.childnodeType.subclass = childnodeTypeSub
+# end class childnodeTypeSub
+
+
+class linkTypeSub(supermod.linkType):
+ def __init__(self, refid=None, external=None, valueOf_=''):
+ supermod.linkType.__init__(self, refid, external)
+supermod.linkType.subclass = linkTypeSub
+# end class linkTypeSub
+
+
+class listingTypeSub(supermod.listingType):
+ def __init__(self, codeline=None):
+ supermod.listingType.__init__(self, codeline)
+supermod.listingType.subclass = listingTypeSub
+# end class listingTypeSub
+
+
+class codelineTypeSub(supermod.codelineType):
+ def __init__(self, external=None, lineno=None, refkind=None, refid=None, highlight=None):
+ supermod.codelineType.__init__(self, external, lineno, refkind, refid, highlight)
+supermod.codelineType.subclass = codelineTypeSub
+# end class codelineTypeSub
+
+
+class highlightTypeSub(supermod.highlightType):
+ def __init__(self, class_=None, sp=None, ref=None, mixedclass_=None, content_=None):
+ supermod.highlightType.__init__(self, mixedclass_, content_)
+supermod.highlightType.subclass = highlightTypeSub
+# end class highlightTypeSub
+
+
+class referenceTypeSub(supermod.referenceType):
+ def __init__(self, endline=None, startline=None, refid=None, compoundref=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.referenceType.__init__(self, mixedclass_, content_)
+supermod.referenceType.subclass = referenceTypeSub
+# end class referenceTypeSub
+
+
+class locationTypeSub(supermod.locationType):
+ def __init__(self, bodystart=None, line=None, bodyend=None, bodyfile=None, file=None, valueOf_=''):
+ supermod.locationType.__init__(self, bodystart, line, bodyend, bodyfile, file)
+supermod.locationType.subclass = locationTypeSub
+# end class locationTypeSub
+
+
+class docSect1TypeSub(supermod.docSect1Type):
+ def __init__(self, id=None, title='', para=None, sect2=None, internal=None, mixedclass_=None, content_=None):
+ supermod.docSect1Type.__init__(self, mixedclass_, content_)
+supermod.docSect1Type.subclass = docSect1TypeSub
+# end class docSect1TypeSub
+
+
+class docSect2TypeSub(supermod.docSect2Type):
+ def __init__(self, id=None, title='', para=None, sect3=None, internal=None, mixedclass_=None, content_=None):
+ supermod.docSect2Type.__init__(self, mixedclass_, content_)
+supermod.docSect2Type.subclass = docSect2TypeSub
+# end class docSect2TypeSub
+
+
+class docSect3TypeSub(supermod.docSect3Type):
+ def __init__(self, id=None, title='', para=None, sect4=None, internal=None, mixedclass_=None, content_=None):
+ supermod.docSect3Type.__init__(self, mixedclass_, content_)
+supermod.docSect3Type.subclass = docSect3TypeSub
+# end class docSect3TypeSub
+
+
+class docSect4TypeSub(supermod.docSect4Type):
+ def __init__(self, id=None, title='', para=None, internal=None, mixedclass_=None, content_=None):
+ supermod.docSect4Type.__init__(self, mixedclass_, content_)
+supermod.docSect4Type.subclass = docSect4TypeSub
+# end class docSect4TypeSub
+
+
+class docInternalTypeSub(supermod.docInternalType):
+ def __init__(self, para=None, sect1=None, mixedclass_=None, content_=None):
+ supermod.docInternalType.__init__(self, mixedclass_, content_)
+supermod.docInternalType.subclass = docInternalTypeSub
+# end class docInternalTypeSub
+
+
+class docInternalS1TypeSub(supermod.docInternalS1Type):
+ def __init__(self, para=None, sect2=None, mixedclass_=None, content_=None):
+ supermod.docInternalS1Type.__init__(self, mixedclass_, content_)
+supermod.docInternalS1Type.subclass = docInternalS1TypeSub
+# end class docInternalS1TypeSub
+
+
+class docInternalS2TypeSub(supermod.docInternalS2Type):
+ def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
+ supermod.docInternalS2Type.__init__(self, mixedclass_, content_)
+supermod.docInternalS2Type.subclass = docInternalS2TypeSub
+# end class docInternalS2TypeSub
+
+
+class docInternalS3TypeSub(supermod.docInternalS3Type):
+ def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
+ supermod.docInternalS3Type.__init__(self, mixedclass_, content_)
+supermod.docInternalS3Type.subclass = docInternalS3TypeSub
+# end class docInternalS3TypeSub
+
+
+class docInternalS4TypeSub(supermod.docInternalS4Type):
+ def __init__(self, para=None, mixedclass_=None, content_=None):
+ supermod.docInternalS4Type.__init__(self, mixedclass_, content_)
+supermod.docInternalS4Type.subclass = docInternalS4TypeSub
+# end class docInternalS4TypeSub
+
+
+class docURLLinkSub(supermod.docURLLink):
+ def __init__(self, url=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docURLLink.__init__(self, mixedclass_, content_)
+supermod.docURLLink.subclass = docURLLinkSub
+# end class docURLLinkSub
+
+
+class docAnchorTypeSub(supermod.docAnchorType):
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docAnchorType.__init__(self, mixedclass_, content_)
+supermod.docAnchorType.subclass = docAnchorTypeSub
+# end class docAnchorTypeSub
+
+
+class docFormulaTypeSub(supermod.docFormulaType):
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docFormulaType.__init__(self, mixedclass_, content_)
+supermod.docFormulaType.subclass = docFormulaTypeSub
+# end class docFormulaTypeSub
+
+
+class docIndexEntryTypeSub(supermod.docIndexEntryType):
+ def __init__(self, primaryie='', secondaryie=''):
+ supermod.docIndexEntryType.__init__(self, primaryie, secondaryie)
+supermod.docIndexEntryType.subclass = docIndexEntryTypeSub
+# end class docIndexEntryTypeSub
+
+
+class docListTypeSub(supermod.docListType):
+ def __init__(self, listitem=None):
+ supermod.docListType.__init__(self, listitem)
+supermod.docListType.subclass = docListTypeSub
+# end class docListTypeSub
+
+
+class docListItemTypeSub(supermod.docListItemType):
+ def __init__(self, para=None):
+ supermod.docListItemType.__init__(self, para)
+supermod.docListItemType.subclass = docListItemTypeSub
+# end class docListItemTypeSub
+
+
+class docSimpleSectTypeSub(supermod.docSimpleSectType):
+ def __init__(self, kind=None, title=None, para=None):
+ supermod.docSimpleSectType.__init__(self, kind, title, para)
+supermod.docSimpleSectType.subclass = docSimpleSectTypeSub
+# end class docSimpleSectTypeSub
+
+
+class docVarListEntryTypeSub(supermod.docVarListEntryType):
+ def __init__(self, term=None):
+ supermod.docVarListEntryType.__init__(self, term)
+supermod.docVarListEntryType.subclass = docVarListEntryTypeSub
+# end class docVarListEntryTypeSub
+
+
+class docRefTextTypeSub(supermod.docRefTextType):
+ def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docRefTextType.__init__(self, mixedclass_, content_)
+supermod.docRefTextType.subclass = docRefTextTypeSub
+# end class docRefTextTypeSub
+
+
+class docTableTypeSub(supermod.docTableType):
+ def __init__(self, rows=None, cols=None, row=None, caption=None):
+ supermod.docTableType.__init__(self, rows, cols, row, caption)
+supermod.docTableType.subclass = docTableTypeSub
+# end class docTableTypeSub
+
+
+class docRowTypeSub(supermod.docRowType):
+ def __init__(self, entry=None):
+ supermod.docRowType.__init__(self, entry)
+supermod.docRowType.subclass = docRowTypeSub
+# end class docRowTypeSub
+
+
+class docEntryTypeSub(supermod.docEntryType):
+ def __init__(self, thead=None, para=None):
+ supermod.docEntryType.__init__(self, thead, para)
+supermod.docEntryType.subclass = docEntryTypeSub
+# end class docEntryTypeSub
+
+
+class docHeadingTypeSub(supermod.docHeadingType):
+ def __init__(self, level=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docHeadingType.__init__(self, mixedclass_, content_)
+supermod.docHeadingType.subclass = docHeadingTypeSub
+# end class docHeadingTypeSub
+
+
+class docImageTypeSub(supermod.docImageType):
+ def __init__(self, width=None, type_=None, name=None, height=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docImageType.__init__(self, mixedclass_, content_)
+supermod.docImageType.subclass = docImageTypeSub
+# end class docImageTypeSub
+
+
+class docDotFileTypeSub(supermod.docDotFileType):
+ def __init__(self, name=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docDotFileType.__init__(self, mixedclass_, content_)
+supermod.docDotFileType.subclass = docDotFileTypeSub
+# end class docDotFileTypeSub
+
+
+class docTocItemTypeSub(supermod.docTocItemType):
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ supermod.docTocItemType.__init__(self, mixedclass_, content_)
+supermod.docTocItemType.subclass = docTocItemTypeSub
+# end class docTocItemTypeSub
+
+
+class docTocListTypeSub(supermod.docTocListType):
+ def __init__(self, tocitem=None):
+ supermod.docTocListType.__init__(self, tocitem)
+supermod.docTocListType.subclass = docTocListTypeSub
+# end class docTocListTypeSub
+
+
+class docLanguageTypeSub(supermod.docLanguageType):
+ def __init__(self, langid=None, para=None):
+ supermod.docLanguageType.__init__(self, langid, para)
+supermod.docLanguageType.subclass = docLanguageTypeSub
+# end class docLanguageTypeSub
+
+
+class docParamListTypeSub(supermod.docParamListType):
+ def __init__(self, kind=None, parameteritem=None):
+ supermod.docParamListType.__init__(self, kind, parameteritem)
+supermod.docParamListType.subclass = docParamListTypeSub
+# end class docParamListTypeSub
+
+
+class docParamListItemSub(supermod.docParamListItem):
+ def __init__(self, parameternamelist=None, parameterdescription=None):
+ supermod.docParamListItem.__init__(self, parameternamelist, parameterdescription)
+supermod.docParamListItem.subclass = docParamListItemSub
+# end class docParamListItemSub
+
+
+class docParamNameListSub(supermod.docParamNameList):
+ def __init__(self, parametername=None):
+ supermod.docParamNameList.__init__(self, parametername)
+supermod.docParamNameList.subclass = docParamNameListSub
+# end class docParamNameListSub
+
+
+class docParamNameSub(supermod.docParamName):
+ def __init__(self, direction=None, ref=None, mixedclass_=None, content_=None):
+ supermod.docParamName.__init__(self, mixedclass_, content_)
+supermod.docParamName.subclass = docParamNameSub
+# end class docParamNameSub
+
+
+class docXRefSectTypeSub(supermod.docXRefSectType):
+ def __init__(self, id=None, xreftitle=None, xrefdescription=None):
+ supermod.docXRefSectType.__init__(self, id, xreftitle, xrefdescription)
+supermod.docXRefSectType.subclass = docXRefSectTypeSub
+# end class docXRefSectTypeSub
+
+
+class docCopyTypeSub(supermod.docCopyType):
+ def __init__(self, link=None, para=None, sect1=None, internal=None):
+ supermod.docCopyType.__init__(self, link, para, sect1, internal)
+supermod.docCopyType.subclass = docCopyTypeSub
+# end class docCopyTypeSub
+
+
+class docCharTypeSub(supermod.docCharType):
+ def __init__(self, char=None, valueOf_=''):
+ supermod.docCharType.__init__(self, char)
+supermod.docCharType.subclass = docCharTypeSub
+# end class docCharTypeSub
+
+class docParaTypeSub(supermod.docParaType):
+ def __init__(self, char=None, valueOf_=''):
+ supermod.docParaType.__init__(self, char)
+
+ self.parameterlist = []
+ self.simplesects = []
+ self.content = []
+
+ def buildChildren(self, child_, nodeName_):
+ supermod.docParaType.buildChildren(self, child_, nodeName_)
+
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == "ref":
+ obj_ = supermod.docRefTextType.factory()
+ obj_.build(child_)
+ self.content.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'parameterlist':
+ obj_ = supermod.docParamListType.factory()
+ obj_.build(child_)
+ self.parameterlist.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'simplesect':
+ obj_ = supermod.docSimpleSectType.factory()
+ obj_.build(child_)
+ self.simplesects.append(obj_)
+
+
+supermod.docParaType.subclass = docParaTypeSub
+# end class docParaTypeSub
+
+
+
+def parse(inFilename):
+ doc = minidom.parse(inFilename)
+ rootNode = doc.documentElement
+ rootObj = supermod.DoxygenType.factory()
+ rootObj.build(rootNode)
+ return rootObj
+
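+# Illustrative usage sketch (an assumption, not part of the generated output):
+# given Doxygen XML output in an "xml/" directory, a compound file could be
+# loaded with the parse() helper above and read back through the generated
+# accessors.  The file name below is hypothetical.
+#
+## root = parse('xml/classexample.xml')
+## compounddef = root.get_compounddef()
+## print compounddef.get_compoundname()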
+
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compoundsuper.py b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compoundsuper.py
new file mode 100644
index 000000000..6255dda16
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/compoundsuper.py
@@ -0,0 +1,8342 @@
+#!/usr/bin/env python
+
+#
+# Generated Thu Jun 11 18:44:25 2009 by generateDS.py.
+#
+
+import sys
+import getopt
+from string import lower as str_lower
+from xml.dom import minidom
+from xml.dom import Node
+
+#
+# User methods
+#
+# Calls to the methods in these classes are generated by generateDS.py.
+# You can replace these methods by re-implementing the following class
+# in a module named generatedssuper.py.
+
+try:
+ from generatedssuper import GeneratedsSuper
+except ImportError, exp:
+
+ class GeneratedsSuper:
+ def format_string(self, input_data, input_name=''):
+ return input_data
+ def format_integer(self, input_data, input_name=''):
+ return '%d' % input_data
+ def format_float(self, input_data, input_name=''):
+ return '%f' % input_data
+ def format_double(self, input_data, input_name=''):
+ return '%e' % input_data
+ def format_boolean(self, input_data, input_name=''):
+ return '%s' % input_data
+
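+# Illustrative sketch (an assumption, not part of the generated output): as the
+# comment above notes, a module named generatedssuper.py, if importable, takes
+# the place of the fallback GeneratedsSuper class defined here, e.g. to change
+# how values are formatted on export.  A minimal hypothetical override:
+#
+## # generatedssuper.py
+## class GeneratedsSuper:
+##     def format_string(self, input_data, input_name=''):
+##         return input_data.strip()
+##     def format_integer(self, input_data, input_name=''):
+##         return '%d' % input_data
+##     def format_float(self, input_data, input_name=''):
+##         return '%f' % input_data
+##     def format_double(self, input_data, input_name=''):
+##         return '%e' % input_data
+##     def format_boolean(self, input_data, input_name=''):
+##         return '%s' % input_data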
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+## banner = 'Dropping into IPython',
+## exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = 'ascii'
+
+#
+# Support/utility functions.
+#
+
+def showIndent(outfile, level):
+ for idx in range(level):
+ outfile.write(' ')
+
+def quote_xml(inStr):
+ s1 = (isinstance(inStr, basestring) and inStr or
+ '%s' % inStr)
+ s1 = s1.replace('&', '&amp;')
+ s1 = s1.replace('<', '&lt;')
+ s1 = s1.replace('>', '&gt;')
+ return s1
+
+def quote_attrib(inStr):
+ s1 = (isinstance(inStr, basestring) and inStr or
+ '%s' % inStr)
+ s1 = s1.replace('&', '&amp;')
+ s1 = s1.replace('<', '&lt;')
+ s1 = s1.replace('>', '&gt;')
+ if '"' in s1:
+ if "'" in s1:
+ s1 = '"%s"' % s1.replace('"', "&quot;")
+ else:
+ s1 = "'%s'" % s1
+ else:
+ s1 = '"%s"' % s1
+ return s1
+
+def quote_python(inStr):
+ s1 = inStr
+ if s1.find("'") == -1:
+ if s1.find('\n') == -1:
+ return "'%s'" % s1
+ else:
+ return "'''%s'''" % s1
+ else:
+ if s1.find('"') != -1:
+ s1 = s1.replace('"', '\\"')
+ if s1.find('\n') == -1:
+ return '"%s"' % s1
+ else:
+ return '"""%s"""' % s1
+
+
+class MixedContainer:
+ # Constants for category:
+ CategoryNone = 0
+ CategoryText = 1
+ CategorySimple = 2
+ CategoryComplex = 3
+ # Constants for content_type:
+ TypeNone = 0
+ TypeText = 1
+ TypeString = 2
+ TypeInteger = 3
+ TypeFloat = 4
+ TypeDecimal = 5
+ TypeDouble = 6
+ TypeBoolean = 7
+ def __init__(self, category, content_type, name, value):
+ self.category = category
+ self.content_type = content_type
+ self.name = name
+ self.value = value
+ def getCategory(self):
+ return self.category
+ def getContenttype(self, content_type):
+ return self.content_type
+ def getValue(self):
+ return self.value
+ def getName(self):
+ return self.name
+ def export(self, outfile, level, name, namespace):
+ if self.category == MixedContainer.CategoryText:
+ outfile.write(self.value)
+ elif self.category == MixedContainer.CategorySimple:
+ self.exportSimple(outfile, level, name)
+ else: # category == MixedContainer.CategoryComplex
+ self.value.export(outfile, level, namespace,name)
+ def exportSimple(self, outfile, level, name):
+ if self.content_type == MixedContainer.TypeString:
+ outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeInteger or \
+ self.content_type == MixedContainer.TypeBoolean:
+ outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeFloat or \
+ self.content_type == MixedContainer.TypeDecimal:
+ outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeDouble:
+ outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
+ def exportLiteral(self, outfile, level, name):
+ if self.category == MixedContainer.CategoryText:
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s", "%s"),\n' % \
+ (self.category, self.content_type, self.name, self.value))
+ elif self.category == MixedContainer.CategorySimple:
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s", "%s"),\n' % \
+ (self.category, self.content_type, self.name, self.value))
+ else: # category == MixedContainer.CategoryComplex
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s",\n' % \
+ (self.category, self.content_type, self.name,))
+ self.value.exportLiteral(outfile, level + 1)
+ showIndent(outfile, level)
+ outfile.write(')\n')
+
+
+class _MemberSpec(object):
+ def __init__(self, name='', data_type='', container=0):
+ self.name = name
+ self.data_type = data_type
+ self.container = container
+ def set_name(self, name): self.name = name
+ def get_name(self): return self.name
+ def set_data_type(self, data_type): self.data_type = data_type
+ def get_data_type(self): return self.data_type
+ def set_container(self, container): self.container = container
+ def get_container(self): return self.container
+
+
+#
+# Data representation classes.
+#
+
+class DoxygenType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, version=None, compounddef=None):
+ self.version = version
+ self.compounddef = compounddef
+ def factory(*args_, **kwargs_):
+ if DoxygenType.subclass:
+ return DoxygenType.subclass(*args_, **kwargs_)
+ else:
+ return DoxygenType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_compounddef(self): return self.compounddef
+ def set_compounddef(self, compounddef): self.compounddef = compounddef
+ def get_version(self): return self.version
+ def set_version(self, version): self.version = version
+ def export(self, outfile, level, namespace_='', name_='DoxygenType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='DoxygenType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='DoxygenType'):
+ outfile.write(' version=%s' % (quote_attrib(self.version), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='DoxygenType'):
+ if self.compounddef:
+ self.compounddef.export(outfile, level, namespace_, name_='compounddef')
+ def hasContent_(self):
+ if (
+ self.compounddef is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='DoxygenType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.version is not None:
+ showIndent(outfile, level)
+ outfile.write('version = "%s",\n' % (self.version,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ if self.compounddef:
+ showIndent(outfile, level)
+ outfile.write('compounddef=model_.compounddefType(\n')
+ self.compounddef.exportLiteral(outfile, level, name_='compounddef')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('version'):
+ self.version = attrs.get('version').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'compounddef':
+ obj_ = compounddefType.factory()
+ obj_.build(child_)
+ self.set_compounddef(obj_)
+# end class DoxygenType
+
+
+class compounddefType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, prot=None, id=None, compoundname=None, title=None, basecompoundref=None, derivedcompoundref=None, includes=None, includedby=None, incdepgraph=None, invincdepgraph=None, innerdir=None, innerfile=None, innerclass=None, innernamespace=None, innerpage=None, innergroup=None, templateparamlist=None, sectiondef=None, briefdescription=None, detaileddescription=None, inheritancegraph=None, collaborationgraph=None, programlisting=None, location=None, listofallmembers=None):
+ self.kind = kind
+ self.prot = prot
+ self.id = id
+ self.compoundname = compoundname
+ self.title = title
+ if basecompoundref is None:
+ self.basecompoundref = []
+ else:
+ self.basecompoundref = basecompoundref
+ if derivedcompoundref is None:
+ self.derivedcompoundref = []
+ else:
+ self.derivedcompoundref = derivedcompoundref
+ if includes is None:
+ self.includes = []
+ else:
+ self.includes = includes
+ if includedby is None:
+ self.includedby = []
+ else:
+ self.includedby = includedby
+ self.incdepgraph = incdepgraph
+ self.invincdepgraph = invincdepgraph
+ if innerdir is None:
+ self.innerdir = []
+ else:
+ self.innerdir = innerdir
+ if innerfile is None:
+ self.innerfile = []
+ else:
+ self.innerfile = innerfile
+ if innerclass is None:
+ self.innerclass = []
+ else:
+ self.innerclass = innerclass
+ if innernamespace is None:
+ self.innernamespace = []
+ else:
+ self.innernamespace = innernamespace
+ if innerpage is None:
+ self.innerpage = []
+ else:
+ self.innerpage = innerpage
+ if innergroup is None:
+ self.innergroup = []
+ else:
+ self.innergroup = innergroup
+ self.templateparamlist = templateparamlist
+ if sectiondef is None:
+ self.sectiondef = []
+ else:
+ self.sectiondef = sectiondef
+ self.briefdescription = briefdescription
+ self.detaileddescription = detaileddescription
+ self.inheritancegraph = inheritancegraph
+ self.collaborationgraph = collaborationgraph
+ self.programlisting = programlisting
+ self.location = location
+ self.listofallmembers = listofallmembers
+ def factory(*args_, **kwargs_):
+ if compounddefType.subclass:
+ return compounddefType.subclass(*args_, **kwargs_)
+ else:
+ return compounddefType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_compoundname(self): return self.compoundname
+ def set_compoundname(self, compoundname): self.compoundname = compoundname
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_basecompoundref(self): return self.basecompoundref
+ def set_basecompoundref(self, basecompoundref): self.basecompoundref = basecompoundref
+ def add_basecompoundref(self, value): self.basecompoundref.append(value)
+ def insert_basecompoundref(self, index, value): self.basecompoundref[index] = value
+ def get_derivedcompoundref(self): return self.derivedcompoundref
+ def set_derivedcompoundref(self, derivedcompoundref): self.derivedcompoundref = derivedcompoundref
+ def add_derivedcompoundref(self, value): self.derivedcompoundref.append(value)
+ def insert_derivedcompoundref(self, index, value): self.derivedcompoundref[index] = value
+ def get_includes(self): return self.includes
+ def set_includes(self, includes): self.includes = includes
+ def add_includes(self, value): self.includes.append(value)
+ def insert_includes(self, index, value): self.includes[index] = value
+ def get_includedby(self): return self.includedby
+ def set_includedby(self, includedby): self.includedby = includedby
+ def add_includedby(self, value): self.includedby.append(value)
+ def insert_includedby(self, index, value): self.includedby[index] = value
+ def get_incdepgraph(self): return self.incdepgraph
+ def set_incdepgraph(self, incdepgraph): self.incdepgraph = incdepgraph
+ def get_invincdepgraph(self): return self.invincdepgraph
+ def set_invincdepgraph(self, invincdepgraph): self.invincdepgraph = invincdepgraph
+ def get_innerdir(self): return self.innerdir
+ def set_innerdir(self, innerdir): self.innerdir = innerdir
+ def add_innerdir(self, value): self.innerdir.append(value)
+ def insert_innerdir(self, index, value): self.innerdir[index] = value
+ def get_innerfile(self): return self.innerfile
+ def set_innerfile(self, innerfile): self.innerfile = innerfile
+ def add_innerfile(self, value): self.innerfile.append(value)
+ def insert_innerfile(self, index, value): self.innerfile[index] = value
+ def get_innerclass(self): return self.innerclass
+ def set_innerclass(self, innerclass): self.innerclass = innerclass
+ def add_innerclass(self, value): self.innerclass.append(value)
+ def insert_innerclass(self, index, value): self.innerclass[index] = value
+ def get_innernamespace(self): return self.innernamespace
+ def set_innernamespace(self, innernamespace): self.innernamespace = innernamespace
+ def add_innernamespace(self, value): self.innernamespace.append(value)
+ def insert_innernamespace(self, index, value): self.innernamespace[index] = value
+ def get_innerpage(self): return self.innerpage
+ def set_innerpage(self, innerpage): self.innerpage = innerpage
+ def add_innerpage(self, value): self.innerpage.append(value)
+ def insert_innerpage(self, index, value): self.innerpage[index] = value
+ def get_innergroup(self): return self.innergroup
+ def set_innergroup(self, innergroup): self.innergroup = innergroup
+ def add_innergroup(self, value): self.innergroup.append(value)
+ def insert_innergroup(self, index, value): self.innergroup[index] = value
+ def get_templateparamlist(self): return self.templateparamlist
+ def set_templateparamlist(self, templateparamlist): self.templateparamlist = templateparamlist
+ def get_sectiondef(self): return self.sectiondef
+ def set_sectiondef(self, sectiondef): self.sectiondef = sectiondef
+ def add_sectiondef(self, value): self.sectiondef.append(value)
+ def insert_sectiondef(self, index, value): self.sectiondef[index] = value
+ def get_briefdescription(self): return self.briefdescription
+ def set_briefdescription(self, briefdescription): self.briefdescription = briefdescription
+ def get_detaileddescription(self): return self.detaileddescription
+ def set_detaileddescription(self, detaileddescription): self.detaileddescription = detaileddescription
+ def get_inheritancegraph(self): return self.inheritancegraph
+ def set_inheritancegraph(self, inheritancegraph): self.inheritancegraph = inheritancegraph
+ def get_collaborationgraph(self): return self.collaborationgraph
+ def set_collaborationgraph(self, collaborationgraph): self.collaborationgraph = collaborationgraph
+ def get_programlisting(self): return self.programlisting
+ def set_programlisting(self, programlisting): self.programlisting = programlisting
+ def get_location(self): return self.location
+ def set_location(self, location): self.location = location
+ def get_listofallmembers(self): return self.listofallmembers
+ def set_listofallmembers(self, listofallmembers): self.listofallmembers = listofallmembers
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='compounddefType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='compounddefType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='compounddefType'):
+ if self.kind is not None:
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='compounddefType'):
+ if self.compoundname is not None:
+ showIndent(outfile, level)
+ outfile.write('<%scompoundname>%s</%scompoundname>\n' % (namespace_, self.format_string(quote_xml(self.compoundname).encode(ExternalEncoding), input_name='compoundname'), namespace_))
+ if self.title is not None:
+ showIndent(outfile, level)
+ outfile.write('<%stitle>%s</%stitle>\n' % (namespace_, self.format_string(quote_xml(self.title).encode(ExternalEncoding), input_name='title'), namespace_))
+ for basecompoundref_ in self.basecompoundref:
+ basecompoundref_.export(outfile, level, namespace_, name_='basecompoundref')
+ for derivedcompoundref_ in self.derivedcompoundref:
+ derivedcompoundref_.export(outfile, level, namespace_, name_='derivedcompoundref')
+ for includes_ in self.includes:
+ includes_.export(outfile, level, namespace_, name_='includes')
+ for includedby_ in self.includedby:
+ includedby_.export(outfile, level, namespace_, name_='includedby')
+ if self.incdepgraph:
+ self.incdepgraph.export(outfile, level, namespace_, name_='incdepgraph')
+ if self.invincdepgraph:
+ self.invincdepgraph.export(outfile, level, namespace_, name_='invincdepgraph')
+ for innerdir_ in self.innerdir:
+ innerdir_.export(outfile, level, namespace_, name_='innerdir')
+ for innerfile_ in self.innerfile:
+ innerfile_.export(outfile, level, namespace_, name_='innerfile')
+ for innerclass_ in self.innerclass:
+ innerclass_.export(outfile, level, namespace_, name_='innerclass')
+ for innernamespace_ in self.innernamespace:
+ innernamespace_.export(outfile, level, namespace_, name_='innernamespace')
+ for innerpage_ in self.innerpage:
+ innerpage_.export(outfile, level, namespace_, name_='innerpage')
+ for innergroup_ in self.innergroup:
+ innergroup_.export(outfile, level, namespace_, name_='innergroup')
+ if self.templateparamlist:
+ self.templateparamlist.export(outfile, level, namespace_, name_='templateparamlist')
+ for sectiondef_ in self.sectiondef:
+ sectiondef_.export(outfile, level, namespace_, name_='sectiondef')
+ if self.briefdescription:
+ self.briefdescription.export(outfile, level, namespace_, name_='briefdescription')
+ if self.detaileddescription:
+ self.detaileddescription.export(outfile, level, namespace_, name_='detaileddescription')
+ if self.inheritancegraph:
+ self.inheritancegraph.export(outfile, level, namespace_, name_='inheritancegraph')
+ if self.collaborationgraph:
+ self.collaborationgraph.export(outfile, level, namespace_, name_='collaborationgraph')
+ if self.programlisting:
+ self.programlisting.export(outfile, level, namespace_, name_='programlisting')
+ if self.location:
+ self.location.export(outfile, level, namespace_, name_='location')
+ if self.listofallmembers:
+ self.listofallmembers.export(outfile, level, namespace_, name_='listofallmembers')
+ def hasContent_(self):
+ if (
+ self.compoundname is not None or
+ self.title is not None or
+ self.basecompoundref is not None or
+ self.derivedcompoundref is not None or
+ self.includes is not None or
+ self.includedby is not None or
+ self.incdepgraph is not None or
+ self.invincdepgraph is not None or
+ self.innerdir is not None or
+ self.innerfile is not None or
+ self.innerclass is not None or
+ self.innernamespace is not None or
+ self.innerpage is not None or
+ self.innergroup is not None or
+ self.templateparamlist is not None or
+ self.sectiondef is not None or
+ self.briefdescription is not None or
+ self.detaileddescription is not None or
+ self.inheritancegraph is not None or
+ self.collaborationgraph is not None or
+ self.programlisting is not None or
+ self.location is not None or
+ self.listofallmembers is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='compounddefType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('compoundname=%s,\n' % quote_python(self.compoundname).encode(ExternalEncoding))
+ if self.title:
+ showIndent(outfile, level)
+ outfile.write('title=model_.xsd_string(\n')
+ self.title.exportLiteral(outfile, level, name_='title')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('basecompoundref=[\n')
+ level += 1
+ for basecompoundref in self.basecompoundref:
+ showIndent(outfile, level)
+ outfile.write('model_.basecompoundref(\n')
+ basecompoundref.exportLiteral(outfile, level, name_='basecompoundref')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('derivedcompoundref=[\n')
+ level += 1
+ for derivedcompoundref in self.derivedcompoundref:
+ showIndent(outfile, level)
+ outfile.write('model_.derivedcompoundref(\n')
+ derivedcompoundref.exportLiteral(outfile, level, name_='derivedcompoundref')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('includes=[\n')
+ level += 1
+ for includes in self.includes:
+ showIndent(outfile, level)
+ outfile.write('model_.includes(\n')
+ includes.exportLiteral(outfile, level, name_='includes')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('includedby=[\n')
+ level += 1
+ for includedby in self.includedby:
+ showIndent(outfile, level)
+ outfile.write('model_.includedby(\n')
+ includedby.exportLiteral(outfile, level, name_='includedby')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.incdepgraph:
+ showIndent(outfile, level)
+ outfile.write('incdepgraph=model_.graphType(\n')
+ self.incdepgraph.exportLiteral(outfile, level, name_='incdepgraph')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.invincdepgraph:
+ showIndent(outfile, level)
+ outfile.write('invincdepgraph=model_.graphType(\n')
+ self.invincdepgraph.exportLiteral(outfile, level, name_='invincdepgraph')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('innerdir=[\n')
+ level += 1
+ for innerdir in self.innerdir:
+ showIndent(outfile, level)
+ outfile.write('model_.innerdir(\n')
+ innerdir.exportLiteral(outfile, level, name_='innerdir')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('innerfile=[\n')
+ level += 1
+ for innerfile in self.innerfile:
+ showIndent(outfile, level)
+ outfile.write('model_.innerfile(\n')
+ innerfile.exportLiteral(outfile, level, name_='innerfile')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('innerclass=[\n')
+ level += 1
+ for innerclass in self.innerclass:
+ showIndent(outfile, level)
+ outfile.write('model_.innerclass(\n')
+ innerclass.exportLiteral(outfile, level, name_='innerclass')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('innernamespace=[\n')
+ level += 1
+ for innernamespace in self.innernamespace:
+ showIndent(outfile, level)
+ outfile.write('model_.innernamespace(\n')
+ innernamespace.exportLiteral(outfile, level, name_='innernamespace')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('innerpage=[\n')
+ level += 1
+ for innerpage in self.innerpage:
+ showIndent(outfile, level)
+ outfile.write('model_.innerpage(\n')
+ innerpage.exportLiteral(outfile, level, name_='innerpage')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('innergroup=[\n')
+ level += 1
+ for innergroup in self.innergroup:
+ showIndent(outfile, level)
+ outfile.write('model_.innergroup(\n')
+ innergroup.exportLiteral(outfile, level, name_='innergroup')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.templateparamlist:
+ showIndent(outfile, level)
+ outfile.write('templateparamlist=model_.templateparamlistType(\n')
+ self.templateparamlist.exportLiteral(outfile, level, name_='templateparamlist')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('sectiondef=[\n')
+ level += 1
+ for sectiondef in self.sectiondef:
+ showIndent(outfile, level)
+ outfile.write('model_.sectiondef(\n')
+ sectiondef.exportLiteral(outfile, level, name_='sectiondef')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.briefdescription:
+ showIndent(outfile, level)
+ outfile.write('briefdescription=model_.descriptionType(\n')
+ self.briefdescription.exportLiteral(outfile, level, name_='briefdescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.detaileddescription:
+ showIndent(outfile, level)
+ outfile.write('detaileddescription=model_.descriptionType(\n')
+ self.detaileddescription.exportLiteral(outfile, level, name_='detaileddescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.inheritancegraph:
+ showIndent(outfile, level)
+ outfile.write('inheritancegraph=model_.graphType(\n')
+ self.inheritancegraph.exportLiteral(outfile, level, name_='inheritancegraph')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.collaborationgraph:
+ showIndent(outfile, level)
+ outfile.write('collaborationgraph=model_.graphType(\n')
+ self.collaborationgraph.exportLiteral(outfile, level, name_='collaborationgraph')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.programlisting:
+ showIndent(outfile, level)
+ outfile.write('programlisting=model_.listingType(\n')
+ self.programlisting.exportLiteral(outfile, level, name_='programlisting')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.location:
+ showIndent(outfile, level)
+ outfile.write('location=model_.locationType(\n')
+ self.location.exportLiteral(outfile, level, name_='location')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.listofallmembers:
+ showIndent(outfile, level)
+ outfile.write('listofallmembers=model_.listofallmembersType(\n')
+ self.listofallmembers.exportLiteral(outfile, level, name_='listofallmembers')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'compoundname':
+ compoundname_ = ''
+ for text__content_ in child_.childNodes:
+ compoundname_ += text__content_.nodeValue
+ self.compoundname = compoundname_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ obj_ = docTitleType.factory()
+ obj_.build(child_)
+ self.set_title(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'basecompoundref':
+ obj_ = compoundRefType.factory()
+ obj_.build(child_)
+ self.basecompoundref.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'derivedcompoundref':
+ obj_ = compoundRefType.factory()
+ obj_.build(child_)
+ self.derivedcompoundref.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'includes':
+ obj_ = incType.factory()
+ obj_.build(child_)
+ self.includes.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'includedby':
+ obj_ = incType.factory()
+ obj_.build(child_)
+ self.includedby.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'incdepgraph':
+ obj_ = graphType.factory()
+ obj_.build(child_)
+ self.set_incdepgraph(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'invincdepgraph':
+ obj_ = graphType.factory()
+ obj_.build(child_)
+ self.set_invincdepgraph(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innerdir':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innerdir.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innerfile':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innerfile.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innerclass':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innerclass.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innernamespace':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innernamespace.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innerpage':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innerpage.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'innergroup':
+ obj_ = refType.factory()
+ obj_.build(child_)
+ self.innergroup.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'templateparamlist':
+ obj_ = templateparamlistType.factory()
+ obj_.build(child_)
+ self.set_templateparamlist(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sectiondef':
+ obj_ = sectiondefType.factory()
+ obj_.build(child_)
+ self.sectiondef.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'briefdescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_briefdescription(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'detaileddescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_detaileddescription(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'inheritancegraph':
+ obj_ = graphType.factory()
+ obj_.build(child_)
+ self.set_inheritancegraph(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'collaborationgraph':
+ obj_ = graphType.factory()
+ obj_.build(child_)
+ self.set_collaborationgraph(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'programlisting':
+ obj_ = listingType.factory()
+ obj_.build(child_)
+ self.set_programlisting(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'location':
+ obj_ = locationType.factory()
+ obj_.build(child_)
+ self.set_location(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'listofallmembers':
+ obj_ = listofallmembersType.factory()
+ obj_.build(child_)
+ self.set_listofallmembers(obj_)
+# end class compounddefType
+
+
+class listofallmembersType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, member=None):
+ if member is None:
+ self.member = []
+ else:
+ self.member = member
+ def factory(*args_, **kwargs_):
+ if listofallmembersType.subclass:
+ return listofallmembersType.subclass(*args_, **kwargs_)
+ else:
+ return listofallmembersType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_member(self): return self.member
+ def set_member(self, member): self.member = member
+ def add_member(self, value): self.member.append(value)
+ def insert_member(self, index, value): self.member[index] = value
+ def export(self, outfile, level, namespace_='', name_='listofallmembersType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='listofallmembersType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='listofallmembersType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='listofallmembersType'):
+ for member_ in self.member:
+ member_.export(outfile, level, namespace_, name_='member')
+ def hasContent_(self):
+ if (
+ self.member is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='listofallmembersType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('member=[\n')
+ level += 1
+ for member in self.member:
+ showIndent(outfile, level)
+ outfile.write('model_.member(\n')
+ member.exportLiteral(outfile, level, name_='member')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'member':
+ obj_ = memberRefType.factory()
+ obj_.build(child_)
+ self.member.append(obj_)
+# end class listofallmembersType
+
+
+class memberRefType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, virt=None, prot=None, refid=None, ambiguityscope=None, scope=None, name=None):
+ self.virt = virt
+ self.prot = prot
+ self.refid = refid
+ self.ambiguityscope = ambiguityscope
+ self.scope = scope
+ self.name = name
+ def factory(*args_, **kwargs_):
+ if memberRefType.subclass:
+ return memberRefType.subclass(*args_, **kwargs_)
+ else:
+ return memberRefType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_scope(self): return self.scope
+ def set_scope(self, scope): self.scope = scope
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_virt(self): return self.virt
+ def set_virt(self, virt): self.virt = virt
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def get_ambiguityscope(self): return self.ambiguityscope
+ def set_ambiguityscope(self, ambiguityscope): self.ambiguityscope = ambiguityscope
+ def export(self, outfile, level, namespace_='', name_='memberRefType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='memberRefType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='memberRefType'):
+ if self.virt is not None:
+ outfile.write(' virt=%s' % (quote_attrib(self.virt), ))
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ if self.ambiguityscope is not None:
+ outfile.write(' ambiguityscope=%s' % (self.format_string(quote_attrib(self.ambiguityscope).encode(ExternalEncoding), input_name='ambiguityscope'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='memberRefType'):
+ if self.scope is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sscope>%s</%sscope>\n' % (namespace_, self.format_string(quote_xml(self.scope).encode(ExternalEncoding), input_name='scope'), namespace_))
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sname>%s</%sname>\n' % (namespace_, self.format_string(quote_xml(self.name).encode(ExternalEncoding), input_name='name'), namespace_))
+ def hasContent_(self):
+ if (
+ self.scope is not None or
+ self.name is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='memberRefType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.virt is not None:
+ showIndent(outfile, level)
+ outfile.write('virt = "%s",\n' % (self.virt,))
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ if self.ambiguityscope is not None:
+ showIndent(outfile, level)
+ outfile.write('ambiguityscope = %s,\n' % (self.ambiguityscope,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('scope=%s,\n' % quote_python(self.scope).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('name=%s,\n' % quote_python(self.name).encode(ExternalEncoding))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('virt'):
+ self.virt = attrs.get('virt').value
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ if attrs.get('ambiguityscope'):
+ self.ambiguityscope = attrs.get('ambiguityscope').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'scope':
+ scope_ = ''
+ for text__content_ in child_.childNodes:
+ scope_ += text__content_.nodeValue
+ self.scope = scope_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'name':
+ name_ = ''
+ for text__content_ in child_.childNodes:
+ name_ += text__content_.nodeValue
+ self.name = name_
+# end class memberRefType
+
+
+class scope(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if scope.subclass:
+ return scope.subclass(*args_, **kwargs_)
+ else:
+ return scope(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='scope', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='scope')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='scope'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='scope'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='scope'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class scope
+
+
+class name(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if name.subclass:
+ return name.subclass(*args_, **kwargs_)
+ else:
+ return name(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='name', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='name')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='name'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='name'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='name'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class name
+
+
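+# The mixed-content classes below pair optional XML attributes with free text: buildAttributes()
+# collects the attribute values, and buildChildren() accumulates text/CDATA nodes into valueOf_
+# and, via mixedclass_, into content_.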
+class compoundRefType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, virt=None, prot=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ self.virt = virt
+ self.prot = prot
+ self.refid = refid
+ self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if compoundRefType.subclass:
+ return compoundRefType.subclass(*args_, **kwargs_)
+ else:
+ return compoundRefType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_virt(self): return self.virt
+ def set_virt(self, virt): self.virt = virt
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='compoundRefType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='compoundRefType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='compoundRefType'):
+ if self.virt is not None:
+ outfile.write(' virt=%s' % (quote_attrib(self.virt), ))
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='compoundRefType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='compoundRefType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.virt is not None:
+ showIndent(outfile, level)
+ outfile.write('virt = "%s",\n' % (self.virt,))
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('virt'):
+ self.virt = attrs.get('virt').value
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class compoundRefType
+
+
+class reimplementType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ self.refid = refid
+ self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if reimplementType.subclass:
+ return reimplementType.subclass(*args_, **kwargs_)
+ else:
+ return reimplementType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='reimplementType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='reimplementType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='reimplementType'):
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='reimplementType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='reimplementType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class reimplementType
+
+
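+# incType: same mixed text handling, with local and refid attributes for an include reference.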
+class incType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, local=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ self.local = local
+ self.refid = refid
+ self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if incType.subclass:
+ return incType.subclass(*args_, **kwargs_)
+ else:
+ return incType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_local(self): return self.local
+ def set_local(self, local): self.local = local
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='incType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='incType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='incType'):
+ if self.local is not None:
+ outfile.write(' local=%s' % (quote_attrib(self.local), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='incType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='incType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.local is not None:
+ showIndent(outfile, level)
+ outfile.write('local = "%s",\n' % (self.local,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('local'):
+ self.local = attrs.get('local').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class incType
+
+
+class refType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, prot=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
+ self.prot = prot
+ self.refid = refid
+ self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if refType.subclass:
+ return refType.subclass(*args_, **kwargs_)
+ else:
+ return refType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='refType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='refType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='refType'):
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='refType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='refType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class refType
+
+
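+# refTextType: cross-reference text carrying refid, kindref and external attributes.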
+class refTextType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None, content_=None):
+ self.refid = refid
+ self.kindref = kindref
+ self.external = external
+ self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if refTextType.subclass:
+ return refTextType.subclass(*args_, **kwargs_)
+ else:
+ return refTextType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def get_kindref(self): return self.kindref
+ def set_kindref(self, kindref): self.kindref = kindref
+ def get_external(self): return self.external
+ def set_external(self, external): self.external = external
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='refTextType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='refTextType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='refTextType'):
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ if self.kindref is not None:
+ outfile.write(' kindref=%s' % (quote_attrib(self.kindref), ))
+ if self.external is not None:
+ outfile.write(' external=%s' % (self.format_string(quote_attrib(self.external).encode(ExternalEncoding), input_name='external'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='refTextType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='refTextType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ if self.kindref is not None:
+ showIndent(outfile, level)
+ outfile.write('kindref = "%s",\n' % (self.kindref,))
+ if self.external is not None:
+ showIndent(outfile, level)
+ outfile.write('external = %s,\n' % (self.external,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ if attrs.get('kindref'):
+ self.kindref = attrs.get('kindref').value
+ if attrs.get('external'):
+ self.external = attrs.get('external').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class refTextType
+
+
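+# sectiondefType: a member section with a kind attribute, optional header and description,
+# and a list of memberdef children.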
+class sectiondefType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, header=None, description=None, memberdef=None):
+ self.kind = kind
+ self.header = header
+ self.description = description
+ if memberdef is None:
+ self.memberdef = []
+ else:
+ self.memberdef = memberdef
+ def factory(*args_, **kwargs_):
+ if sectiondefType.subclass:
+ return sectiondefType.subclass(*args_, **kwargs_)
+ else:
+ return sectiondefType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_header(self): return self.header
+ def set_header(self, header): self.header = header
+ def get_description(self): return self.description
+ def set_description(self, description): self.description = description
+ def get_memberdef(self): return self.memberdef
+ def set_memberdef(self, memberdef): self.memberdef = memberdef
+ def add_memberdef(self, value): self.memberdef.append(value)
+ def insert_memberdef(self, index, value): self.memberdef[index] = value
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def export(self, outfile, level, namespace_='', name_='sectiondefType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='sectiondefType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='sectiondefType'):
+ if self.kind is not None:
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='sectiondefType'):
+ if self.header is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sheader>%s</%sheader>\n' % (namespace_, self.format_string(quote_xml(self.header).encode(ExternalEncoding), input_name='header'), namespace_))
+ if self.description:
+ self.description.export(outfile, level, namespace_, name_='description')
+ for memberdef_ in self.memberdef:
+ memberdef_.export(outfile, level, namespace_, name_='memberdef')
+ def hasContent_(self):
+ if (
+ self.header is not None or
+ self.description is not None or
+ self.memberdef is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='sectiondefType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('header=%s,\n' % quote_python(self.header).encode(ExternalEncoding))
+ if self.description:
+ showIndent(outfile, level)
+ outfile.write('description=model_.descriptionType(\n')
+ self.description.exportLiteral(outfile, level, name_='description')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('memberdef=[\n')
+ level += 1
+ for memberdef in self.memberdef:
+ showIndent(outfile, level)
+ outfile.write('model_.memberdef(\n')
+ memberdef.exportLiteral(outfile, level, name_='memberdef')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'header':
+ header_ = ''
+ for text__content_ in child_.childNodes:
+ header_ += text__content_.nodeValue
+ self.header = header_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'description':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_description(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'memberdef':
+ obj_ = memberdefType.factory()
+ obj_.build(child_)
+ self.memberdef.append(obj_)
+# end class sectiondefType
+
+
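+# memberdefType: the largest binding in this group; it tracks the full attribute set (kind, prot,
+# static, virt, const, ...) and child elements such as type, name, param, enumvalue, the brief/
+# detailed/in-body descriptions, location, and reference lists.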
+class memberdefType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, initonly=None, kind=None, volatile=None, const=None, raisexx=None, virt=None, readable=None, prot=None, explicit=None, new=None, final=None, writable=None, add=None, static=None, remove=None, sealed=None, mutable=None, gettable=None, inline=None, settable=None, id=None, templateparamlist=None, type_=None, definition=None, argsstring=None, name=None, read=None, write=None, bitfield=None, reimplements=None, reimplementedby=None, param=None, enumvalue=None, initializer=None, exceptions=None, briefdescription=None, detaileddescription=None, inbodydescription=None, location=None, references=None, referencedby=None):
+ self.initonly = initonly
+ self.kind = kind
+ self.volatile = volatile
+ self.const = const
+ self.raisexx = raisexx
+ self.virt = virt
+ self.readable = readable
+ self.prot = prot
+ self.explicit = explicit
+ self.new = new
+ self.final = final
+ self.writable = writable
+ self.add = add
+ self.static = static
+ self.remove = remove
+ self.sealed = sealed
+ self.mutable = mutable
+ self.gettable = gettable
+ self.inline = inline
+ self.settable = settable
+ self.id = id
+ self.templateparamlist = templateparamlist
+ self.type_ = type_
+ self.definition = definition
+ self.argsstring = argsstring
+ self.name = name
+ self.read = read
+ self.write = write
+ self.bitfield = bitfield
+ if reimplements is None:
+ self.reimplements = []
+ else:
+ self.reimplements = reimplements
+ if reimplementedby is None:
+ self.reimplementedby = []
+ else:
+ self.reimplementedby = reimplementedby
+ if param is None:
+ self.param = []
+ else:
+ self.param = param
+ if enumvalue is None:
+ self.enumvalue = []
+ else:
+ self.enumvalue = enumvalue
+ self.initializer = initializer
+ self.exceptions = exceptions
+ self.briefdescription = briefdescription
+ self.detaileddescription = detaileddescription
+ self.inbodydescription = inbodydescription
+ self.location = location
+ if references is None:
+ self.references = []
+ else:
+ self.references = references
+ if referencedby is None:
+ self.referencedby = []
+ else:
+ self.referencedby = referencedby
+ def factory(*args_, **kwargs_):
+ if memberdefType.subclass:
+ return memberdefType.subclass(*args_, **kwargs_)
+ else:
+ return memberdefType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_templateparamlist(self): return self.templateparamlist
+ def set_templateparamlist(self, templateparamlist): self.templateparamlist = templateparamlist
+ def get_type(self): return self.type_
+ def set_type(self, type_): self.type_ = type_
+ def get_definition(self): return self.definition
+ def set_definition(self, definition): self.definition = definition
+ def get_argsstring(self): return self.argsstring
+ def set_argsstring(self, argsstring): self.argsstring = argsstring
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_read(self): return self.read
+ def set_read(self, read): self.read = read
+ def get_write(self): return self.write
+ def set_write(self, write): self.write = write
+ def get_bitfield(self): return self.bitfield
+ def set_bitfield(self, bitfield): self.bitfield = bitfield
+ def get_reimplements(self): return self.reimplements
+ def set_reimplements(self, reimplements): self.reimplements = reimplements
+ def add_reimplements(self, value): self.reimplements.append(value)
+ def insert_reimplements(self, index, value): self.reimplements[index] = value
+ def get_reimplementedby(self): return self.reimplementedby
+ def set_reimplementedby(self, reimplementedby): self.reimplementedby = reimplementedby
+ def add_reimplementedby(self, value): self.reimplementedby.append(value)
+ def insert_reimplementedby(self, index, value): self.reimplementedby[index] = value
+ def get_param(self): return self.param
+ def set_param(self, param): self.param = param
+ def add_param(self, value): self.param.append(value)
+ def insert_param(self, index, value): self.param[index] = value
+ def get_enumvalue(self): return self.enumvalue
+ def set_enumvalue(self, enumvalue): self.enumvalue = enumvalue
+ def add_enumvalue(self, value): self.enumvalue.append(value)
+ def insert_enumvalue(self, index, value): self.enumvalue[index] = value
+ def get_initializer(self): return self.initializer
+ def set_initializer(self, initializer): self.initializer = initializer
+ def get_exceptions(self): return self.exceptions
+ def set_exceptions(self, exceptions): self.exceptions = exceptions
+ def get_briefdescription(self): return self.briefdescription
+ def set_briefdescription(self, briefdescription): self.briefdescription = briefdescription
+ def get_detaileddescription(self): return self.detaileddescription
+ def set_detaileddescription(self, detaileddescription): self.detaileddescription = detaileddescription
+ def get_inbodydescription(self): return self.inbodydescription
+ def set_inbodydescription(self, inbodydescription): self.inbodydescription = inbodydescription
+ def get_location(self): return self.location
+ def set_location(self, location): self.location = location
+ def get_references(self): return self.references
+ def set_references(self, references): self.references = references
+ def add_references(self, value): self.references.append(value)
+ def insert_references(self, index, value): self.references[index] = value
+ def get_referencedby(self): return self.referencedby
+ def set_referencedby(self, referencedby): self.referencedby = referencedby
+ def add_referencedby(self, value): self.referencedby.append(value)
+ def insert_referencedby(self, index, value): self.referencedby[index] = value
+ def get_initonly(self): return self.initonly
+ def set_initonly(self, initonly): self.initonly = initonly
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def get_volatile(self): return self.volatile
+ def set_volatile(self, volatile): self.volatile = volatile
+ def get_const(self): return self.const
+ def set_const(self, const): self.const = const
+ def get_raise(self): return self.raisexx
+ def set_raise(self, raisexx): self.raisexx = raisexx
+ def get_virt(self): return self.virt
+ def set_virt(self, virt): self.virt = virt
+ def get_readable(self): return self.readable
+ def set_readable(self, readable): self.readable = readable
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_explicit(self): return self.explicit
+ def set_explicit(self, explicit): self.explicit = explicit
+ def get_new(self): return self.new
+ def set_new(self, new): self.new = new
+ def get_final(self): return self.final
+ def set_final(self, final): self.final = final
+ def get_writable(self): return self.writable
+ def set_writable(self, writable): self.writable = writable
+ def get_add(self): return self.add
+ def set_add(self, add): self.add = add
+ def get_static(self): return self.static
+ def set_static(self, static): self.static = static
+ def get_remove(self): return self.remove
+ def set_remove(self, remove): self.remove = remove
+ def get_sealed(self): return self.sealed
+ def set_sealed(self, sealed): self.sealed = sealed
+ def get_mutable(self): return self.mutable
+ def set_mutable(self, mutable): self.mutable = mutable
+ def get_gettable(self): return self.gettable
+ def set_gettable(self, gettable): self.gettable = gettable
+ def get_inline(self): return self.inline
+ def set_inline(self, inline): self.inline = inline
+ def get_settable(self): return self.settable
+ def set_settable(self, settable): self.settable = settable
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='memberdefType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='memberdefType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='memberdefType'):
+ if self.initonly is not None:
+ outfile.write(' initonly=%s' % (quote_attrib(self.initonly), ))
+ if self.kind is not None:
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ if self.volatile is not None:
+ outfile.write(' volatile=%s' % (quote_attrib(self.volatile), ))
+ if self.const is not None:
+ outfile.write(' const=%s' % (quote_attrib(self.const), ))
+ if self.raisexx is not None:
+ outfile.write(' raise=%s' % (quote_attrib(self.raisexx), ))
+ if self.virt is not None:
+ outfile.write(' virt=%s' % (quote_attrib(self.virt), ))
+ if self.readable is not None:
+ outfile.write(' readable=%s' % (quote_attrib(self.readable), ))
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.explicit is not None:
+ outfile.write(' explicit=%s' % (quote_attrib(self.explicit), ))
+ if self.new is not None:
+ outfile.write(' new=%s' % (quote_attrib(self.new), ))
+ if self.final is not None:
+ outfile.write(' final=%s' % (quote_attrib(self.final), ))
+ if self.writable is not None:
+ outfile.write(' writable=%s' % (quote_attrib(self.writable), ))
+ if self.add is not None:
+ outfile.write(' add=%s' % (quote_attrib(self.add), ))
+ if self.static is not None:
+ outfile.write(' static=%s' % (quote_attrib(self.static), ))
+ if self.remove is not None:
+ outfile.write(' remove=%s' % (quote_attrib(self.remove), ))
+ if self.sealed is not None:
+ outfile.write(' sealed=%s' % (quote_attrib(self.sealed), ))
+ if self.mutable is not None:
+ outfile.write(' mutable=%s' % (quote_attrib(self.mutable), ))
+ if self.gettable is not None:
+ outfile.write(' gettable=%s' % (quote_attrib(self.gettable), ))
+ if self.inline is not None:
+ outfile.write(' inline=%s' % (quote_attrib(self.inline), ))
+ if self.settable is not None:
+ outfile.write(' settable=%s' % (quote_attrib(self.settable), ))
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='memberdefType'):
+ if self.templateparamlist:
+ self.templateparamlist.export(outfile, level, namespace_, name_='templateparamlist')
+ if self.type_:
+ self.type_.export(outfile, level, namespace_, name_='type')
+ if self.definition is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sdefinition>%s</%sdefinition>\n' % (namespace_, self.format_string(quote_xml(self.definition).encode(ExternalEncoding), input_name='definition'), namespace_))
+ if self.argsstring is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sargsstring>%s</%sargsstring>\n' % (namespace_, self.format_string(quote_xml(self.argsstring).encode(ExternalEncoding), input_name='argsstring'), namespace_))
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sname>%s</%sname>\n' % (namespace_, self.format_string(quote_xml(self.name).encode(ExternalEncoding), input_name='name'), namespace_))
+ if self.read is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sread>%s</%sread>\n' % (namespace_, self.format_string(quote_xml(self.read).encode(ExternalEncoding), input_name='read'), namespace_))
+ if self.write is not None:
+ showIndent(outfile, level)
+ outfile.write('<%swrite>%s</%swrite>\n' % (namespace_, self.format_string(quote_xml(self.write).encode(ExternalEncoding), input_name='write'), namespace_))
+ if self.bitfield is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sbitfield>%s</%sbitfield>\n' % (namespace_, self.format_string(quote_xml(self.bitfield).encode(ExternalEncoding), input_name='bitfield'), namespace_))
+ for reimplements_ in self.reimplements:
+ reimplements_.export(outfile, level, namespace_, name_='reimplements')
+ for reimplementedby_ in self.reimplementedby:
+ reimplementedby_.export(outfile, level, namespace_, name_='reimplementedby')
+ for param_ in self.param:
+ param_.export(outfile, level, namespace_, name_='param')
+ for enumvalue_ in self.enumvalue:
+ enumvalue_.export(outfile, level, namespace_, name_='enumvalue')
+ if self.initializer:
+ self.initializer.export(outfile, level, namespace_, name_='initializer')
+ if self.exceptions:
+ self.exceptions.export(outfile, level, namespace_, name_='exceptions')
+ if self.briefdescription:
+ self.briefdescription.export(outfile, level, namespace_, name_='briefdescription')
+ if self.detaileddescription:
+ self.detaileddescription.export(outfile, level, namespace_, name_='detaileddescription')
+ if self.inbodydescription:
+ self.inbodydescription.export(outfile, level, namespace_, name_='inbodydescription')
+ if self.location:
+ self.location.export(outfile, level, namespace_, name_='location', )
+ for references_ in self.references:
+ references_.export(outfile, level, namespace_, name_='references')
+ for referencedby_ in self.referencedby:
+ referencedby_.export(outfile, level, namespace_, name_='referencedby')
+ def hasContent_(self):
+ if (
+ self.templateparamlist is not None or
+ self.type_ is not None or
+ self.definition is not None or
+ self.argsstring is not None or
+ self.name is not None or
+ self.read is not None or
+ self.write is not None or
+ self.bitfield is not None or
+ self.reimplements is not None or
+ self.reimplementedby is not None or
+ self.param is not None or
+ self.enumvalue is not None or
+ self.initializer is not None or
+ self.exceptions is not None or
+ self.briefdescription is not None or
+ self.detaileddescription is not None or
+ self.inbodydescription is not None or
+ self.location is not None or
+ self.references is not None or
+ self.referencedby is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='memberdefType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.initonly is not None:
+ showIndent(outfile, level)
+ outfile.write('initonly = "%s",\n' % (self.initonly,))
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ if self.volatile is not None:
+ showIndent(outfile, level)
+ outfile.write('volatile = "%s",\n' % (self.volatile,))
+ if self.const is not None:
+ showIndent(outfile, level)
+ outfile.write('const = "%s",\n' % (self.const,))
+ if self.raisexx is not None:
+ showIndent(outfile, level)
+ outfile.write('raisexx = "%s",\n' % (self.raisexx,))
+ if self.virt is not None:
+ showIndent(outfile, level)
+ outfile.write('virt = "%s",\n' % (self.virt,))
+ if self.readable is not None:
+ showIndent(outfile, level)
+ outfile.write('readable = "%s",\n' % (self.readable,))
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.explicit is not None:
+ showIndent(outfile, level)
+ outfile.write('explicit = "%s",\n' % (self.explicit,))
+ if self.new is not None:
+ showIndent(outfile, level)
+ outfile.write('new = "%s",\n' % (self.new,))
+ if self.final is not None:
+ showIndent(outfile, level)
+ outfile.write('final = "%s",\n' % (self.final,))
+ if self.writable is not None:
+ showIndent(outfile, level)
+ outfile.write('writable = "%s",\n' % (self.writable,))
+ if self.add is not None:
+ showIndent(outfile, level)
+ outfile.write('add = "%s",\n' % (self.add,))
+ if self.static is not None:
+ showIndent(outfile, level)
+ outfile.write('static = "%s",\n' % (self.static,))
+ if self.remove is not None:
+ showIndent(outfile, level)
+ outfile.write('remove = "%s",\n' % (self.remove,))
+ if self.sealed is not None:
+ showIndent(outfile, level)
+ outfile.write('sealed = "%s",\n' % (self.sealed,))
+ if self.mutable is not None:
+ showIndent(outfile, level)
+ outfile.write('mutable = "%s",\n' % (self.mutable,))
+ if self.gettable is not None:
+ showIndent(outfile, level)
+ outfile.write('gettable = "%s",\n' % (self.gettable,))
+ if self.inline is not None:
+ showIndent(outfile, level)
+ outfile.write('inline = "%s",\n' % (self.inline,))
+ if self.settable is not None:
+ showIndent(outfile, level)
+ outfile.write('settable = "%s",\n' % (self.settable,))
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ if self.templateparamlist:
+ showIndent(outfile, level)
+ outfile.write('templateparamlist=model_.templateparamlistType(\n')
+ self.templateparamlist.exportLiteral(outfile, level, name_='templateparamlist')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.type_:
+ showIndent(outfile, level)
+ outfile.write('type_=model_.linkedTextType(\n')
+ self.type_.exportLiteral(outfile, level, name_='type')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('definition=%s,\n' % quote_python(self.definition).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('argsstring=%s,\n' % quote_python(self.argsstring).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('name=%s,\n' % quote_python(self.name).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('read=%s,\n' % quote_python(self.read).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('write=%s,\n' % quote_python(self.write).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('bitfield=%s,\n' % quote_python(self.bitfield).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('reimplements=[\n')
+ level += 1
+ for reimplements in self.reimplements:
+ showIndent(outfile, level)
+ outfile.write('model_.reimplements(\n')
+ reimplements.exportLiteral(outfile, level, name_='reimplements')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('reimplementedby=[\n')
+ level += 1
+ for reimplementedby in self.reimplementedby:
+ showIndent(outfile, level)
+ outfile.write('model_.reimplementedby(\n')
+ reimplementedby.exportLiteral(outfile, level, name_='reimplementedby')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('param=[\n')
+ level += 1
+ for param in self.param:
+ showIndent(outfile, level)
+ outfile.write('model_.param(\n')
+ param.exportLiteral(outfile, level, name_='param')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('enumvalue=[\n')
+ level += 1
+ for enumvalue in self.enumvalue:
+ showIndent(outfile, level)
+ outfile.write('model_.enumvalue(\n')
+ enumvalue.exportLiteral(outfile, level, name_='enumvalue')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.initializer:
+ showIndent(outfile, level)
+ outfile.write('initializer=model_.linkedTextType(\n')
+ self.initializer.exportLiteral(outfile, level, name_='initializer')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.exceptions:
+ showIndent(outfile, level)
+ outfile.write('exceptions=model_.linkedTextType(\n')
+ self.exceptions.exportLiteral(outfile, level, name_='exceptions')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.briefdescription:
+ showIndent(outfile, level)
+ outfile.write('briefdescription=model_.descriptionType(\n')
+ self.briefdescription.exportLiteral(outfile, level, name_='briefdescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.detaileddescription:
+ showIndent(outfile, level)
+ outfile.write('detaileddescription=model_.descriptionType(\n')
+ self.detaileddescription.exportLiteral(outfile, level, name_='detaileddescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.inbodydescription:
+ showIndent(outfile, level)
+ outfile.write('inbodydescription=model_.descriptionType(\n')
+ self.inbodydescription.exportLiteral(outfile, level, name_='inbodydescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.location:
+ showIndent(outfile, level)
+ outfile.write('location=model_.locationType(\n')
+ self.location.exportLiteral(outfile, level, name_='location')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('references=[\n')
+ level += 1
+ for references in self.references:
+ showIndent(outfile, level)
+ outfile.write('model_.references(\n')
+ references.exportLiteral(outfile, level, name_='references')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('referencedby=[\n')
+ level += 1
+ for referencedby in self.referencedby:
+ showIndent(outfile, level)
+ outfile.write('model_.referencedby(\n')
+ referencedby.exportLiteral(outfile, level, name_='referencedby')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('initonly'):
+ self.initonly = attrs.get('initonly').value
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ if attrs.get('volatile'):
+ self.volatile = attrs.get('volatile').value
+ if attrs.get('const'):
+ self.const = attrs.get('const').value
+ if attrs.get('raise'):
+ self.raisexx = attrs.get('raise').value
+ if attrs.get('virt'):
+ self.virt = attrs.get('virt').value
+ if attrs.get('readable'):
+ self.readable = attrs.get('readable').value
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('explicit'):
+ self.explicit = attrs.get('explicit').value
+ if attrs.get('new'):
+ self.new = attrs.get('new').value
+ if attrs.get('final'):
+ self.final = attrs.get('final').value
+ if attrs.get('writable'):
+ self.writable = attrs.get('writable').value
+ if attrs.get('add'):
+ self.add = attrs.get('add').value
+ if attrs.get('static'):
+ self.static = attrs.get('static').value
+ if attrs.get('remove'):
+ self.remove = attrs.get('remove').value
+ if attrs.get('sealed'):
+ self.sealed = attrs.get('sealed').value
+ if attrs.get('mutable'):
+ self.mutable = attrs.get('mutable').value
+ if attrs.get('gettable'):
+ self.gettable = attrs.get('gettable').value
+ if attrs.get('inline'):
+ self.inline = attrs.get('inline').value
+ if attrs.get('settable'):
+ self.settable = attrs.get('settable').value
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'templateparamlist':
+ obj_ = templateparamlistType.factory()
+ obj_.build(child_)
+ self.set_templateparamlist(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'type':
+ obj_ = linkedTextType.factory()
+ obj_.build(child_)
+ self.set_type(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'definition':
+ definition_ = ''
+ for text__content_ in child_.childNodes:
+ definition_ += text__content_.nodeValue
+ self.definition = definition_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'argsstring':
+ argsstring_ = ''
+ for text__content_ in child_.childNodes:
+ argsstring_ += text__content_.nodeValue
+ self.argsstring = argsstring_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'name':
+ name_ = ''
+ for text__content_ in child_.childNodes:
+ name_ += text__content_.nodeValue
+ self.name = name_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'read':
+ read_ = ''
+ for text__content_ in child_.childNodes:
+ read_ += text__content_.nodeValue
+ self.read = read_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'write':
+ write_ = ''
+ for text__content_ in child_.childNodes:
+ write_ += text__content_.nodeValue
+ self.write = write_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'bitfield':
+ bitfield_ = ''
+ for text__content_ in child_.childNodes:
+ bitfield_ += text__content_.nodeValue
+ self.bitfield = bitfield_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'reimplements':
+ obj_ = reimplementType.factory()
+ obj_.build(child_)
+ self.reimplements.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'reimplementedby':
+ obj_ = reimplementType.factory()
+ obj_.build(child_)
+ self.reimplementedby.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'param':
+ obj_ = paramType.factory()
+ obj_.build(child_)
+ self.param.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'enumvalue':
+ obj_ = enumvalueType.factory()
+ obj_.build(child_)
+ self.enumvalue.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'initializer':
+ obj_ = linkedTextType.factory()
+ obj_.build(child_)
+ self.set_initializer(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'exceptions':
+ obj_ = linkedTextType.factory()
+ obj_.build(child_)
+ self.set_exceptions(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'briefdescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_briefdescription(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'detaileddescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_detaileddescription(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'inbodydescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_inbodydescription(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'location':
+ obj_ = locationType.factory()
+ obj_.build(child_)
+ self.set_location(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'references':
+ obj_ = referenceType.factory()
+ obj_.build(child_)
+ self.references.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'referencedby':
+ obj_ = referenceType.factory()
+ obj_.build(child_)
+ self.referencedby.append(obj_)
+# end class memberdefType
+
+
+class definition(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if definition.subclass:
+ return definition.subclass(*args_, **kwargs_)
+ else:
+ return definition(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='definition', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='definition')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='definition'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='definition'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='definition'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class definition
+
+
+class argsstring(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if argsstring.subclass:
+ return argsstring.subclass(*args_, **kwargs_)
+ else:
+ return argsstring(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='argsstring', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='argsstring')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='argsstring'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='argsstring'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='argsstring'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class argsstring
+
+
+class read(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if read.subclass:
+ return read.subclass(*args_, **kwargs_)
+ else:
+ return read(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='read', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='read')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='read'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='read'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='read'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class read
+
+
+class write(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if write.subclass:
+ return write.subclass(*args_, **kwargs_)
+ else:
+ return write(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='write', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='write')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='write'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='write'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='write'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class write
+
+
+class bitfield(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if bitfield.subclass:
+ return bitfield.subclass(*args_, **kwargs_)
+ else:
+ return bitfield(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='bitfield', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='bitfield')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='bitfield'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='bitfield'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='bitfield'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class bitfield
+
+
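+# descriptionType holds mixed content: build() appends title, para, sect1 and
+# internal child elements as well as bare text nodes to content_ (wrapped in
+# MixedContainer objects), and exportChildren() replays them in document order.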
+class descriptionType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+    def __init__(self, title=None, para=None, sect1=None, internal=None, mixedclass_=None, content_=None):
+        # Keep references to the simple children so the accessors and
+        # hasContent_() below never reference an unset attribute.
+        self.title = title
+        self.para = para
+        self.sect1 = sect1
+        self.internal = internal
+        if mixedclass_ is None:
+            self.mixedclass_ = MixedContainer
+        else:
+            self.mixedclass_ = mixedclass_
+        if content_ is None:
+            self.content_ = []
+        else:
+            self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if descriptionType.subclass:
+ return descriptionType.subclass(*args_, **kwargs_)
+ else:
+ return descriptionType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect1(self): return self.sect1
+ def set_sect1(self, sect1): self.sect1 = sect1
+ def add_sect1(self, value): self.sect1.append(value)
+ def insert_sect1(self, index, value): self.sect1[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def export(self, outfile, level, namespace_='', name_='descriptionType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='descriptionType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='descriptionType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='descriptionType'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None or
+ self.sect1 is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='descriptionType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+    def exportLiteralChildren(self, outfile, level, name_):
+        # The mixed content list is written out once for all child kinds.
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ childobj_ = docTitleType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'title', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect1':
+ childobj_ = docSect1Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect1', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ childobj_ = docInternalType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'internal', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class descriptionType
+
+
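+# enumvalueType presumably corresponds to a single enumerator in the generated
+# XML: prot and id are attributes, while name, initializer and the two
+# description elements are captured as mixed content.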
+class enumvalueType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+    def __init__(self, prot=None, id=None, name=None, initializer=None, briefdescription=None, detaileddescription=None, mixedclass_=None, content_=None):
+        self.prot = prot
+        self.id = id
+        # Also keep the element children so hasContent_() and the accessors
+        # below never reference an unset attribute.
+        self.name = name
+        self.initializer = initializer
+        self.briefdescription = briefdescription
+        self.detaileddescription = detaileddescription
+        if mixedclass_ is None:
+            self.mixedclass_ = MixedContainer
+        else:
+            self.mixedclass_ = mixedclass_
+        if content_ is None:
+            self.content_ = []
+        else:
+            self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if enumvalueType.subclass:
+ return enumvalueType.subclass(*args_, **kwargs_)
+ else:
+ return enumvalueType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_initializer(self): return self.initializer
+ def set_initializer(self, initializer): self.initializer = initializer
+ def get_briefdescription(self): return self.briefdescription
+ def set_briefdescription(self, briefdescription): self.briefdescription = briefdescription
+ def get_detaileddescription(self): return self.detaileddescription
+ def set_detaileddescription(self, detaileddescription): self.detaileddescription = detaileddescription
+ def get_prot(self): return self.prot
+ def set_prot(self, prot): self.prot = prot
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='enumvalueType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='enumvalueType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='enumvalueType'):
+ if self.prot is not None:
+ outfile.write(' prot=%s' % (quote_attrib(self.prot), ))
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='enumvalueType'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.name is not None or
+ self.initializer is not None or
+ self.briefdescription is not None or
+ self.detaileddescription is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='enumvalueType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.prot is not None:
+ showIndent(outfile, level)
+ outfile.write('prot = "%s",\n' % (self.prot,))
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+    def exportLiteralChildren(self, outfile, level, name_):
+        # As in descriptionType, the mixed content list is written out once.
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('prot'):
+ self.prot = attrs.get('prot').value
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'name':
+ value_ = []
+ for text_ in child_.childNodes:
+ value_.append(text_.nodeValue)
+ valuestr_ = ''.join(value_)
+ obj_ = self.mixedclass_(MixedContainer.CategorySimple,
+ MixedContainer.TypeString, 'name', valuestr_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'initializer':
+ childobj_ = linkedTextType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'initializer', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'briefdescription':
+ childobj_ = descriptionType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'briefdescription', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'detaileddescription':
+ childobj_ = descriptionType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'detaileddescription', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class enumvalueType
+
+
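+# templateparamlistType is a plain container; it only collects <param>
+# children, each parsed into a paramType instance.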
+class templateparamlistType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, param=None):
+ if param is None:
+ self.param = []
+ else:
+ self.param = param
+ def factory(*args_, **kwargs_):
+ if templateparamlistType.subclass:
+ return templateparamlistType.subclass(*args_, **kwargs_)
+ else:
+ return templateparamlistType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_param(self): return self.param
+ def set_param(self, param): self.param = param
+ def add_param(self, value): self.param.append(value)
+ def insert_param(self, index, value): self.param[index] = value
+ def export(self, outfile, level, namespace_='', name_='templateparamlistType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='templateparamlistType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='templateparamlistType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='templateparamlistType'):
+ for param_ in self.param:
+ param_.export(outfile, level, namespace_, name_='param')
+ def hasContent_(self):
+ if (
+ self.param is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='templateparamlistType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('param=[\n')
+ level += 1
+ for param in self.param:
+ showIndent(outfile, level)
+ outfile.write('model_.param(\n')
+ param.exportLiteral(outfile, level, name_='param')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'param':
+ obj_ = paramType.factory()
+ obj_.build(child_)
+ self.param.append(obj_)
+# end class templateparamlistType
+
+
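+# paramType gathers the pieces of one parameter declaration: a linked type,
+# the declname/defname/array text children, an optional default value
+# (defval, linked text) and a brief description.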
+class paramType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, type_=None, declname=None, defname=None, array=None, defval=None, briefdescription=None):
+ self.type_ = type_
+ self.declname = declname
+ self.defname = defname
+ self.array = array
+ self.defval = defval
+ self.briefdescription = briefdescription
+ def factory(*args_, **kwargs_):
+ if paramType.subclass:
+ return paramType.subclass(*args_, **kwargs_)
+ else:
+ return paramType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_type(self): return self.type_
+ def set_type(self, type_): self.type_ = type_
+ def get_declname(self): return self.declname
+ def set_declname(self, declname): self.declname = declname
+ def get_defname(self): return self.defname
+ def set_defname(self, defname): self.defname = defname
+ def get_array(self): return self.array
+ def set_array(self, array): self.array = array
+ def get_defval(self): return self.defval
+ def set_defval(self, defval): self.defval = defval
+ def get_briefdescription(self): return self.briefdescription
+ def set_briefdescription(self, briefdescription): self.briefdescription = briefdescription
+ def export(self, outfile, level, namespace_='', name_='paramType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='paramType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='paramType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='paramType'):
+ if self.type_:
+ self.type_.export(outfile, level, namespace_, name_='type')
+ if self.declname is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sdeclname>%s</%sdeclname>\n' % (namespace_, self.format_string(quote_xml(self.declname).encode(ExternalEncoding), input_name='declname'), namespace_))
+ if self.defname is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sdefname>%s</%sdefname>\n' % (namespace_, self.format_string(quote_xml(self.defname).encode(ExternalEncoding), input_name='defname'), namespace_))
+ if self.array is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sarray>%s</%sarray>\n' % (namespace_, self.format_string(quote_xml(self.array).encode(ExternalEncoding), input_name='array'), namespace_))
+ if self.defval:
+ self.defval.export(outfile, level, namespace_, name_='defval')
+ if self.briefdescription:
+ self.briefdescription.export(outfile, level, namespace_, name_='briefdescription')
+ def hasContent_(self):
+ if (
+ self.type_ is not None or
+ self.declname is not None or
+ self.defname is not None or
+ self.array is not None or
+ self.defval is not None or
+ self.briefdescription is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='paramType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ if self.type_:
+ showIndent(outfile, level)
+ outfile.write('type_=model_.linkedTextType(\n')
+ self.type_.exportLiteral(outfile, level, name_='type')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('declname=%s,\n' % quote_python(self.declname).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('defname=%s,\n' % quote_python(self.defname).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('array=%s,\n' % quote_python(self.array).encode(ExternalEncoding))
+ if self.defval:
+ showIndent(outfile, level)
+ outfile.write('defval=model_.linkedTextType(\n')
+ self.defval.exportLiteral(outfile, level, name_='defval')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ if self.briefdescription:
+ showIndent(outfile, level)
+ outfile.write('briefdescription=model_.descriptionType(\n')
+ self.briefdescription.exportLiteral(outfile, level, name_='briefdescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'type':
+ obj_ = linkedTextType.factory()
+ obj_.build(child_)
+ self.set_type(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'declname':
+ declname_ = ''
+ for text__content_ in child_.childNodes:
+ declname_ += text__content_.nodeValue
+ self.declname = declname_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'defname':
+ defname_ = ''
+ for text__content_ in child_.childNodes:
+ defname_ += text__content_.nodeValue
+ self.defname = defname_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'array':
+ array_ = ''
+ for text__content_ in child_.childNodes:
+ array_ += text__content_.nodeValue
+ self.array = array_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'defval':
+ obj_ = linkedTextType.factory()
+ obj_.build(child_)
+ self.set_defval(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'briefdescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_briefdescription(obj_)
+# end class paramType
+
+
+class declname(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if declname.subclass:
+ return declname.subclass(*args_, **kwargs_)
+ else:
+ return declname(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='declname', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='declname')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='declname'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='declname'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='declname'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class declname
+
+
+class defname(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if defname.subclass:
+ return defname.subclass(*args_, **kwargs_)
+ else:
+ return defname(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='defname', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='defname')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='defname'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='defname'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='defname'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class defname
+
+
+class array(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if array.subclass:
+ return array.subclass(*args_, **kwargs_)
+ else:
+ return array(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='array', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='array')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='array'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='array'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='array'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class array
+
+
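+# linkedTextType is text interleaved with <ref> elements; both kinds of child
+# are appended to content_ so their original order is preserved on export.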
+class linkedTextType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+    def __init__(self, ref=None, mixedclass_=None, content_=None):
+        # Keep the ref argument so hasContent_() below has something to test.
+        self.ref = ref
+        if mixedclass_ is None:
+            self.mixedclass_ = MixedContainer
+        else:
+            self.mixedclass_ = mixedclass_
+        if content_ is None:
+            self.content_ = []
+        else:
+            self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if linkedTextType.subclass:
+ return linkedTextType.subclass(*args_, **kwargs_)
+ else:
+ return linkedTextType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_ref(self): return self.ref
+ def set_ref(self, ref): self.ref = ref
+ def add_ref(self, value): self.ref.append(value)
+ def insert_ref(self, index, value): self.ref[index] = value
+ def export(self, outfile, level, namespace_='', name_='linkedTextType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='linkedTextType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='linkedTextType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='linkedTextType'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.ref is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='linkedTextType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('content_ = [\n')
+ for item_ in self.content_:
+ item_.exportLiteral(outfile, level, name_)
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'ref':
+ childobj_ = docRefTextType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'ref', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class linkedTextType
+
+
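+# graphType, nodeType and childnodeType below presumably model the generated
+# dependency/collaboration graphs: a graph is a list of nodes, and each node
+# carries a label, an optional link and its child-node edges with edge labels.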
+class graphType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, node=None):
+ if node is None:
+ self.node = []
+ else:
+ self.node = node
+ def factory(*args_, **kwargs_):
+ if graphType.subclass:
+ return graphType.subclass(*args_, **kwargs_)
+ else:
+ return graphType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_node(self): return self.node
+ def set_node(self, node): self.node = node
+ def add_node(self, value): self.node.append(value)
+ def insert_node(self, index, value): self.node[index] = value
+ def export(self, outfile, level, namespace_='', name_='graphType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='graphType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='graphType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='graphType'):
+ for node_ in self.node:
+ node_.export(outfile, level, namespace_, name_='node')
+ def hasContent_(self):
+ if (
+ self.node is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='graphType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('node=[\n')
+ level += 1
+ for node in self.node:
+ showIndent(outfile, level)
+ outfile.write('model_.node(\n')
+ node.exportLiteral(outfile, level, name_='node')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'node':
+ obj_ = nodeType.factory()
+ obj_.build(child_)
+ self.node.append(obj_)
+# end class graphType
+
+
+class nodeType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, label=None, link=None, childnode=None):
+ self.id = id
+ self.label = label
+ self.link = link
+ if childnode is None:
+ self.childnode = []
+ else:
+ self.childnode = childnode
+ def factory(*args_, **kwargs_):
+ if nodeType.subclass:
+ return nodeType.subclass(*args_, **kwargs_)
+ else:
+ return nodeType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_label(self): return self.label
+ def set_label(self, label): self.label = label
+ def get_link(self): return self.link
+ def set_link(self, link): self.link = link
+ def get_childnode(self): return self.childnode
+ def set_childnode(self, childnode): self.childnode = childnode
+ def add_childnode(self, value): self.childnode.append(value)
+ def insert_childnode(self, index, value): self.childnode[index] = value
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='nodeType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='nodeType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='nodeType'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='nodeType'):
+ if self.label is not None:
+ showIndent(outfile, level)
+ outfile.write('<%slabel>%s</%slabel>\n' % (namespace_, self.format_string(quote_xml(self.label).encode(ExternalEncoding), input_name='label'), namespace_))
+ if self.link:
+ self.link.export(outfile, level, namespace_, name_='link')
+ for childnode_ in self.childnode:
+ childnode_.export(outfile, level, namespace_, name_='childnode')
+ def hasContent_(self):
+ if (
+ self.label is not None or
+ self.link is not None or
+ self.childnode is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='nodeType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('label=%s,\n' % quote_python(self.label).encode(ExternalEncoding))
+ if self.link:
+ showIndent(outfile, level)
+ outfile.write('link=model_.linkType(\n')
+ self.link.exportLiteral(outfile, level, name_='link')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('childnode=[\n')
+ level += 1
+ for childnode in self.childnode:
+ showIndent(outfile, level)
+ outfile.write('model_.childnode(\n')
+ childnode.exportLiteral(outfile, level, name_='childnode')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'label':
+ label_ = ''
+ for text__content_ in child_.childNodes:
+ label_ += text__content_.nodeValue
+ self.label = label_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'link':
+ obj_ = linkType.factory()
+ obj_.build(child_)
+ self.set_link(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'childnode':
+ obj_ = childnodeType.factory()
+ obj_.build(child_)
+ self.childnode.append(obj_)
+# end class nodeType
+
+
+class label(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if label.subclass:
+ return label.subclass(*args_, **kwargs_)
+ else:
+ return label(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='label', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='label')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='label'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='label'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='label'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class label
+
+
+class childnodeType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, relation=None, refid=None, edgelabel=None):
+ self.relation = relation
+ self.refid = refid
+ if edgelabel is None:
+ self.edgelabel = []
+ else:
+ self.edgelabel = edgelabel
+ def factory(*args_, **kwargs_):
+ if childnodeType.subclass:
+ return childnodeType.subclass(*args_, **kwargs_)
+ else:
+ return childnodeType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_edgelabel(self): return self.edgelabel
+ def set_edgelabel(self, edgelabel): self.edgelabel = edgelabel
+ def add_edgelabel(self, value): self.edgelabel.append(value)
+ def insert_edgelabel(self, index, value): self.edgelabel[index] = value
+ def get_relation(self): return self.relation
+ def set_relation(self, relation): self.relation = relation
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def export(self, outfile, level, namespace_='', name_='childnodeType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='childnodeType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='childnodeType'):
+ if self.relation is not None:
+ outfile.write(' relation=%s' % (quote_attrib(self.relation), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='childnodeType'):
+ for edgelabel_ in self.edgelabel:
+ showIndent(outfile, level)
+ outfile.write('<%sedgelabel>%s</%sedgelabel>\n' % (namespace_, self.format_string(quote_xml(edgelabel_).encode(ExternalEncoding), input_name='edgelabel'), namespace_))
+ def hasContent_(self):
+ if (
+ self.edgelabel is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='childnodeType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.relation is not None:
+ showIndent(outfile, level)
+ outfile.write('relation = "%s",\n' % (self.relation,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('edgelabel=[\n')
+ level += 1
+ for edgelabel in self.edgelabel:
+ showIndent(outfile, level)
+ outfile.write('%s,\n' % quote_python(edgelabel).encode(ExternalEncoding))
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('relation'):
+ self.relation = attrs.get('relation').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'edgelabel':
+ edgelabel_ = ''
+ for text__content_ in child_.childNodes:
+ edgelabel_ += text__content_.nodeValue
+ self.edgelabel.append(edgelabel_)
+# end class childnodeType
+
+
+class edgelabel(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if edgelabel.subclass:
+ return edgelabel.subclass(*args_, **kwargs_)
+ else:
+ return edgelabel(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='edgelabel', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='edgelabel')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='edgelabel'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='edgelabel'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='edgelabel'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class edgelabel
+
+
+class linkType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, refid=None, external=None, valueOf_=''):
+ self.refid = refid
+ self.external = external
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if linkType.subclass:
+ return linkType.subclass(*args_, **kwargs_)
+ else:
+ return linkType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def get_external(self): return self.external
+ def set_external(self, external): self.external = external
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='linkType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='linkType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='linkType'):
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ if self.external is not None:
+ outfile.write(' external=%s' % (self.format_string(quote_attrib(self.external).encode(ExternalEncoding), input_name='external'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='linkType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='linkType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ if self.external is not None:
+ showIndent(outfile, level)
+ outfile.write('external = %s,\n' % (self.external,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ if attrs.get('external'):
+ self.external = attrs.get('external').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class linkType
+
+
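+# listingType, codelineType and highlightType represent a program listing:
+# a listing is a sequence of code lines, each line records its line number and
+# cross-reference attributes and owns a list of highlight spans.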
+class listingType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, codeline=None):
+ if codeline is None:
+ self.codeline = []
+ else:
+ self.codeline = codeline
+ def factory(*args_, **kwargs_):
+ if listingType.subclass:
+ return listingType.subclass(*args_, **kwargs_)
+ else:
+ return listingType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_codeline(self): return self.codeline
+ def set_codeline(self, codeline): self.codeline = codeline
+ def add_codeline(self, value): self.codeline.append(value)
+ def insert_codeline(self, index, value): self.codeline[index] = value
+ def export(self, outfile, level, namespace_='', name_='listingType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='listingType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='listingType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='listingType'):
+ for codeline_ in self.codeline:
+ codeline_.export(outfile, level, namespace_, name_='codeline')
+ def hasContent_(self):
+ if (
+ self.codeline is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='listingType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('codeline=[\n')
+ level += 1
+ for codeline in self.codeline:
+ showIndent(outfile, level)
+ outfile.write('model_.codeline(\n')
+ codeline.exportLiteral(outfile, level, name_='codeline')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'codeline':
+ obj_ = codelineType.factory()
+ obj_.build(child_)
+ self.codeline.append(obj_)
+# end class listingType
+
+
+class codelineType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, external=None, lineno=None, refkind=None, refid=None, highlight=None):
+ self.external = external
+ self.lineno = lineno
+ self.refkind = refkind
+ self.refid = refid
+ if highlight is None:
+ self.highlight = []
+ else:
+ self.highlight = highlight
+ def factory(*args_, **kwargs_):
+ if codelineType.subclass:
+ return codelineType.subclass(*args_, **kwargs_)
+ else:
+ return codelineType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_highlight(self): return self.highlight
+ def set_highlight(self, highlight): self.highlight = highlight
+ def add_highlight(self, value): self.highlight.append(value)
+ def insert_highlight(self, index, value): self.highlight[index] = value
+ def get_external(self): return self.external
+ def set_external(self, external): self.external = external
+ def get_lineno(self): return self.lineno
+ def set_lineno(self, lineno): self.lineno = lineno
+ def get_refkind(self): return self.refkind
+ def set_refkind(self, refkind): self.refkind = refkind
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def export(self, outfile, level, namespace_='', name_='codelineType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='codelineType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='codelineType'):
+ if self.external is not None:
+ outfile.write(' external=%s' % (quote_attrib(self.external), ))
+ if self.lineno is not None:
+ outfile.write(' lineno="%s"' % self.format_integer(self.lineno, input_name='lineno'))
+ if self.refkind is not None:
+ outfile.write(' refkind=%s' % (quote_attrib(self.refkind), ))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='codelineType'):
+ for highlight_ in self.highlight:
+ highlight_.export(outfile, level, namespace_, name_='highlight')
+ def hasContent_(self):
+ if (
+ self.highlight is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='codelineType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.external is not None:
+ showIndent(outfile, level)
+ outfile.write('external = "%s",\n' % (self.external,))
+ if self.lineno is not None:
+ showIndent(outfile, level)
+ outfile.write('lineno = %s,\n' % (self.lineno,))
+ if self.refkind is not None:
+ showIndent(outfile, level)
+ outfile.write('refkind = "%s",\n' % (self.refkind,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('highlight=[\n')
+ level += 1
+ for highlight in self.highlight:
+ showIndent(outfile, level)
+ outfile.write('model_.highlight(\n')
+ highlight.exportLiteral(outfile, level, name_='highlight')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('external'):
+ self.external = attrs.get('external').value
+ if attrs.get('lineno'):
+ try:
+ self.lineno = int(attrs.get('lineno').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (lineno): %s' % exp)
+ if attrs.get('refkind'):
+ self.refkind = attrs.get('refkind').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'highlight':
+ obj_ = highlightType.factory()
+ obj_.build(child_)
+ self.highlight.append(obj_)
+# end class codelineType
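+
+# Illustrative example (assumption, mirroring buildAttributes above): lineno
+# is converted to int and a malformed value raises ValueError, while refid,
+# refkind and external stay strings.
+#
+#   from xml.dom import minidom
+#   node = minidom.parseString(
+#       '<codeline lineno="42" refid="classfoo" refkind="compound"/>')
+#   cl = codelineType.factory()
+#   cl.build(node.documentElement)
+#   assert cl.get_lineno() == 42 and cl.get_refid() == 'classfoo'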
+
+
+class highlightType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, classxx=None, sp=None, ref=None, mixedclass_=None, content_=None):
+ self.classxx = classxx
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if highlightType.subclass:
+ return highlightType.subclass(*args_, **kwargs_)
+ else:
+ return highlightType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_sp(self): return self.sp
+ def set_sp(self, sp): self.sp = sp
+ def add_sp(self, value): self.sp.append(value)
+ def insert_sp(self, index, value): self.sp[index] = value
+ def get_ref(self): return self.ref
+ def set_ref(self, ref): self.ref = ref
+ def add_ref(self, value): self.ref.append(value)
+ def insert_ref(self, index, value): self.ref[index] = value
+ def get_class(self): return self.classxx
+ def set_class(self, classxx): self.classxx = classxx
+ def export(self, outfile, level, namespace_='', name_='highlightType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='highlightType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='highlightType'):
+ if self.classxx is not None:
+ outfile.write(' class=%s' % (quote_attrib(self.classxx), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='highlightType'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.sp is not None or
+ self.ref is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='highlightType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.classxx is not None:
+ showIndent(outfile, level)
+ outfile.write('classxx = "%s",\n' % (self.classxx,))
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('class'):
+ self.classxx = attrs.get('class').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sp':
+ value_ = []
+ for text_ in child_.childNodes:
+ value_.append(text_.nodeValue)
+ valuestr_ = ''.join(value_)
+ obj_ = self.mixedclass_(MixedContainer.CategorySimple,
+ MixedContainer.TypeString, 'sp', valuestr_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'ref':
+ childobj_ = docRefTextType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'ref', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class highlightType
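+
+# Note (descriptive comment added for clarity): highlightType is a
+# mixed-content element. buildChildren() above stores interleaved text,
+# <sp> and <ref> children as MixedContainer entries in self.content_ in
+# document order, and exportChildren() replays them in that same order.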
+
+
+class sp(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if sp.subclass:
+ return sp.subclass(*args_, **kwargs_)
+ else:
+ return sp(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='sp', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='sp')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='sp'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='sp'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='sp'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class sp
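+
+# Note on the CDATA convention (descriptive comment added for clarity): in
+# sp and the value-bearing classes that follow, buildChildren() records a
+# CDATA section as literal '![CDATA[...]]' text in valueOf_, and
+# exportChildren() re-expands that marker into a real <![CDATA[...]]>
+# section when writing XML back out.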
+
+
+class referenceType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, endline=None, startline=None, refid=None, compoundref=None, valueOf_='', mixedclass_=None, content_=None):
+ self.endline = endline
+ self.startline = startline
+ self.refid = refid
+        self.compoundref = compoundref
+        self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if referenceType.subclass:
+ return referenceType.subclass(*args_, **kwargs_)
+ else:
+ return referenceType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_endline(self): return self.endline
+ def set_endline(self, endline): self.endline = endline
+ def get_startline(self): return self.startline
+ def set_startline(self, startline): self.startline = startline
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def get_compoundref(self): return self.compoundref
+ def set_compoundref(self, compoundref): self.compoundref = compoundref
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='referenceType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='referenceType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='referenceType'):
+ if self.endline is not None:
+ outfile.write(' endline="%s"' % self.format_integer(self.endline, input_name='endline'))
+ if self.startline is not None:
+ outfile.write(' startline="%s"' % self.format_integer(self.startline, input_name='startline'))
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ if self.compoundref is not None:
+ outfile.write(' compoundref=%s' % (self.format_string(quote_attrib(self.compoundref).encode(ExternalEncoding), input_name='compoundref'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='referenceType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='referenceType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.endline is not None:
+ showIndent(outfile, level)
+ outfile.write('endline = %s,\n' % (self.endline,))
+ if self.startline is not None:
+ showIndent(outfile, level)
+ outfile.write('startline = %s,\n' % (self.startline,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ if self.compoundref is not None:
+ showIndent(outfile, level)
+ outfile.write('compoundref = %s,\n' % (self.compoundref,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('endline'):
+ try:
+ self.endline = int(attrs.get('endline').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (endline): %s' % exp)
+ if attrs.get('startline'):
+ try:
+ self.startline = int(attrs.get('startline').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (startline): %s' % exp)
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ if attrs.get('compoundref'):
+ self.compoundref = attrs.get('compoundref').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class referenceType
+
+
+class locationType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, bodystart=None, line=None, bodyend=None, bodyfile=None, file=None, valueOf_=''):
+ self.bodystart = bodystart
+ self.line = line
+ self.bodyend = bodyend
+ self.bodyfile = bodyfile
+ self.file = file
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if locationType.subclass:
+ return locationType.subclass(*args_, **kwargs_)
+ else:
+ return locationType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_bodystart(self): return self.bodystart
+ def set_bodystart(self, bodystart): self.bodystart = bodystart
+ def get_line(self): return self.line
+ def set_line(self, line): self.line = line
+ def get_bodyend(self): return self.bodyend
+ def set_bodyend(self, bodyend): self.bodyend = bodyend
+ def get_bodyfile(self): return self.bodyfile
+ def set_bodyfile(self, bodyfile): self.bodyfile = bodyfile
+ def get_file(self): return self.file
+ def set_file(self, file): self.file = file
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='locationType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='locationType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='locationType'):
+ if self.bodystart is not None:
+ outfile.write(' bodystart="%s"' % self.format_integer(self.bodystart, input_name='bodystart'))
+ if self.line is not None:
+ outfile.write(' line="%s"' % self.format_integer(self.line, input_name='line'))
+ if self.bodyend is not None:
+ outfile.write(' bodyend="%s"' % self.format_integer(self.bodyend, input_name='bodyend'))
+ if self.bodyfile is not None:
+ outfile.write(' bodyfile=%s' % (self.format_string(quote_attrib(self.bodyfile).encode(ExternalEncoding), input_name='bodyfile'), ))
+ if self.file is not None:
+ outfile.write(' file=%s' % (self.format_string(quote_attrib(self.file).encode(ExternalEncoding), input_name='file'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='locationType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='locationType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.bodystart is not None:
+ showIndent(outfile, level)
+ outfile.write('bodystart = %s,\n' % (self.bodystart,))
+ if self.line is not None:
+ showIndent(outfile, level)
+ outfile.write('line = %s,\n' % (self.line,))
+ if self.bodyend is not None:
+ showIndent(outfile, level)
+ outfile.write('bodyend = %s,\n' % (self.bodyend,))
+ if self.bodyfile is not None:
+ showIndent(outfile, level)
+ outfile.write('bodyfile = %s,\n' % (self.bodyfile,))
+ if self.file is not None:
+ showIndent(outfile, level)
+ outfile.write('file = %s,\n' % (self.file,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('bodystart'):
+ try:
+ self.bodystart = int(attrs.get('bodystart').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (bodystart): %s' % exp)
+ if attrs.get('line'):
+ try:
+ self.line = int(attrs.get('line').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (line): %s' % exp)
+ if attrs.get('bodyend'):
+ try:
+ self.bodyend = int(attrs.get('bodyend').value)
+            except ValueError as exp:
+ raise ValueError('Bad integer attribute (bodyend): %s' % exp)
+ if attrs.get('bodyfile'):
+ self.bodyfile = attrs.get('bodyfile').value
+ if attrs.get('file'):
+ self.file = attrs.get('file').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class locationType
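+
+# Usage sketch (illustrative only; assumes the helpers defined earlier in
+# this module, e.g. showIndent and the GeneratedsSuper formatters):
+# locationType carries the source-location attributes recorded for a
+# documented entity.
+#
+#   import sys
+#   loc = locationType(file='foo.hpp', line=12, bodyfile='foo.cpp',
+#                      bodystart=40, bodyend=55)
+#   loc.export(sys.stdout, 0, name_='location')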
+
+
+class docSect1Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, title=None, para=None, sect2=None, internal=None, mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docSect1Type.subclass:
+ return docSect1Type.subclass(*args_, **kwargs_)
+ else:
+ return docSect1Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect2(self): return self.sect2
+ def set_sect2(self, sect2): self.sect2 = sect2
+ def add_sect2(self, value): self.sect2.append(value)
+ def insert_sect2(self, index, value): self.sect2[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='docSect1Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docSect1Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docSect1Type'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docSect1Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None or
+ self.sect2 is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docSect1Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ childobj_ = docTitleType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'title', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect2':
+ childobj_ = docSect2Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect2', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ childobj_ = docInternalS1Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'internal', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docSect1Type
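+
+# Note (descriptive comment added for clarity): docSect1Type through
+# docSect4Type mirror nested <sect1>..<sect4> documentation sections. Each
+# buildChildren() dispatches <title>, <para>, the next-deeper <sectN> and
+# <internal> children to their generated classes and appends them to
+# content_ as MixedContainer entries, so export() preserves document order.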
+
+
+class docSect2Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, title=None, para=None, sect3=None, internal=None, mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docSect2Type.subclass:
+ return docSect2Type.subclass(*args_, **kwargs_)
+ else:
+ return docSect2Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect3(self): return self.sect3
+ def set_sect3(self, sect3): self.sect3 = sect3
+ def add_sect3(self, value): self.sect3.append(value)
+ def insert_sect3(self, index, value): self.sect3[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='docSect2Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docSect2Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docSect2Type'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docSect2Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None or
+ self.sect3 is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docSect2Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ childobj_ = docTitleType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'title', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect3':
+ childobj_ = docSect3Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect3', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ childobj_ = docInternalS2Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'internal', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docSect2Type
+
+
+class docSect3Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, title=None, para=None, sect4=None, internal=None, mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docSect3Type.subclass:
+ return docSect3Type.subclass(*args_, **kwargs_)
+ else:
+ return docSect3Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect4(self): return self.sect4
+ def set_sect4(self, sect4): self.sect4 = sect4
+ def add_sect4(self, value): self.sect4.append(value)
+ def insert_sect4(self, index, value): self.sect4[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='docSect3Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docSect3Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docSect3Type'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docSect3Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None or
+ self.sect4 is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docSect3Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ childobj_ = docTitleType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'title', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect4':
+ childobj_ = docSect4Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect4', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ childobj_ = docInternalS3Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'internal', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docSect3Type
+
+
+class docSect4Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, title=None, para=None, internal=None, mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docSect4Type.subclass:
+ return docSect4Type.subclass(*args_, **kwargs_)
+ else:
+ return docSect4Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='docSect4Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docSect4Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docSect4Type'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docSect4Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docSect4Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ childobj_ = docTitleType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'title', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ childobj_ = docInternalS4Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'internal', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docSect4Type
+
+
+class docInternalType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None, sect1=None, mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docInternalType.subclass:
+ return docInternalType.subclass(*args_, **kwargs_)
+ else:
+ return docInternalType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect1(self): return self.sect1
+ def set_sect1(self, sect1): self.sect1 = sect1
+ def add_sect1(self, value): self.sect1.append(value)
+ def insert_sect1(self, index, value): self.sect1[index] = value
+ def export(self, outfile, level, namespace_='', name_='docInternalType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docInternalType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docInternalType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docInternalType'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.para is not None or
+ self.sect1 is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docInternalType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect1':
+ childobj_ = docSect1Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect1', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docInternalType
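+
+# Note (descriptive comment added for clarity): docInternalS1Type through
+# docInternalS4Type below play the same role as docInternalType, one per
+# section depth: each accepts <para> children plus, where applicable, the
+# next-deeper section element, storing everything in content_ via
+# MixedContainer.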
+
+
+class docInternalS1Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None, sect2=None, mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docInternalS1Type.subclass:
+ return docInternalS1Type.subclass(*args_, **kwargs_)
+ else:
+ return docInternalS1Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect2(self): return self.sect2
+ def set_sect2(self, sect2): self.sect2 = sect2
+ def add_sect2(self, value): self.sect2.append(value)
+ def insert_sect2(self, index, value): self.sect2[index] = value
+ def export(self, outfile, level, namespace_='', name_='docInternalS1Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docInternalS1Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docInternalS1Type'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docInternalS1Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.para is not None or
+ self.sect2 is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docInternalS1Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect2':
+ childobj_ = docSect2Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect2', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docInternalS1Type
+
+
+class docInternalS2Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docInternalS2Type.subclass:
+ return docInternalS2Type.subclass(*args_, **kwargs_)
+ else:
+ return docInternalS2Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect3(self): return self.sect3
+ def set_sect3(self, sect3): self.sect3 = sect3
+ def add_sect3(self, value): self.sect3.append(value)
+ def insert_sect3(self, index, value): self.sect3[index] = value
+ def export(self, outfile, level, namespace_='', name_='docInternalS2Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docInternalS2Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docInternalS2Type'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docInternalS2Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.para is not None or
+ self.sect3 is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docInternalS2Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect3':
+ childobj_ = docSect3Type.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'sect3', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docInternalS2Type
+
+
+class docInternalS3Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docInternalS3Type.subclass:
+ return docInternalS3Type.subclass(*args_, **kwargs_)
+ else:
+ return docInternalS3Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect3(self): return self.sect3
+ def set_sect3(self, sect3): self.sect3 = sect3
+ def add_sect3(self, value): self.sect3.append(value)
+ def insert_sect3(self, index, value): self.sect3[index] = value
+ def export(self, outfile, level, namespace_='', name_='docInternalS3Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docInternalS3Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docInternalS3Type'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docInternalS3Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.para is not None or
+ self.sect3 is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docInternalS3Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+        showIndent(outfile, level)
+        outfile.write('content_ = [\n')
+        for item_ in self.content_:
+            item_.exportLiteral(outfile, level, name_)
+        showIndent(outfile, level)
+        outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+        elif child_.nodeType == Node.ELEMENT_NODE and \
+            nodeName_ == 'sect4':
+            childobj_ = docSect4Type.factory()
+            childobj_.build(child_)
+            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+                MixedContainer.TypeNone, 'sect4', childobj_)
+            self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docInternalS3Type
+
+
+class docInternalS4Type(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None, mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docInternalS4Type.subclass:
+ return docInternalS4Type.subclass(*args_, **kwargs_)
+ else:
+ return docInternalS4Type(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def export(self, outfile, level, namespace_='', name_='docInternalS4Type', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docInternalS4Type')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docInternalS4Type'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docInternalS4Type'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.para is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docInternalS4Type'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('content_ = [\n')
+ for item_ in self.content_:
+ item_.exportLiteral(outfile, level, name_)
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ childobj_ = docParaType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'para', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docInternalS4Type
+
+
+class docTitleType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+    def __init__(self, valueOf_='', mixedclass_=None, content_=None):
+        self.valueOf_ = valueOf_
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docTitleType.subclass:
+ return docTitleType.subclass(*args_, **kwargs_)
+ else:
+ return docTitleType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docTitleType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docTitleType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docTitleType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docTitleType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docTitleType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docTitleType
+
+
+class docParaType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_='', mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docParaType.subclass:
+ return docParaType.subclass(*args_, **kwargs_)
+ else:
+ return docParaType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docParaType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docParaType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docParaType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docParaType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docParaType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docParaType
+
+
+class docMarkupType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_='', mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docMarkupType.subclass:
+ return docMarkupType.subclass(*args_, **kwargs_)
+ else:
+ return docMarkupType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docMarkupType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docMarkupType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docMarkupType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docMarkupType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docMarkupType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docMarkupType
+
+
+class docURLLink(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, url=None, valueOf_='', mixedclass_=None, content_=None):
+ self.url = url
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docURLLink.subclass:
+ return docURLLink.subclass(*args_, **kwargs_)
+ else:
+ return docURLLink(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_url(self): return self.url
+ def set_url(self, url): self.url = url
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docURLLink', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docURLLink')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docURLLink'):
+ if self.url is not None:
+ outfile.write(' url=%s' % (self.format_string(quote_attrib(self.url).encode(ExternalEncoding), input_name='url'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docURLLink'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docURLLink'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.url is not None:
+ showIndent(outfile, level)
+ outfile.write('url = %s,\n' % (self.url,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('url'):
+ self.url = attrs.get('url').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docURLLink
+
+
+class docAnchorType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docAnchorType.subclass:
+ return docAnchorType.subclass(*args_, **kwargs_)
+ else:
+ return docAnchorType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docAnchorType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docAnchorType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docAnchorType'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docAnchorType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docAnchorType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docAnchorType
+
+
+class docFormulaType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docFormulaType.subclass:
+ return docFormulaType.subclass(*args_, **kwargs_)
+ else:
+ return docFormulaType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docFormulaType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docFormulaType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docFormulaType'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docFormulaType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docFormulaType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docFormulaType
+
+
+class docIndexEntryType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, primaryie=None, secondaryie=None):
+ self.primaryie = primaryie
+ self.secondaryie = secondaryie
+ def factory(*args_, **kwargs_):
+ if docIndexEntryType.subclass:
+ return docIndexEntryType.subclass(*args_, **kwargs_)
+ else:
+ return docIndexEntryType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_primaryie(self): return self.primaryie
+ def set_primaryie(self, primaryie): self.primaryie = primaryie
+ def get_secondaryie(self): return self.secondaryie
+ def set_secondaryie(self, secondaryie): self.secondaryie = secondaryie
+ def export(self, outfile, level, namespace_='', name_='docIndexEntryType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docIndexEntryType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docIndexEntryType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docIndexEntryType'):
+ if self.primaryie is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sprimaryie>%s</%sprimaryie>\n' % (namespace_, self.format_string(quote_xml(self.primaryie).encode(ExternalEncoding), input_name='primaryie'), namespace_))
+ if self.secondaryie is not None:
+ showIndent(outfile, level)
+ outfile.write('<%ssecondaryie>%s</%ssecondaryie>\n' % (namespace_, self.format_string(quote_xml(self.secondaryie).encode(ExternalEncoding), input_name='secondaryie'), namespace_))
+ def hasContent_(self):
+ if (
+ self.primaryie is not None or
+ self.secondaryie is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docIndexEntryType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('primaryie=%s,\n' % quote_python(self.primaryie).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('secondaryie=%s,\n' % quote_python(self.secondaryie).encode(ExternalEncoding))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'primaryie':
+ primaryie_ = ''
+ for text__content_ in child_.childNodes:
+ primaryie_ += text__content_.nodeValue
+ self.primaryie = primaryie_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'secondaryie':
+ secondaryie_ = ''
+ for text__content_ in child_.childNodes:
+ secondaryie_ += text__content_.nodeValue
+ self.secondaryie = secondaryie_
+# end class docIndexEntryType
+
+
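+# docListType and docListItemType bind itemized/ordered lists: each <listitem>
+# child is parsed into a docListItemType, which in turn collects its <para>
+# children as docParaType instances.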
+class docListType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, listitem=None):
+ if listitem is None:
+ self.listitem = []
+ else:
+ self.listitem = listitem
+ def factory(*args_, **kwargs_):
+ if docListType.subclass:
+ return docListType.subclass(*args_, **kwargs_)
+ else:
+ return docListType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_listitem(self): return self.listitem
+ def set_listitem(self, listitem): self.listitem = listitem
+ def add_listitem(self, value): self.listitem.append(value)
+ def insert_listitem(self, index, value): self.listitem[index] = value
+ def export(self, outfile, level, namespace_='', name_='docListType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docListType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docListType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docListType'):
+ for listitem_ in self.listitem:
+ listitem_.export(outfile, level, namespace_, name_='listitem')
+ def hasContent_(self):
+ if (
+ self.listitem is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docListType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('listitem=[\n')
+ level += 1
+ for listitem in self.listitem:
+ showIndent(outfile, level)
+ outfile.write('model_.listitem(\n')
+ listitem.exportLiteral(outfile, level, name_='listitem')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'listitem':
+ obj_ = docListItemType.factory()
+ obj_.build(child_)
+ self.listitem.append(obj_)
+# end class docListType
+
+
+class docListItemType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, para=None):
+ if para is None:
+ self.para = []
+ else:
+ self.para = para
+ def factory(*args_, **kwargs_):
+ if docListItemType.subclass:
+ return docListItemType.subclass(*args_, **kwargs_)
+ else:
+ return docListItemType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def export(self, outfile, level, namespace_='', name_='docListItemType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docListItemType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docListItemType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docListItemType'):
+ for para_ in self.para:
+ para_.export(outfile, level, namespace_, name_='para')
+ def hasContent_(self):
+ if (
+ self.para is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docListItemType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('para=[\n')
+ level += 1
+ for para in self.para:
+ showIndent(outfile, level)
+ outfile.write('model_.para(\n')
+ para.exportLiteral(outfile, level, name_='para')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ obj_ = docParaType.factory()
+ obj_.build(child_)
+ self.para.append(obj_)
+# end class docListItemType
+
+
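+# docSimpleSectType binds simple sections: the section flavor comes from the
+# "kind" attribute, an optional <title> child is parsed into a docTitleType
+# via set_title(), and any <para> children are appended to self.para.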
+class docSimpleSectType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, title=None, para=None):
+ self.kind = kind
+ self.title = title
+ if para is None:
+ self.para = []
+ else:
+ self.para = para
+ def factory(*args_, **kwargs_):
+ if docSimpleSectType.subclass:
+ return docSimpleSectType.subclass(*args_, **kwargs_)
+ else:
+ return docSimpleSectType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_title(self): return self.title
+ def set_title(self, title): self.title = title
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def export(self, outfile, level, namespace_='', name_='docSimpleSectType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docSimpleSectType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docSimpleSectType'):
+ if self.kind is not None:
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docSimpleSectType'):
+ if self.title:
+ self.title.export(outfile, level, namespace_, name_='title')
+ for para_ in self.para:
+ para_.export(outfile, level, namespace_, name_='para')
+ def hasContent_(self):
+ if (
+ self.title is not None or
+ self.para is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docSimpleSectType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ if self.title:
+ showIndent(outfile, level)
+ outfile.write('title=model_.docTitleType(\n')
+ self.title.exportLiteral(outfile, level, name_='title')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ showIndent(outfile, level)
+ outfile.write('para=[\n')
+ level += 1
+ for para in self.para:
+ showIndent(outfile, level)
+ outfile.write('model_.para(\n')
+ para.exportLiteral(outfile, level, name_='para')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'title':
+ obj_ = docTitleType.factory()
+ obj_.build(child_)
+ self.set_title(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ obj_ = docParaType.factory()
+ obj_.build(child_)
+ self.para.append(obj_)
+# end class docSimpleSectType
+
+
+class docVarListEntryType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, term=None):
+ self.term = term
+ def factory(*args_, **kwargs_):
+ if docVarListEntryType.subclass:
+ return docVarListEntryType.subclass(*args_, **kwargs_)
+ else:
+ return docVarListEntryType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_term(self): return self.term
+ def set_term(self, term): self.term = term
+ def export(self, outfile, level, namespace_='', name_='docVarListEntryType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docVarListEntryType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docVarListEntryType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docVarListEntryType'):
+ if self.term:
+ self.term.export(outfile, level, namespace_, name_='term', )
+ def hasContent_(self):
+ if (
+ self.term is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docVarListEntryType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ if self.term:
+ showIndent(outfile, level)
+ outfile.write('term=model_.docTitleType(\n')
+ self.term.exportLiteral(outfile, level, name_='term')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'term':
+ obj_ = docTitleType.factory()
+ obj_.build(child_)
+ self.set_term(obj_)
+# end class docVarListEntryType
+
+
+class docVariableListType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if docVariableListType.subclass:
+ return docVariableListType.subclass(*args_, **kwargs_)
+ else:
+ return docVariableListType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docVariableListType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docVariableListType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docVariableListType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docVariableListType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docVariableListType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docVariableListType
+
+
+class docRefTextType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None, content_=None):
+ self.refid = refid
+ self.kindref = kindref
+ self.external = external
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docRefTextType.subclass:
+ return docRefTextType.subclass(*args_, **kwargs_)
+ else:
+ return docRefTextType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def get_kindref(self): return self.kindref
+ def set_kindref(self, kindref): self.kindref = kindref
+ def get_external(self): return self.external
+ def set_external(self, external): self.external = external
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docRefTextType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docRefTextType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docRefTextType'):
+ if self.refid is not None:
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ if self.kindref is not None:
+ outfile.write(' kindref=%s' % (quote_attrib(self.kindref), ))
+ if self.external is not None:
+ outfile.write(' external=%s' % (self.format_string(quote_attrib(self.external).encode(ExternalEncoding), input_name='external'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docRefTextType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docRefTextType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ if self.kindref is not None:
+ showIndent(outfile, level)
+ outfile.write('kindref = "%s",\n' % (self.kindref,))
+ if self.external is not None:
+ showIndent(outfile, level)
+ outfile.write('external = %s,\n' % (self.external,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ if attrs.get('kindref'):
+ self.kindref = attrs.get('kindref').value
+ if attrs.get('external'):
+ self.external = attrs.get('external').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docRefTextType
+
+
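+# docTableType, docRowType and docEntryType bind tables: the "rows" and "cols"
+# attributes are parsed as integers (a ValueError is re-raised with context on
+# bad input), each <row> collects its <entry> children, each entry collects
+# its <para> content, and an optional <caption> becomes a docCaptionType.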
+class docTableType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, rows=None, cols=None, row=None, caption=None):
+ self.rows = rows
+ self.cols = cols
+ if row is None:
+ self.row = []
+ else:
+ self.row = row
+ self.caption = caption
+ def factory(*args_, **kwargs_):
+ if docTableType.subclass:
+ return docTableType.subclass(*args_, **kwargs_)
+ else:
+ return docTableType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_row(self): return self.row
+ def set_row(self, row): self.row = row
+ def add_row(self, value): self.row.append(value)
+ def insert_row(self, index, value): self.row[index] = value
+ def get_caption(self): return self.caption
+ def set_caption(self, caption): self.caption = caption
+ def get_rows(self): return self.rows
+ def set_rows(self, rows): self.rows = rows
+ def get_cols(self): return self.cols
+ def set_cols(self, cols): self.cols = cols
+ def export(self, outfile, level, namespace_='', name_='docTableType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docTableType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docTableType'):
+ if self.rows is not None:
+ outfile.write(' rows="%s"' % self.format_integer(self.rows, input_name='rows'))
+ if self.cols is not None:
+ outfile.write(' cols="%s"' % self.format_integer(self.cols, input_name='cols'))
+ def exportChildren(self, outfile, level, namespace_='', name_='docTableType'):
+ for row_ in self.row:
+ row_.export(outfile, level, namespace_, name_='row')
+ if self.caption:
+ self.caption.export(outfile, level, namespace_, name_='caption')
+ def hasContent_(self):
+ if (
+ self.row is not None or
+ self.caption is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docTableType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.rows is not None:
+ showIndent(outfile, level)
+ outfile.write('rows = %s,\n' % (self.rows,))
+ if self.cols is not None:
+ showIndent(outfile, level)
+ outfile.write('cols = %s,\n' % (self.cols,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('row=[\n')
+ level += 1
+ for row in self.row:
+ showIndent(outfile, level)
+ outfile.write('model_.row(\n')
+ row.exportLiteral(outfile, level, name_='row')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.caption:
+ showIndent(outfile, level)
+ outfile.write('caption=model_.docCaptionType(\n')
+ self.caption.exportLiteral(outfile, level, name_='caption')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('rows'):
+ try:
+ self.rows = int(attrs.get('rows').value)
+ except ValueError, exp:
+ raise ValueError('Bad integer attribute (rows): %s' % exp)
+ if attrs.get('cols'):
+ try:
+ self.cols = int(attrs.get('cols').value)
+ except ValueError, exp:
+ raise ValueError('Bad integer attribute (cols): %s' % exp)
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'row':
+ obj_ = docRowType.factory()
+ obj_.build(child_)
+ self.row.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'caption':
+ obj_ = docCaptionType.factory()
+ obj_.build(child_)
+ self.set_caption(obj_)
+# end class docTableType
+
+
+class docRowType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, entry=None):
+ if entry is None:
+ self.entry = []
+ else:
+ self.entry = entry
+ def factory(*args_, **kwargs_):
+ if docRowType.subclass:
+ return docRowType.subclass(*args_, **kwargs_)
+ else:
+ return docRowType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_entry(self): return self.entry
+ def set_entry(self, entry): self.entry = entry
+ def add_entry(self, value): self.entry.append(value)
+ def insert_entry(self, index, value): self.entry[index] = value
+ def export(self, outfile, level, namespace_='', name_='docRowType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docRowType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docRowType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docRowType'):
+ for entry_ in self.entry:
+ entry_.export(outfile, level, namespace_, name_='entry')
+ def hasContent_(self):
+ if (
+ self.entry is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docRowType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('entry=[\n')
+ level += 1
+ for entry in self.entry:
+ showIndent(outfile, level)
+ outfile.write('model_.entry(\n')
+ entry.exportLiteral(outfile, level, name_='entry')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'entry':
+ obj_ = docEntryType.factory()
+ obj_.build(child_)
+ self.entry.append(obj_)
+# end class docRowType
+
+
+class docEntryType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, thead=None, para=None):
+ self.thead = thead
+ if para is None:
+ self.para = []
+ else:
+ self.para = para
+ def factory(*args_, **kwargs_):
+ if docEntryType.subclass:
+ return docEntryType.subclass(*args_, **kwargs_)
+ else:
+ return docEntryType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_thead(self): return self.thead
+ def set_thead(self, thead): self.thead = thead
+ def export(self, outfile, level, namespace_='', name_='docEntryType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docEntryType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docEntryType'):
+ if self.thead is not None:
+ outfile.write(' thead=%s' % (quote_attrib(self.thead), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docEntryType'):
+ for para_ in self.para:
+ para_.export(outfile, level, namespace_, name_='para')
+ def hasContent_(self):
+ if (
+ self.para is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docEntryType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.thead is not None:
+ showIndent(outfile, level)
+ outfile.write('thead = "%s",\n' % (self.thead,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('para=[\n')
+ level += 1
+ for para in self.para:
+ showIndent(outfile, level)
+ outfile.write('model_.para(\n')
+ para.exportLiteral(outfile, level, name_='para')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('thead'):
+ self.thead = attrs.get('thead').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ obj_ = docParaType.factory()
+ obj_.build(child_)
+ self.para.append(obj_)
+# end class docEntryType
+
+
+class docCaptionType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_='', mixedclass_=None, content_=None):
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docCaptionType.subclass:
+ return docCaptionType.subclass(*args_, **kwargs_)
+ else:
+ return docCaptionType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docCaptionType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docCaptionType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docCaptionType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docCaptionType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docCaptionType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docCaptionType
+
+
+class docHeadingType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, level=None, valueOf_='', mixedclass_=None, content_=None):
+ self.level = level
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docHeadingType.subclass:
+ return docHeadingType.subclass(*args_, **kwargs_)
+ else:
+ return docHeadingType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_level(self): return self.level
+ def set_level(self, level): self.level = level
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docHeadingType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docHeadingType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docHeadingType'):
+ if self.level is not None:
+ outfile.write(' level="%s"' % self.format_integer(self.level, input_name='level'))
+ def exportChildren(self, outfile, level, namespace_='', name_='docHeadingType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docHeadingType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.level is not None:
+ showIndent(outfile, level)
+ outfile.write('level = %s,\n' % (self.level,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('level'):
+ try:
+ self.level = int(attrs.get('level').value)
+ except ValueError, exp:
+ raise ValueError('Bad integer attribute (level): %s' % exp)
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docHeadingType
+
+
+class docImageType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, width=None, type_=None, name=None, height=None, valueOf_='', mixedclass_=None, content_=None):
+ self.width = width
+ self.type_ = type_
+ self.name = name
+ self.height = height
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docImageType.subclass:
+ return docImageType.subclass(*args_, **kwargs_)
+ else:
+ return docImageType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_width(self): return self.width
+ def set_width(self, width): self.width = width
+ def get_type(self): return self.type_
+ def set_type(self, type_): self.type_ = type_
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_height(self): return self.height
+ def set_height(self, height): self.height = height
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docImageType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docImageType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docImageType'):
+ if self.width is not None:
+ outfile.write(' width=%s' % (self.format_string(quote_attrib(self.width).encode(ExternalEncoding), input_name='width'), ))
+ if self.type_ is not None:
+ outfile.write(' type=%s' % (quote_attrib(self.type_), ))
+ if self.name is not None:
+ outfile.write(' name=%s' % (self.format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
+ if self.height is not None:
+ outfile.write(' height=%s' % (self.format_string(quote_attrib(self.height).encode(ExternalEncoding), input_name='height'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docImageType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docImageType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.width is not None:
+ showIndent(outfile, level)
+ outfile.write('width = %s,\n' % (self.width,))
+ if self.type_ is not None:
+ showIndent(outfile, level)
+ outfile.write('type_ = "%s",\n' % (self.type_,))
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('name = %s,\n' % (self.name,))
+ if self.height is not None:
+ showIndent(outfile, level)
+ outfile.write('height = %s,\n' % (self.height,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('width'):
+ self.width = attrs.get('width').value
+ if attrs.get('type'):
+ self.type_ = attrs.get('type').value
+ if attrs.get('name'):
+ self.name = attrs.get('name').value
+ if attrs.get('height'):
+ self.height = attrs.get('height').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docImageType
+
+
+class docDotFileType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, name=None, valueOf_='', mixedclass_=None, content_=None):
+ self.name = name
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docDotFileType.subclass:
+ return docDotFileType.subclass(*args_, **kwargs_)
+ else:
+ return docDotFileType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docDotFileType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docDotFileType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docDotFileType'):
+ if self.name is not None:
+ outfile.write(' name=%s' % (self.format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docDotFileType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docDotFileType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('name = %s,\n' % (self.name,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('name'):
+ self.name = attrs.get('name').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docDotFileType
+
+
+class docTocItemType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
+ self.id = id
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docTocItemType.subclass:
+ return docTocItemType.subclass(*args_, **kwargs_)
+ else:
+ return docTocItemType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docTocItemType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docTocItemType')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docTocItemType'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docTocItemType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docTocItemType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docTocItemType
+
+
+class docTocListType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, tocitem=None):
+ if tocitem is None:
+ self.tocitem = []
+ else:
+ self.tocitem = tocitem
+ def factory(*args_, **kwargs_):
+ if docTocListType.subclass:
+ return docTocListType.subclass(*args_, **kwargs_)
+ else:
+ return docTocListType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_tocitem(self): return self.tocitem
+ def set_tocitem(self, tocitem): self.tocitem = tocitem
+ def add_tocitem(self, value): self.tocitem.append(value)
+ def insert_tocitem(self, index, value): self.tocitem[index] = value
+ def export(self, outfile, level, namespace_='', name_='docTocListType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docTocListType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docTocListType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docTocListType'):
+ for tocitem_ in self.tocitem:
+ tocitem_.export(outfile, level, namespace_, name_='tocitem')
+ def hasContent_(self):
+ if (
+ self.tocitem is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docTocListType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('tocitem=[\n')
+ level += 1
+ for tocitem in self.tocitem:
+ showIndent(outfile, level)
+ outfile.write('model_.tocitem(\n')
+ tocitem.exportLiteral(outfile, level, name_='tocitem')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'tocitem':
+ obj_ = docTocItemType.factory()
+ obj_.build(child_)
+ self.tocitem.append(obj_)
+# end class docTocListType
+
+
+class docLanguageType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, langid=None, para=None):
+ self.langid = langid
+ if para is None:
+ self.para = []
+ else:
+ self.para = para
+ def factory(*args_, **kwargs_):
+ if docLanguageType.subclass:
+ return docLanguageType.subclass(*args_, **kwargs_)
+ else:
+ return docLanguageType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_langid(self): return self.langid
+ def set_langid(self, langid): self.langid = langid
+ def export(self, outfile, level, namespace_='', name_='docLanguageType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docLanguageType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docLanguageType'):
+ if self.langid is not None:
+ outfile.write(' langid=%s' % (self.format_string(quote_attrib(self.langid).encode(ExternalEncoding), input_name='langid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docLanguageType'):
+ for para_ in self.para:
+ para_.export(outfile, level, namespace_, name_='para')
+ def hasContent_(self):
+ if (
+ self.para is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docLanguageType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.langid is not None:
+ showIndent(outfile, level)
+ outfile.write('langid = %s,\n' % (self.langid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('para=[\n')
+ level += 1
+ for para in self.para:
+ showIndent(outfile, level)
+ outfile.write('model_.para(\n')
+ para.exportLiteral(outfile, level, name_='para')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('langid'):
+ self.langid = attrs.get('langid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ obj_ = docParaType.factory()
+ obj_.build(child_)
+ self.para.append(obj_)
+# end class docLanguageType
+
+
+class docParamListType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, parameteritem=None):
+ self.kind = kind
+ if parameteritem is None:
+ self.parameteritem = []
+ else:
+ self.parameteritem = parameteritem
+ def factory(*args_, **kwargs_):
+ if docParamListType.subclass:
+ return docParamListType.subclass(*args_, **kwargs_)
+ else:
+ return docParamListType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_parameteritem(self): return self.parameteritem
+ def set_parameteritem(self, parameteritem): self.parameteritem = parameteritem
+ def add_parameteritem(self, value): self.parameteritem.append(value)
+ def insert_parameteritem(self, index, value): self.parameteritem[index] = value
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def export(self, outfile, level, namespace_='', name_='docParamListType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docParamListType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docParamListType'):
+ if self.kind is not None:
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docParamListType'):
+ for parameteritem_ in self.parameteritem:
+ parameteritem_.export(outfile, level, namespace_, name_='parameteritem')
+ def hasContent_(self):
+ if (
+ self.parameteritem is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docParamListType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('parameteritem=[\n')
+ level += 1
+ for parameteritem in self.parameteritem:
+ showIndent(outfile, level)
+ outfile.write('model_.parameteritem(\n')
+ parameteritem.exportLiteral(outfile, level, name_='parameteritem')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'parameteritem':
+ obj_ = docParamListItem.factory()
+ obj_.build(child_)
+ self.parameteritem.append(obj_)
+# end class docParamListType
+
+
+class docParamListItem(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, parameternamelist=None, parameterdescription=None):
+ if parameternamelist is None:
+ self.parameternamelist = []
+ else:
+ self.parameternamelist = parameternamelist
+ self.parameterdescription = parameterdescription
+ def factory(*args_, **kwargs_):
+ if docParamListItem.subclass:
+ return docParamListItem.subclass(*args_, **kwargs_)
+ else:
+ return docParamListItem(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_parameternamelist(self): return self.parameternamelist
+ def set_parameternamelist(self, parameternamelist): self.parameternamelist = parameternamelist
+ def add_parameternamelist(self, value): self.parameternamelist.append(value)
+ def insert_parameternamelist(self, index, value): self.parameternamelist[index] = value
+ def get_parameterdescription(self): return self.parameterdescription
+ def set_parameterdescription(self, parameterdescription): self.parameterdescription = parameterdescription
+ def export(self, outfile, level, namespace_='', name_='docParamListItem', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docParamListItem')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docParamListItem'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docParamListItem'):
+ for parameternamelist_ in self.parameternamelist:
+ parameternamelist_.export(outfile, level, namespace_, name_='parameternamelist')
+ if self.parameterdescription:
+ self.parameterdescription.export(outfile, level, namespace_, name_='parameterdescription', )
+ def hasContent_(self):
+ if (
+ self.parameternamelist is not None or
+ self.parameterdescription is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docParamListItem'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('parameternamelist=[\n')
+ level += 1
+ for parameternamelist in self.parameternamelist:
+ showIndent(outfile, level)
+ outfile.write('model_.parameternamelist(\n')
+ parameternamelist.exportLiteral(outfile, level, name_='parameternamelist')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.parameterdescription:
+ showIndent(outfile, level)
+ outfile.write('parameterdescription=model_.descriptionType(\n')
+ self.parameterdescription.exportLiteral(outfile, level, name_='parameterdescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'parameternamelist':
+ obj_ = docParamNameList.factory()
+ obj_.build(child_)
+ self.parameternamelist.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'parameterdescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_parameterdescription(obj_)
+# end class docParamListItem
+
+
+class docParamNameList(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, parametername=None):
+ if parametername is None:
+ self.parametername = []
+ else:
+ self.parametername = parametername
+ def factory(*args_, **kwargs_):
+ if docParamNameList.subclass:
+ return docParamNameList.subclass(*args_, **kwargs_)
+ else:
+ return docParamNameList(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_parametername(self): return self.parametername
+ def set_parametername(self, parametername): self.parametername = parametername
+ def add_parametername(self, value): self.parametername.append(value)
+ def insert_parametername(self, index, value): self.parametername[index] = value
+ def export(self, outfile, level, namespace_='', name_='docParamNameList', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docParamNameList')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docParamNameList'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docParamNameList'):
+ for parametername_ in self.parametername:
+ parametername_.export(outfile, level, namespace_, name_='parametername')
+ def hasContent_(self):
+ if (
+ self.parametername is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docParamNameList'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('parametername=[\n')
+ level += 1
+ for parametername in self.parametername:
+ showIndent(outfile, level)
+ outfile.write('model_.parametername(\n')
+ parametername.exportLiteral(outfile, level, name_='parametername')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'parametername':
+ obj_ = docParamName.factory()
+ obj_.build(child_)
+ self.parametername.append(obj_)
+# end class docParamNameList
+
+
+class docParamName(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, direction=None, ref=None, mixedclass_=None, content_=None):
+        self.direction = direction
+        self.ref = ref  # store the constructor argument so get_ref()/hasContent_() do not fail
+ if mixedclass_ is None:
+ self.mixedclass_ = MixedContainer
+ else:
+ self.mixedclass_ = mixedclass_
+ if content_ is None:
+ self.content_ = []
+ else:
+ self.content_ = content_
+ def factory(*args_, **kwargs_):
+ if docParamName.subclass:
+ return docParamName.subclass(*args_, **kwargs_)
+ else:
+ return docParamName(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_ref(self): return self.ref
+ def set_ref(self, ref): self.ref = ref
+ def get_direction(self): return self.direction
+ def set_direction(self, direction): self.direction = direction
+ def export(self, outfile, level, namespace_='', name_='docParamName', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docParamName')
+ outfile.write('>')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ def exportAttributes(self, outfile, level, namespace_='', name_='docParamName'):
+ if self.direction is not None:
+ outfile.write(' direction=%s' % (quote_attrib(self.direction), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docParamName'):
+ for item_ in self.content_:
+ item_.export(outfile, level, item_.name, namespace_)
+ def hasContent_(self):
+ if (
+ self.ref is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docParamName'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.direction is not None:
+ showIndent(outfile, level)
+ outfile.write('direction = "%s",\n' % (self.direction,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('content_ = [\n')
+ for item_ in self.content_:
+ item_.exportLiteral(outfile, level, name_)
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('direction'):
+ self.direction = attrs.get('direction').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'ref':
+ childobj_ = docRefTextType.factory()
+ childobj_.build(child_)
+ obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
+ MixedContainer.TypeNone, 'ref', childobj_)
+ self.content_.append(obj_)
+ elif child_.nodeType == Node.TEXT_NODE:
+ obj_ = self.mixedclass_(MixedContainer.CategoryText,
+ MixedContainer.TypeNone, '', child_.nodeValue)
+ self.content_.append(obj_)
+# end class docParamName
+
+
+class docXRefSectType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, id=None, xreftitle=None, xrefdescription=None):
+ self.id = id
+ if xreftitle is None:
+ self.xreftitle = []
+ else:
+ self.xreftitle = xreftitle
+ self.xrefdescription = xrefdescription
+ def factory(*args_, **kwargs_):
+ if docXRefSectType.subclass:
+ return docXRefSectType.subclass(*args_, **kwargs_)
+ else:
+ return docXRefSectType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_xreftitle(self): return self.xreftitle
+ def set_xreftitle(self, xreftitle): self.xreftitle = xreftitle
+ def add_xreftitle(self, value): self.xreftitle.append(value)
+ def insert_xreftitle(self, index, value): self.xreftitle[index] = value
+ def get_xrefdescription(self): return self.xrefdescription
+ def set_xrefdescription(self, xrefdescription): self.xrefdescription = xrefdescription
+ def get_id(self): return self.id
+ def set_id(self, id): self.id = id
+ def export(self, outfile, level, namespace_='', name_='docXRefSectType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docXRefSectType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docXRefSectType'):
+ if self.id is not None:
+ outfile.write(' id=%s' % (self.format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docXRefSectType'):
+ for xreftitle_ in self.xreftitle:
+ showIndent(outfile, level)
+ outfile.write('<%sxreftitle>%s</%sxreftitle>\n' % (namespace_, self.format_string(quote_xml(xreftitle_).encode(ExternalEncoding), input_name='xreftitle'), namespace_))
+ if self.xrefdescription:
+ self.xrefdescription.export(outfile, level, namespace_, name_='xrefdescription', )
+ def hasContent_(self):
+ if (
+ self.xreftitle is not None or
+ self.xrefdescription is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docXRefSectType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.id is not None:
+ showIndent(outfile, level)
+ outfile.write('id = %s,\n' % (self.id,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('xreftitle=[\n')
+ level += 1
+ for xreftitle in self.xreftitle:
+ showIndent(outfile, level)
+ outfile.write('%s,\n' % quote_python(xreftitle).encode(ExternalEncoding))
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.xrefdescription:
+ showIndent(outfile, level)
+ outfile.write('xrefdescription=model_.descriptionType(\n')
+ self.xrefdescription.exportLiteral(outfile, level, name_='xrefdescription')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('id'):
+ self.id = attrs.get('id').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'xreftitle':
+ xreftitle_ = ''
+ for text__content_ in child_.childNodes:
+ xreftitle_ += text__content_.nodeValue
+ self.xreftitle.append(xreftitle_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'xrefdescription':
+ obj_ = descriptionType.factory()
+ obj_.build(child_)
+ self.set_xrefdescription(obj_)
+# end class docXRefSectType
+
+
+class docCopyType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, link=None, para=None, sect1=None, internal=None):
+ self.link = link
+ if para is None:
+ self.para = []
+ else:
+ self.para = para
+ if sect1 is None:
+ self.sect1 = []
+ else:
+ self.sect1 = sect1
+ self.internal = internal
+ def factory(*args_, **kwargs_):
+ if docCopyType.subclass:
+ return docCopyType.subclass(*args_, **kwargs_)
+ else:
+ return docCopyType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_para(self): return self.para
+ def set_para(self, para): self.para = para
+ def add_para(self, value): self.para.append(value)
+ def insert_para(self, index, value): self.para[index] = value
+ def get_sect1(self): return self.sect1
+ def set_sect1(self, sect1): self.sect1 = sect1
+ def add_sect1(self, value): self.sect1.append(value)
+ def insert_sect1(self, index, value): self.sect1[index] = value
+ def get_internal(self): return self.internal
+ def set_internal(self, internal): self.internal = internal
+ def get_link(self): return self.link
+ def set_link(self, link): self.link = link
+ def export(self, outfile, level, namespace_='', name_='docCopyType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docCopyType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docCopyType'):
+ if self.link is not None:
+ outfile.write(' link=%s' % (self.format_string(quote_attrib(self.link).encode(ExternalEncoding), input_name='link'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docCopyType'):
+ for para_ in self.para:
+ para_.export(outfile, level, namespace_, name_='para')
+ for sect1_ in self.sect1:
+ sect1_.export(outfile, level, namespace_, name_='sect1')
+ if self.internal:
+ self.internal.export(outfile, level, namespace_, name_='internal')
+ def hasContent_(self):
+ if (
+ self.para is not None or
+ self.sect1 is not None or
+ self.internal is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docCopyType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.link is not None:
+ showIndent(outfile, level)
+ outfile.write('link = %s,\n' % (self.link,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('para=[\n')
+ level += 1
+ for para in self.para:
+ showIndent(outfile, level)
+ outfile.write('model_.para(\n')
+ para.exportLiteral(outfile, level, name_='para')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ showIndent(outfile, level)
+ outfile.write('sect1=[\n')
+ level += 1
+ for sect1 in self.sect1:
+ showIndent(outfile, level)
+ outfile.write('model_.sect1(\n')
+ sect1.exportLiteral(outfile, level, name_='sect1')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ if self.internal:
+ showIndent(outfile, level)
+ outfile.write('internal=model_.docInternalType(\n')
+ self.internal.exportLiteral(outfile, level, name_='internal')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('link'):
+ self.link = attrs.get('link').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'para':
+ obj_ = docParaType.factory()
+ obj_.build(child_)
+ self.para.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'sect1':
+ obj_ = docSect1Type.factory()
+ obj_.build(child_)
+ self.sect1.append(obj_)
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'internal':
+ obj_ = docInternalType.factory()
+ obj_.build(child_)
+ self.set_internal(obj_)
+# end class docCopyType
+
+
+class docCharType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, char=None, valueOf_=''):
+ self.char = char
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if docCharType.subclass:
+ return docCharType.subclass(*args_, **kwargs_)
+ else:
+ return docCharType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_char(self): return self.char
+ def set_char(self, char): self.char = char
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docCharType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docCharType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docCharType'):
+ if self.char is not None:
+ outfile.write(' char=%s' % (quote_attrib(self.char), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='docCharType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docCharType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.char is not None:
+ showIndent(outfile, level)
+ outfile.write('char = "%s",\n' % (self.char,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('char'):
+ self.char = attrs.get('char').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docCharType
+
+
+class docEmptyType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, valueOf_=''):
+ self.valueOf_ = valueOf_
+ def factory(*args_, **kwargs_):
+ if docEmptyType.subclass:
+ return docEmptyType.subclass(*args_, **kwargs_)
+ else:
+ return docEmptyType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def getValueOf_(self): return self.valueOf_
+ def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_
+ def export(self, outfile, level, namespace_='', name_='docEmptyType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='docEmptyType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='docEmptyType'):
+ pass
+ def exportChildren(self, outfile, level, namespace_='', name_='docEmptyType'):
+ if self.valueOf_.find('![CDATA')>-1:
+ value=quote_xml('%s' % self.valueOf_)
+ value=value.replace('![CDATA','<![CDATA')
+ value=value.replace(']]',']]>')
+ outfile.write(value)
+ else:
+ outfile.write(quote_xml('%s' % self.valueOf_))
+ def hasContent_(self):
+ if (
+ self.valueOf_ is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='docEmptyType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ pass
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ self.valueOf_ = ''
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ pass
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.TEXT_NODE:
+ self.valueOf_ += child_.nodeValue
+ elif child_.nodeType == Node.CDATA_SECTION_NODE:
+ self.valueOf_ += '![CDATA['+child_.nodeValue+']]'
+# end class docEmptyType
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+Options:
+ -s Use the SAX parser, not the minidom parser.
+"""
+
+def usage():
+ print USAGE_TEXT
+ sys.exit(1)
+
+
+def parse(inFileName):
+ doc = minidom.parse(inFileName)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('<?xml version="1.0" ?>\n')
+ rootObj.export(sys.stdout, 0, name_="doxygen",
+ namespacedef_='')
+ return rootObj
+
+
+def parseString(inString):
+ doc = minidom.parseString(inString)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('<?xml version="1.0" ?>\n')
+ rootObj.export(sys.stdout, 0, name_="doxygen",
+ namespacedef_='')
+ return rootObj
+
+
+def parseLiteral(inFileName):
+ doc = minidom.parse(inFileName)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('from compound import *\n\n')
+ sys.stdout.write('rootObj = doxygen(\n')
+ rootObj.exportLiteral(sys.stdout, 0, name_="doxygen")
+ sys.stdout.write(')\n')
+ return rootObj
+
+
+def main():
+ args = sys.argv[1:]
+ if len(args) == 1:
+ parse(args[0])
+ else:
+ usage()
+
+
+if __name__ == '__main__':
+ main()
+ #import pdb
+ #pdb.run('main()')
+
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/generated/index.py b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/index.py
new file mode 100644
index 000000000..7a70e14a1
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/index.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+"""
+Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
+"""
+
+from xml.dom import minidom
+
+import os
+import sys
+import compound
+
+import indexsuper as supermod
+
+class DoxygenTypeSub(supermod.DoxygenType):
+ def __init__(self, version=None, compound=None):
+ supermod.DoxygenType.__init__(self, version, compound)
+
+ def find_compounds_and_members(self, details):
+ """
+ Returns a list of all compounds and their members which match details
+ """
+
+ results = []
+ for compound in self.compound:
+ members = compound.find_members(details)
+ if members:
+ results.append([compound, members])
+ else:
+ if details.match(compound):
+ results.append([compound, []])
+
+ return results
+
+supermod.DoxygenType.subclass = DoxygenTypeSub
+# end class DoxygenTypeSub
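+
+# Illustrative only (not part of the generated code): a minimal sketch of how
+# this subclass is typically driven. The XML path and the 'details' object
+# (anything exposing a match(item) method) are assumptions for the example,
+# not names defined by this module.
+#
+#     import index
+#     class MatchAll:
+#         def match(self, item):
+#             return True   # accept every compound and member
+#     root = index.parse('xml/index.xml')
+#     for compound, members in root.find_compounds_and_members(MatchAll()):
+#         print compound.name, [m.name for m in members]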
+
+
+class CompoundTypeSub(supermod.CompoundType):
+ def __init__(self, kind=None, refid=None, name='', member=None):
+ supermod.CompoundType.__init__(self, kind, refid, name, member)
+
+ def find_members(self, details):
+ """
+ Returns a list of all members which match details
+ """
+
+ results = []
+
+ for member in self.member:
+ if details.match(member):
+ results.append(member)
+
+ return results
+
+supermod.CompoundType.subclass = CompoundTypeSub
+# end class CompoundTypeSub
+
+
+class MemberTypeSub(supermod.MemberType):
+
+ def __init__(self, kind=None, refid=None, name=''):
+ supermod.MemberType.__init__(self, kind, refid, name)
+
+supermod.MemberType.subclass = MemberTypeSub
+# end class MemberTypeSub
+
+
+def parse(inFilename):
+
+ doc = minidom.parse(inFilename)
+ rootNode = doc.documentElement
+ rootObj = supermod.DoxygenType.factory()
+ rootObj.build(rootNode)
+
+ return rootObj
+
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/generated/indexsuper.py b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/indexsuper.py
new file mode 100644
index 000000000..a99153019
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/generated/indexsuper.py
@@ -0,0 +1,523 @@
+#!/usr/bin/env python
+
+#
+# Generated Thu Jun 11 18:43:54 2009 by generateDS.py.
+#
+
+import sys
+import getopt
+from string import lower as str_lower
+from xml.dom import minidom
+from xml.dom import Node
+
+#
+# User methods
+#
+# Calls to the methods in these classes are generated by generateDS.py.
+# You can replace these methods by re-implementing the following class
+# in a module named generatedssuper.py.
+
+try:
+ from generatedssuper import GeneratedsSuper
+except ImportError, exp:
+
+ class GeneratedsSuper:
+ def format_string(self, input_data, input_name=''):
+ return input_data
+ def format_integer(self, input_data, input_name=''):
+ return '%d' % input_data
+ def format_float(self, input_data, input_name=''):
+ return '%f' % input_data
+ def format_double(self, input_data, input_name=''):
+ return '%e' % input_data
+ def format_boolean(self, input_data, input_name=''):
+ return '%s' % input_data
+
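+# Illustrative only (not part of the generated code): per the note above, the
+# fallback GeneratedsSuper defined here can be replaced by shipping a module
+# named generatedssuper.py on the import path. A minimal, hypothetical sketch
+# of such an override (shown only trimming surrounding whitespace in strings):
+#
+#     # generatedssuper.py
+#     class GeneratedsSuper:
+#         def format_string(self, input_data, input_name=''):
+#             return input_data.strip()
+#         # plus format_integer/format_float/format_double/format_boolean
+#         # with the same signatures as the fallback class above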
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+## banner = 'Dropping into IPython',
+## exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = 'ascii'
+
+#
+# Support/utility functions.
+#
+
+def showIndent(outfile, level):
+ for idx in range(level):
+ outfile.write(' ')
+
+def quote_xml(inStr):
+ s1 = (isinstance(inStr, basestring) and inStr or
+ '%s' % inStr)
+ s1 = s1.replace('&', '&amp;')
+ s1 = s1.replace('<', '&lt;')
+ s1 = s1.replace('>', '&gt;')
+ return s1
+
+def quote_attrib(inStr):
+ s1 = (isinstance(inStr, basestring) and inStr or
+ '%s' % inStr)
+ s1 = s1.replace('&', '&amp;')
+ s1 = s1.replace('<', '&lt;')
+ s1 = s1.replace('>', '&gt;')
+ if '"' in s1:
+ if "'" in s1:
+ s1 = '"%s"' % s1.replace('"', "&quot;")
+ else:
+ s1 = "'%s'" % s1
+ else:
+ s1 = '"%s"' % s1
+ return s1
+
+def quote_python(inStr):
+ s1 = inStr
+ if s1.find("'") == -1:
+ if s1.find('\n') == -1:
+ return "'%s'" % s1
+ else:
+ return "'''%s'''" % s1
+ else:
+ if s1.find('"') != -1:
+ s1 = s1.replace('"', '\\"')
+ if s1.find('\n') == -1:
+ return '"%s"' % s1
+ else:
+ return '"""%s"""' % s1
+
+
+class MixedContainer:
+ # Constants for category:
+ CategoryNone = 0
+ CategoryText = 1
+ CategorySimple = 2
+ CategoryComplex = 3
+ # Constants for content_type:
+ TypeNone = 0
+ TypeText = 1
+ TypeString = 2
+ TypeInteger = 3
+ TypeFloat = 4
+ TypeDecimal = 5
+ TypeDouble = 6
+ TypeBoolean = 7
+ def __init__(self, category, content_type, name, value):
+ self.category = category
+ self.content_type = content_type
+ self.name = name
+ self.value = value
+ def getCategory(self):
+ return self.category
+ def getContenttype(self, content_type):
+ return self.content_type
+ def getValue(self):
+ return self.value
+ def getName(self):
+ return self.name
+ def export(self, outfile, level, name, namespace):
+ if self.category == MixedContainer.CategoryText:
+ outfile.write(self.value)
+ elif self.category == MixedContainer.CategorySimple:
+ self.exportSimple(outfile, level, name)
+ else: # category == MixedContainer.CategoryComplex
+            self.value.export(outfile, level, namespace, name)
+ def exportSimple(self, outfile, level, name):
+ if self.content_type == MixedContainer.TypeString:
+ outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeInteger or \
+ self.content_type == MixedContainer.TypeBoolean:
+ outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeFloat or \
+ self.content_type == MixedContainer.TypeDecimal:
+ outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
+ elif self.content_type == MixedContainer.TypeDouble:
+ outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
+ def exportLiteral(self, outfile, level, name):
+ if self.category == MixedContainer.CategoryText:
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s", "%s"),\n' % \
+ (self.category, self.content_type, self.name, self.value))
+ elif self.category == MixedContainer.CategorySimple:
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s", "%s"),\n' % \
+ (self.category, self.content_type, self.name, self.value))
+ else: # category == MixedContainer.CategoryComplex
+ showIndent(outfile, level)
+ outfile.write('MixedContainer(%d, %d, "%s",\n' % \
+ (self.category, self.content_type, self.name,))
+ self.value.exportLiteral(outfile, level + 1)
+ showIndent(outfile, level)
+ outfile.write(')\n')
+
+
+class _MemberSpec(object):
+ def __init__(self, name='', data_type='', container=0):
+ self.name = name
+ self.data_type = data_type
+ self.container = container
+ def set_name(self, name): self.name = name
+ def get_name(self): return self.name
+ def set_data_type(self, data_type): self.data_type = data_type
+ def get_data_type(self): return self.data_type
+ def set_container(self, container): self.container = container
+ def get_container(self): return self.container
+
+
+#
+# Data representation classes.
+#
+
+class DoxygenType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, version=None, compound=None):
+ self.version = version
+ if compound is None:
+ self.compound = []
+ else:
+ self.compound = compound
+ def factory(*args_, **kwargs_):
+ if DoxygenType.subclass:
+ return DoxygenType.subclass(*args_, **kwargs_)
+ else:
+ return DoxygenType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_compound(self): return self.compound
+ def set_compound(self, compound): self.compound = compound
+ def add_compound(self, value): self.compound.append(value)
+ def insert_compound(self, index, value): self.compound[index] = value
+ def get_version(self): return self.version
+ def set_version(self, version): self.version = version
+ def export(self, outfile, level, namespace_='', name_='DoxygenType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='DoxygenType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='DoxygenType'):
+ outfile.write(' version=%s' % (self.format_string(quote_attrib(self.version).encode(ExternalEncoding), input_name='version'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='DoxygenType'):
+ for compound_ in self.compound:
+ compound_.export(outfile, level, namespace_, name_='compound')
+ def hasContent_(self):
+ if (
+ self.compound is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='DoxygenType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.version is not None:
+ showIndent(outfile, level)
+ outfile.write('version = %s,\n' % (self.version,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('compound=[\n')
+ level += 1
+ for compound in self.compound:
+ showIndent(outfile, level)
+ outfile.write('model_.compound(\n')
+ compound.exportLiteral(outfile, level, name_='compound')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('version'):
+ self.version = attrs.get('version').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'compound':
+ obj_ = CompoundType.factory()
+ obj_.build(child_)
+ self.compound.append(obj_)
+# end class DoxygenType
+
+
+class CompoundType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, refid=None, name=None, member=None):
+ self.kind = kind
+ self.refid = refid
+ self.name = name
+ if member is None:
+ self.member = []
+ else:
+ self.member = member
+ def factory(*args_, **kwargs_):
+ if CompoundType.subclass:
+ return CompoundType.subclass(*args_, **kwargs_)
+ else:
+ return CompoundType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_member(self): return self.member
+ def set_member(self, member): self.member = member
+ def add_member(self, value): self.member.append(value)
+ def insert_member(self, index, value): self.member[index] = value
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def export(self, outfile, level, namespace_='', name_='CompoundType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='CompoundType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='CompoundType'):
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='CompoundType'):
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sname>%s</%sname>\n' % (namespace_, self.format_string(quote_xml(self.name).encode(ExternalEncoding), input_name='name'), namespace_))
+ for member_ in self.member:
+ member_.export(outfile, level, namespace_, name_='member')
+ def hasContent_(self):
+ if (
+ self.name is not None or
+ self.member is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='CompoundType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('name=%s,\n' % quote_python(self.name).encode(ExternalEncoding))
+ showIndent(outfile, level)
+ outfile.write('member=[\n')
+ level += 1
+ for member in self.member:
+ showIndent(outfile, level)
+ outfile.write('model_.member(\n')
+ member.exportLiteral(outfile, level, name_='member')
+ showIndent(outfile, level)
+ outfile.write('),\n')
+ level -= 1
+ showIndent(outfile, level)
+ outfile.write('],\n')
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'name':
+ name_ = ''
+ for text__content_ in child_.childNodes:
+ name_ += text__content_.nodeValue
+ self.name = name_
+ elif child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'member':
+ obj_ = MemberType.factory()
+ obj_.build(child_)
+ self.member.append(obj_)
+# end class CompoundType
+
+
+class MemberType(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, kind=None, refid=None, name=None):
+ self.kind = kind
+ self.refid = refid
+ self.name = name
+ def factory(*args_, **kwargs_):
+ if MemberType.subclass:
+ return MemberType.subclass(*args_, **kwargs_)
+ else:
+ return MemberType(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_name(self): return self.name
+ def set_name(self, name): self.name = name
+ def get_kind(self): return self.kind
+ def set_kind(self, kind): self.kind = kind
+ def get_refid(self): return self.refid
+ def set_refid(self, refid): self.refid = refid
+ def export(self, outfile, level, namespace_='', name_='MemberType', namespacedef_=''):
+ showIndent(outfile, level)
+ outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_, ))
+ self.exportAttributes(outfile, level, namespace_, name_='MemberType')
+ if self.hasContent_():
+ outfile.write('>\n')
+ self.exportChildren(outfile, level + 1, namespace_, name_)
+ showIndent(outfile, level)
+ outfile.write('</%s%s>\n' % (namespace_, name_))
+ else:
+ outfile.write(' />\n')
+ def exportAttributes(self, outfile, level, namespace_='', name_='MemberType'):
+ outfile.write(' kind=%s' % (quote_attrib(self.kind), ))
+ outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'), ))
+ def exportChildren(self, outfile, level, namespace_='', name_='MemberType'):
+ if self.name is not None:
+ showIndent(outfile, level)
+ outfile.write('<%sname>%s</%sname>\n' % (namespace_, self.format_string(quote_xml(self.name).encode(ExternalEncoding), input_name='name'), namespace_))
+ def hasContent_(self):
+ if (
+ self.name is not None
+ ):
+ return True
+ else:
+ return False
+ def exportLiteral(self, outfile, level, name_='MemberType'):
+ level += 1
+ self.exportLiteralAttributes(outfile, level, name_)
+ if self.hasContent_():
+ self.exportLiteralChildren(outfile, level, name_)
+ def exportLiteralAttributes(self, outfile, level, name_):
+ if self.kind is not None:
+ showIndent(outfile, level)
+ outfile.write('kind = "%s",\n' % (self.kind,))
+ if self.refid is not None:
+ showIndent(outfile, level)
+ outfile.write('refid = %s,\n' % (self.refid,))
+ def exportLiteralChildren(self, outfile, level, name_):
+ showIndent(outfile, level)
+ outfile.write('name=%s,\n' % quote_python(self.name).encode(ExternalEncoding))
+ def build(self, node_):
+ attrs = node_.attributes
+ self.buildAttributes(attrs)
+ for child_ in node_.childNodes:
+ nodeName_ = child_.nodeName.split(':')[-1]
+ self.buildChildren(child_, nodeName_)
+ def buildAttributes(self, attrs):
+ if attrs.get('kind'):
+ self.kind = attrs.get('kind').value
+ if attrs.get('refid'):
+ self.refid = attrs.get('refid').value
+ def buildChildren(self, child_, nodeName_):
+ if child_.nodeType == Node.ELEMENT_NODE and \
+ nodeName_ == 'name':
+ name_ = ''
+ for text__content_ in child_.childNodes:
+ name_ += text__content_.nodeValue
+ self.name = name_
+# end class MemberType
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+Options:
+ -s Use the SAX parser, not the minidom parser.
+"""
+
+def usage():
+ print USAGE_TEXT
+ sys.exit(1)
+
+
+def parse(inFileName):
+ doc = minidom.parse(inFileName)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('<?xml version="1.0" ?>\n')
+ rootObj.export(sys.stdout, 0, name_="doxygenindex",
+ namespacedef_='')
+ return rootObj
+
+
+def parseString(inString):
+ doc = minidom.parseString(inString)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('<?xml version="1.0" ?>\n')
+ rootObj.export(sys.stdout, 0, name_="doxygenindex",
+ namespacedef_='')
+ return rootObj
+
+
+def parseLiteral(inFileName):
+ doc = minidom.parse(inFileName)
+ rootNode = doc.documentElement
+ rootObj = DoxygenType.factory()
+ rootObj.build(rootNode)
+ # Enable Python to collect the space used by the DOM.
+ doc = None
+ sys.stdout.write('from index import *\n\n')
+ sys.stdout.write('rootObj = doxygenindex(\n')
+ rootObj.exportLiteral(sys.stdout, 0, name_="doxygenindex")
+ sys.stdout.write(')\n')
+ return rootObj
+
+
+def main():
+ args = sys.argv[1:]
+ if len(args) == 1:
+ parse(args[0])
+ else:
+ usage()
+
+
+
+
+if __name__ == '__main__':
+ main()
+ #import pdb
+ #pdb.run('main()')
+
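
The parse() and parseString() helpers above both build a DoxygenType tree and echo it back to stdout as XML. For programmatic use the generated accessors are usually enough on their own; the following is a minimal sketch, assuming the generated parser is importable as index and that CompoundType exposes the same get_name()/get_kind()/get_member() accessors that MemberType does (both are assumptions, not shown in this diff):

    # Sketch only: walk a Doxygen index.xml and print compound/member names.
    # The import path and the CompoundType accessors are assumptions.
    import sys
    import index

    def list_members(index_xml_path):
        root = index.parse(index_xml_path)  # also echoes the XML to stdout
        for compound in root.get_compound():
            print('%s (%s)' % (compound.get_name(), compound.get_kind()))
            for member in compound.get_member():
                print('  %s' % member.get_name())

    if __name__ == '__main__':
        list_members(sys.argv[1])
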
diff --git a/tools/gr-usrptest/docs/doxygen/doxyxml/text.py b/tools/gr-usrptest/docs/doxygen/doxyxml/text.py
new file mode 100644
index 000000000..629edd180
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/doxyxml/text.py
@@ -0,0 +1,56 @@
+#
+# Copyright 2010 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Utilities for extracting text from generated classes.
+"""
+
+def is_string(txt):
+ if isinstance(txt, str):
+ return True
+ try:
+ if isinstance(txt, unicode):
+ return True
+ except NameError:
+ pass
+ return False
+
+def description(obj):
+ if obj is None:
+ return None
+ return description_bit(obj).strip()
+
+def description_bit(obj):
+ if hasattr(obj, 'content'):
+ contents = [description_bit(item) for item in obj.content]
+ result = ''.join(contents)
+ elif hasattr(obj, 'content_'):
+ contents = [description_bit(item) for item in obj.content_]
+ result = ''.join(contents)
+ elif hasattr(obj, 'value'):
+ result = description_bit(obj.value)
+ elif is_string(obj):
+ return obj
+ else:
+ raise StandardError('Expecting a string or something with content, content_ or value attribute')
+ # If this bit is a paragraph then add some line breaks.
+ if hasattr(obj, 'name') and obj.name == 'para':
+ result += "\n\n"
+ return result
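
description_bit() recurses through content, content_ and value attributes until it reaches plain strings, appending a blank line after every 'para' node. A minimal sketch of that flattening on hand-built objects (the Mock* classes are purely illustrative, not part of doxyxml, and the import path is an assumption):

    # Sketch only: feed description() objects shaped like the ones doxyxml builds.
    from text import description  # assumed import path for the module above

    class MockText(object):
        def __init__(self, value):
            self.value = value          # picked up by the 'value' branch

    class MockPara(object):
        name = 'para'                   # triggers the trailing blank line
        def __init__(self, *content):
            self.content = list(content)

    class MockDesc(object):
        def __init__(self, *content):
            self.content_ = list(content)

    desc = MockDesc(MockPara(MockText('Brief line.')),
                    MockPara(MockText('Detail line.')))
    print(description(desc))  # prints "Brief line.", a blank line, "Detail line."
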
diff --git a/tools/gr-usrptest/docs/doxygen/other/group_defs.dox b/tools/gr-usrptest/docs/doxygen/other/group_defs.dox
new file mode 100644
index 000000000..745fc6679
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/other/group_defs.dox
@@ -0,0 +1,7 @@
+/*!
+ * \defgroup block GNU Radio USRPTEST C++ Signal Processing Blocks
+ * \brief All C++ blocks that can be used from the USRPTEST GNU Radio
+ * module are listed here or in the subcategories below.
+ *
+ */
+
diff --git a/tools/gr-usrptest/docs/doxygen/other/main_page.dox b/tools/gr-usrptest/docs/doxygen/other/main_page.dox
new file mode 100644
index 000000000..2ae0bef14
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/other/main_page.dox
@@ -0,0 +1,10 @@
+/*! \mainpage
+
+Welcome to the GNU Radio USRPTEST Block
+
+This is the intro page for the Doxygen manual generated for the USRPTEST
+block (docs/doxygen/other/main_page.dox). Edit it to add more detailed
+documentation about the new GNU Radio modules contained in this
+project.
+
+*/
diff --git a/tools/gr-usrptest/docs/doxygen/swig_doc.py b/tools/gr-usrptest/docs/doxygen/swig_doc.py
new file mode 100644
index 000000000..d3536db8d
--- /dev/null
+++ b/tools/gr-usrptest/docs/doxygen/swig_doc.py
@@ -0,0 +1,328 @@
+#
+# Copyright 2010-2012 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Creates the swig_doc.i SWIG interface file.
+Execute using: python swig_doc.py xml_path outputfilename
+
+The file instructs SWIG to transfer the doxygen comments into the
+python docstrings.
+
+"""
+
+import sys, time
+
+from doxyxml import DoxyIndex, DoxyClass, DoxyFriend, DoxyFunction, DoxyFile
+from doxyxml import DoxyOther, base
+
+def py_name(name):
+ bits = name.split('_')
+ return '_'.join(bits[1:])
+
+def make_name(name):
+ bits = name.split('_')
+ return bits[0] + '_make_' + '_'.join(bits[1:])
+
+
+class Block(object):
+ """
+ Checks if doxyxml produced objects correspond to a gnuradio block.
+ """
+
+ @classmethod
+ def includes(cls, item):
+ if not isinstance(item, DoxyClass):
+ return False
+ # Check for a parsing error.
+ if item.error():
+ return False
+ friendname = make_name(item.name())
+ is_a_block = item.has_member(friendname, DoxyFriend)
+ # Sometimes the make function isn't declared as a friend, so also check the index.
+ if not is_a_block:
+ is_a_block = di.has_member(friendname, DoxyFunction)
+ return is_a_block
+
+class Block2(object):
+ """
+ Checks if doxyxml produced objects correspond to a new style
+ gnuradio block.
+ """
+
+ @classmethod
+ def includes(cls, item):
+ if not isinstance(item, DoxyClass):
+ return False
+ # Check for a parsing error.
+ if item.error():
+ return False
+ is_a_block2 = item.has_member('make', DoxyFunction) and item.has_member('sptr', DoxyOther)
+ return is_a_block2
+
+
+def utoascii(text):
+ """
+ Convert unicode text into ascii and escape quotes.
+ """
+ if text is None:
+ return ''
+ out = text.encode('ascii', 'replace')
+ out = out.replace('"', '\\"')
+ return out
+
+
+def combine_descriptions(obj):
+ """
+ Combines the brief and detailed descriptions of an object together.
+ """
+ description = []
+ bd = obj.brief_description.strip()
+ dd = obj.detailed_description.strip()
+ if bd:
+ description.append(bd)
+ if dd:
+ description.append(dd)
+ return utoascii('\n\n'.join(description)).strip()
+
+def format_params(parameteritems):
+ output = ['Args:']
+ template = ' {0} : {1}'
+ for pi in parameteritems:
+ output.append(template.format(pi.name, pi.description))
+ return '\n'.join(output)
+
+entry_templ = '%feature("docstring") {name} "{docstring}"'
+def make_entry(obj, name=None, templ="{description}", description=None, params=[]):
+ """
+ Create a docstring entry for a swig interface file.
+
+ obj - a doxyxml object from which documentation will be extracted.
+ name - the name of the C object (defaults to obj.name())
+ templ - an optional template for the docstring containing only one
+ variable named 'description'.
+ description - if this optional variable is set then its value is
+ used as the description instead of extracting it from obj.
+ """
+ if name is None:
+ name=obj.name()
+ if "operator " in name:
+ return ''
+ if description is None:
+ description = combine_descriptions(obj)
+ if params:
+ description += '\n\n'
+ description += utoascii(format_params(params))
+ docstring = templ.format(description=description)
+ if not docstring:
+ return ''
+ return entry_templ.format(
+ name=name,
+ docstring=docstring,
+ )
+
+
+def make_func_entry(func, name=None, description=None, params=None):
+ """
+ Create a function docstring entry for a swig interface file.
+
+ func - a doxyxml object from which documentation will be extracted.
+ name - the name of the C object (defaults to func.name())
+ description - if this optional variable is set then its value is
+ used as the description instead of extracting it from func.
+ params - a parameter list that overrides using func.params.
+ """
+ #if params is None:
+ # params = func.params
+ #params = [prm.declname for prm in params]
+ #if params:
+ # sig = "Params: (%s)" % ", ".join(params)
+ #else:
+ # sig = "Params: (NONE)"
+ #templ = "{description}\n\n" + sig
+ #return make_entry(func, name=name, templ=utoascii(templ),
+ # description=description)
+ return make_entry(func, name=name, description=description, params=params)
+
+
+def make_class_entry(klass, description=None, ignored_methods=[], params=None):
+ """
+ Create a class docstring for a swig interface file.
+ """
+ if params is None:
+ params = klass.params
+ output = []
+ output.append(make_entry(klass, description=description, params=params))
+ for func in klass.in_category(DoxyFunction):
+ if func.name() not in ignored_methods:
+ name = klass.name() + '::' + func.name()
+ output.append(make_func_entry(func, name=name))
+ return "\n\n".join(output)
+
+
+def make_block_entry(di, block):
+ """
+ Create class and function docstrings of a gnuradio block for a
+ swig interface file.
+ """
+ descriptions = []
+ # Get the documentation associated with the class.
+ class_desc = combine_descriptions(block)
+ if class_desc:
+ descriptions.append(class_desc)
+ # Get the documentation associated with the make function
+ make_func = di.get_member(make_name(block.name()), DoxyFunction)
+ make_func_desc = combine_descriptions(make_func)
+ if make_func_desc:
+ descriptions.append(make_func_desc)
+ # Get the documentation associated with the file
+ try:
+ block_file = di.get_member(block.name() + ".h", DoxyFile)
+ file_desc = combine_descriptions(block_file)
+ if file_desc:
+ descriptions.append(file_desc)
+ except base.Base.NoSuchMember:
+ # Don't worry if we can't find a matching file.
+ pass
+ # And join them all together to make a super duper description.
+ super_description = "\n\n".join(descriptions)
+ # Associate the combined description with the class and
+ # the make function.
+ output = []
+ output.append(make_class_entry(block, description=super_description))
+ output.append(make_func_entry(make_func, description=super_description,
+ params=block.params))
+ return "\n\n".join(output)
+
+def make_block2_entry(di, block):
+ """
+ Create class and function docstrings of a new style gnuradio block for a
+ swig interface file.
+ """
+ descriptions = []
+ # For new style blocks all the relevant documentation should be
+ # associated with the 'make' method.
+ class_description = combine_descriptions(block)
+ make_func = block.get_member('make', DoxyFunction)
+ make_description = combine_descriptions(make_func)
+ description = class_description + "\n\nConstructor Specific Documentation:\n\n" + make_description
+ # Associate the combined description with the class and
+ # the make function.
+ output = []
+ output.append(make_class_entry(
+ block, description=description,
+ ignored_methods=['make'], params=make_func.params))
+ makename = block.name() + '::make'
+ output.append(make_func_entry(
+ make_func, name=makename, description=description,
+ params=make_func.params))
+ return "\n\n".join(output)
+
+def make_swig_interface_file(di, swigdocfilename, custom_output=None):
+
+ output = ["""
+/*
+ * This file was automatically generated using swig_doc.py.
+ *
+ * Any changes to it will be lost next time it is regenerated.
+ */
+"""]
+
+ if custom_output is not None:
+ output.append(custom_output)
+
+ # Create docstrings for the blocks.
+ blocks = di.in_category(Block)
+ blocks2 = di.in_category(Block2)
+
+ make_funcs = set([])
+ for block in blocks:
+ try:
+ make_func = di.get_member(make_name(block.name()), DoxyFunction)
+ # Don't want to risk writing to output twice.
+ if make_func.name() not in make_funcs:
+ make_funcs.add(make_func.name())
+ output.append(make_block_entry(di, block))
+ except block.ParsingError:
+ sys.stderr.write('Parsing error for block {0}\n'.format(block.name()))
+ raise
+
+ for block in blocks2:
+ try:
+ make_func = block.get_member('make', DoxyFunction)
+ make_func_name = block.name() +'::make'
+ # Don't want to risk writing to output twice.
+ if make_func_name not in make_funcs:
+ make_funcs.add(make_func_name)
+ output.append(make_block2_entry(di, block))
+ except block.ParsingError:
+ sys.stderr.write('Parsing error for block {0}\n'.format(block.name()))
+ raise
+
+ # Create docstrings for functions
+ # Don't include the make functions since they have already been dealt with.
+ funcs = [f for f in di.in_category(DoxyFunction)
+ if f.name() not in make_funcs and not f.name().startswith('std::')]
+ for f in funcs:
+ try:
+ output.append(make_func_entry(f))
+ except f.ParsingError:
+ sys.stderr.write('Parsing error for function {0}\n'.format(f.name()))
+
+ # Create docstrings for classes
+ block_names = [block.name() for block in blocks]
+ block_names += [block.name() for block in blocks2]
+ klasses = [k for k in di.in_category(DoxyClass)
+ if k.name() not in block_names and not k.name().startswith('std::')]
+ for k in klasses:
+ try:
+ output.append(make_class_entry(k))
+ except k.ParsingError:
+ sys.stderr.write('Parsing error for class {0}\n'.format(k.name()))
+
+ # Docstrings are not created for anything that is not a function or a class.
+ # If this excludes anything important please add it here.
+
+ output = "\n\n".join(output)
+
+ swig_doc = file(swigdocfilename, 'w')
+ swig_doc.write(output)
+ swig_doc.close()
+
+if __name__ == "__main__":
+ # Parse command line options and set up doxyxml.
+ err_msg = "Execute using: python swig_doc.py xml_path outputfilename"
+ if len(sys.argv) != 3:
+ raise StandardError(err_msg)
+ xml_path = sys.argv[1]
+ swigdocfilename = sys.argv[2]
+ di = DoxyIndex(xml_path)
+
+ # gnuradio.gr.msg_queue.insert_tail and delete_head create errors unless docstrings are defined!
+ # This is presumably a bug in SWIG.
+ #msg_q = di.get_member(u'gr_msg_queue', DoxyClass)
+ #insert_tail = msg_q.get_member(u'insert_tail', DoxyFunction)
+ #delete_head = msg_q.get_member(u'delete_head', DoxyFunction)
+ output = []
+ #output.append(make_func_entry(insert_tail, name='gr_py_msg_queue__insert_tail'))
+ #output.append(make_func_entry(delete_head, name='gr_py_msg_queue__delete_head'))
+ custom_output = "\n\n".join(output)
+
+ # Generate the docstrings interface file.
+ make_swig_interface_file(di, swigdocfilename, custom_output=custom_output)
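
make_entry() is the formatting core of the script: it joins the brief and detailed descriptions and wraps the result in SWIG's %feature("docstring") directive via entry_templ. A self-contained sketch of that same formatting step, using a stand-in object (FakeDoxy is illustrative only, not a doxyxml class, and usrptest_add_cc is an invented name):

    # Sketch only: reproduce the %feature("docstring") formatting performed by
    # make_entry()/combine_descriptions() with a hand-rolled stand-in object.
    class FakeDoxy(object):
        brief_description = 'Adds two complex streams.'
        detailed_description = 'Each output sample is in0[i] + in1[i].'
        def name(self):
            return 'usrptest_add_cc'

    ENTRY_TEMPL = '%feature("docstring") {name} "{docstring}"'

    def make_entry_sketch(obj):
        parts = [d for d in (obj.brief_description.strip(),
                             obj.detailed_description.strip()) if d]
        return ENTRY_TEMPL.format(name=obj.name(), docstring='\n\n'.join(parts))

    print(make_entry_sketch(FakeDoxy()))
    # %feature("docstring") usrptest_add_cc "Adds two complex streams.
    #
    # Each output sample is in0[i] + in1[i]."
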
diff --git a/tools/gr-usrptest/examples/lv_control_example.py b/tools/gr-usrptest/examples/lv_control_example.py
new file mode 100755
index 000000000..dc2a7be2f
--- /dev/null
+++ b/tools/gr-usrptest/examples/lv_control_example.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+from usrptest.labview_control import lv_control
+import time
+import numpy as np
+import argparse
+import sys
+
+def test00(freq, frange, steps):
+ freqrange = np.arange(freq,freq+frange,steps)
+ source = 'VST-Out'
+ sink = 'X3x0-4-B-RX2'
+ host = 'pollux'
+ base_path = 'C:\Users\sdrtest\git\labview-test\labview\RTS_Control\Host\\'
+ print('connecting to switch')
+ switch = lv_control.executive_switch(host,base_path,'RTSwitch')
+ print('connecting to siggen')
+ siggen = lv_control.vst_siggen(host,base_path,'RIO0')
+ # Configure RF-Switching
+ switch.connect_ports(source, sink)
+
+ # Sweep over freqrange
+ for freq in freqrange:
+ siggen.set_freq(float(freq))
+ time.sleep(1)
+ # Shutdown Siggen
+ siggen.disconnect()
+ switch.disconnect_all()
+
+def test01(freq, frange, steps):
+ freqrange = np.arange(freq,freq+frange,steps)
+ source = 'VST-Out'
+ sink0 = 'X3x0-4-B-RX2'
+ sink1 = 'X3x0-2-B-RX2'
+ host = 'pollux'
+ base_path = 'C:\Users\sdrtest\git\labview-test\labview\RTS_Control\Host\\'
+ print('connecting to switch')
+ switch = lv_control.executive_switch(host,base_path,'RTSwitch')
+ print('connecting to siggen')
+ siggen = lv_control.vst_siggen(host,base_path,'RIO0')
+ for freq in freqrange:
+ siggen.set_freq(float(freq))
+ print('retuning siggen to {freq} MHz'.format(freq=freq/1e6))
+ switch.connect_ports(source,sink0)
+ time.sleep(0.5)
+ switch.connect_ports(source,sink1)
+ time.sleep(0.5)
+ switch.disconnect_all()
+ time.sleep(0.2)
+ siggen.disconnect()
+ switch.disconnect_all()
+
+
+if __name__ == '__main__':
+ thismodule = sys.modules[__name__]
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '-t',
+ '--test',
+ help='which testcase?'
+ )
+ parser.add_argument(
+ '-f',
+ '--freq',
+ type=float,
+ help='which center freq?'
+ )
+ parser.add_argument(
+ '-r',
+ '--range',
+ type=float,
+ help='which freq range?'
+ )
+ parser.add_argument(
+ '--steps',
+ type=float,
+ default=1e6,
+ help='which frequency step size?'
+ )
+ args = parser.parse_args()
+ getattr(thismodule,args.test)(args.freq,args.range,args.steps)
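
The example dispatches to a test case with getattr(thismodule, args.test), so a mistyped --test name dies with a bare AttributeError. A small sketch of a friendlier dispatch the script could use instead (run_test is an invented helper, not part of the commit):

    import sys

    def run_test(module, name, freq, frange, steps):
        # Look the test case up by name; exit with a readable message instead
        # of an AttributeError if it does not exist or is not callable.
        test = getattr(module, name, None)
        if test is None or not callable(test):
            sys.exit('unknown test case: {0}'.format(name))
        test(freq, frange, steps)

    # In lv_control_example.py the last line of __main__ would become:
    #   run_test(thismodule, args.test, args.freq, args.range, args.steps)
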
diff --git a/tools/gr-usrptest/examples/phase_diff_x310_ubx_example.grc b/tools/gr-usrptest/examples/phase_diff_x310_ubx_example.grc
new file mode 100644
index 000000000..f6cbe3d83
--- /dev/null
+++ b/tools/gr-usrptest/examples/phase_diff_x310_ubx_example.grc
@@ -0,0 +1,3296 @@
+<?xml version='1.0' encoding='utf-8'?>
+<?grc format='1' created='3.7.11'?>
+<flow_graph>
+ <timestamp>Thu Oct 6 14:49:17 2016</timestamp>
+ <block>
+ <key>options</key>
+ <param>
+ <key>author</key>
+ <value></value>
+ </param>
+ <param>
+ <key>window_size</key>
+ <value></value>
+ </param>
+ <param>
+ <key>category</key>
+ <value>[GRC Hier Blocks]</value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>description</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(8, 8)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>generate_options</key>
+ <value>qt_gui</value>
+ </param>
+ <param>
+ <key>hier_block_src_path</key>
+ <value>.:</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>phase_difference_x3x0_example</value>
+ </param>
+ <param>
+ <key>max_nouts</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>qt_qss_theme</key>
+ <value></value>
+ </param>
+ <param>
+ <key>realtime_scheduling</key>
+ <value></value>
+ </param>
+ <param>
+ <key>run_command</key>
+ <value>{python} -u {filename}</value>
+ </param>
+ <param>
+ <key>run_options</key>
+ <value>prompt</value>
+ </param>
+ <param>
+ <key>run</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>thread_safe_setters</key>
+ <value></value>
+ </param>
+ <param>
+ <key>title</key>
+ <value></value>
+ </param>
+ </block>
+ <block>
+ <key>variable</key>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(8, 100)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>baseband</value>
+ </param>
+ <param>
+ <key>value</key>
+ <value>3e9</value>
+ </param>
+ </block>
+ <block>
+ <key>variable_qtgui_range</key>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>value</key>
+ <value>10e6</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(200, 112)</value>
+ </param>
+ <param>
+ <key>gui_hint</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>rx_base</value>
+ </param>
+ <param>
+ <key>label</key>
+ <value></value>
+ </param>
+ <param>
+ <key>min_len</key>
+ <value>200</value>
+ </param>
+ <param>
+ <key>orient</key>
+ <value>Qt.Horizontal</value>
+ </param>
+ <param>
+ <key>start</key>
+ <value>10e6</value>
+ </param>
+ <param>
+ <key>step</key>
+ <value>50e6</value>
+ </param>
+ <param>
+ <key>stop</key>
+ <value>6e9</value>
+ </param>
+ <param>
+ <key>rangeType</key>
+ <value>float</value>
+ </param>
+ <param>
+ <key>widget</key>
+ <value>counter_slider</value>
+ </param>
+ </block>
+ <block>
+ <key>variable</key>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(8, 160)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>value</key>
+ <value>10e6</value>
+ </param>
+ </block>
+ <block>
+ <key>variable_qtgui_range</key>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>value</key>
+ <value>10e6</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(168, 240)</value>
+ </param>
+ <param>
+ <key>gui_hint</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>tx_base</value>
+ </param>
+ <param>
+ <key>label</key>
+ <value></value>
+ </param>
+ <param>
+ <key>min_len</key>
+ <value>200</value>
+ </param>
+ <param>
+ <key>orient</key>
+ <value>Qt.Horizontal</value>
+ </param>
+ <param>
+ <key>start</key>
+ <value>10e6</value>
+ </param>
+ <param>
+ <key>step</key>
+ <value>50e6</value>
+ </param>
+ <param>
+ <key>stop</key>
+ <value>6e9</value>
+ </param>
+ <param>
+ <key>rangeType</key>
+ <value>float</value>
+ </param>
+ <param>
+ <key>widget</key>
+ <value>counter_slider</value>
+ </param>
+ </block>
+ <block>
+ <key>analog_sig_source_x</key>
+ <param>
+ <key>amp</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>freq</key>
+ <value>100e3</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(440, 112)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>analog_sig_source_x_1</value>
+ </param>
+ <param>
+ <key>maxoutbuf</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>minoutbuf</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>offset</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>type</key>
+ <value>complex</value>
+ </param>
+ <param>
+ <key>samp_rate</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>waveform</key>
+ <value>analog.GR_COS_WAVE</value>
+ </param>
+ </block>
+ <block>
+ <key>qtgui_time_sink_x</key>
+ <param>
+ <key>autoscale</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>axislabels</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>ctrlpanel</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>entags</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(768, 404)</value>
+ </param>
+ <param>
+ <key>gui_hint</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>grid</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>qtgui_time_sink_x_0</value>
+ </param>
+ <param>
+ <key>legend</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>alpha1</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color1</key>
+ <value>"blue"</value>
+ </param>
+ <param>
+ <key>label1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker1</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style1</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width1</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha10</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color10</key>
+ <value>"blue"</value>
+ </param>
+ <param>
+ <key>label10</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker10</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style10</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width10</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha2</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color2</key>
+ <value>"red"</value>
+ </param>
+ <param>
+ <key>label2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker2</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style2</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width2</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha3</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color3</key>
+ <value>"green"</value>
+ </param>
+ <param>
+ <key>label3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker3</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style3</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width3</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha4</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color4</key>
+ <value>"black"</value>
+ </param>
+ <param>
+ <key>label4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker4</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style4</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width4</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha5</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color5</key>
+ <value>"cyan"</value>
+ </param>
+ <param>
+ <key>label5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker5</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style5</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width5</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha6</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color6</key>
+ <value>"magenta"</value>
+ </param>
+ <param>
+ <key>label6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker6</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style6</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width6</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha7</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color7</key>
+ <value>"yellow"</value>
+ </param>
+ <param>
+ <key>label7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker7</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style7</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width7</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha8</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color8</key>
+ <value>"dark red"</value>
+ </param>
+ <param>
+ <key>label8</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker8</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style8</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width8</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha9</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color9</key>
+ <value>"dark green"</value>
+ </param>
+ <param>
+ <key>label9</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker9</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style9</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width9</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>name</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>nconnections</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>size</key>
+ <value>1024</value>
+ </param>
+ <param>
+ <key>srate</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>tr_chan</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>tr_delay</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>tr_level</key>
+ <value>0.0</value>
+ </param>
+ <param>
+ <key>tr_mode</key>
+ <value>qtgui.TRIG_MODE_FREE</value>
+ </param>
+ <param>
+ <key>tr_slope</key>
+ <value>qtgui.TRIG_SLOPE_POS</value>
+ </param>
+ <param>
+ <key>tr_tag</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>type</key>
+ <value>float</value>
+ </param>
+ <param>
+ <key>update_time</key>
+ <value>0.10</value>
+ </param>
+ <param>
+ <key>ylabel</key>
+ <value>Amplitude</value>
+ </param>
+ <param>
+ <key>yunit</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>ymax</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>ymin</key>
+ <value>-1</value>
+ </param>
+ </block>
+ <block>
+ <key>qtgui_time_sink_x</key>
+ <param>
+ <key>autoscale</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>axislabels</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>ctrlpanel</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>entags</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(664, 280)</value>
+ </param>
+ <param>
+ <key>gui_hint</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>grid</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>qtgui_time_sink_x_1</value>
+ </param>
+ <param>
+ <key>legend</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>alpha1</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color1</key>
+ <value>"blue"</value>
+ </param>
+ <param>
+ <key>label1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker1</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style1</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width1</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha10</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color10</key>
+ <value>"blue"</value>
+ </param>
+ <param>
+ <key>label10</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker10</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style10</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width10</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha2</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color2</key>
+ <value>"red"</value>
+ </param>
+ <param>
+ <key>label2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker2</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style2</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width2</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha3</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color3</key>
+ <value>"green"</value>
+ </param>
+ <param>
+ <key>label3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker3</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style3</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width3</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha4</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color4</key>
+ <value>"black"</value>
+ </param>
+ <param>
+ <key>label4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker4</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style4</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width4</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha5</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color5</key>
+ <value>"cyan"</value>
+ </param>
+ <param>
+ <key>label5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker5</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style5</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width5</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha6</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color6</key>
+ <value>"magenta"</value>
+ </param>
+ <param>
+ <key>label6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker6</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style6</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width6</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha7</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color7</key>
+ <value>"yellow"</value>
+ </param>
+ <param>
+ <key>label7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker7</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style7</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width7</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha8</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color8</key>
+ <value>"dark red"</value>
+ </param>
+ <param>
+ <key>label8</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker8</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style8</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width8</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>alpha9</key>
+ <value>1.0</value>
+ </param>
+ <param>
+ <key>color9</key>
+ <value>"dark green"</value>
+ </param>
+ <param>
+ <key>label9</key>
+ <value></value>
+ </param>
+ <param>
+ <key>marker9</key>
+ <value>-1</value>
+ </param>
+ <param>
+ <key>style9</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>width9</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>name</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>nconnections</key>
+ <value>2</value>
+ </param>
+ <param>
+ <key>size</key>
+ <value>1024</value>
+ </param>
+ <param>
+ <key>srate</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>tr_chan</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>tr_delay</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>tr_level</key>
+ <value>0.0</value>
+ </param>
+ <param>
+ <key>tr_mode</key>
+ <value>qtgui.TRIG_MODE_FREE</value>
+ </param>
+ <param>
+ <key>tr_slope</key>
+ <value>qtgui.TRIG_SLOPE_POS</value>
+ </param>
+ <param>
+ <key>tr_tag</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>type</key>
+ <value>complex</value>
+ </param>
+ <param>
+ <key>update_time</key>
+ <value>0.10</value>
+ </param>
+ <param>
+ <key>ylabel</key>
+ <value>Amplitude</value>
+ </param>
+ <param>
+ <key>yunit</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>ymax</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>ymin</key>
+ <value>-1</value>
+ </param>
+ </block>
+ <block>
+ <key>uhd_usrp_sink</key>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>ant0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw0</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq0</key>
+ <value>tx_base</value>
+ </param>
+ <param>
+ <key>norm_gain0</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain0</key>
+ <value>15</value>
+ </param>
+ <param>
+ <key>ant10</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain10</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant11</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain11</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant12</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain12</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant13</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain13</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant14</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain14</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant15</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain15</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant16</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain16</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant17</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain17</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant18</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain18</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant19</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain19</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw1</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq1</key>
+ <value>tx_base</value>
+ </param>
+ <param>
+ <key>norm_gain1</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain1</key>
+ <value>15</value>
+ </param>
+ <param>
+ <key>ant20</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain20</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant21</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain21</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant22</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain22</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant23</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain23</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant24</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain24</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant25</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain25</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant26</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain26</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant27</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain27</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant28</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain28</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant29</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain29</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain2</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant30</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain30</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant31</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain31</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain3</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain4</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain5</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain6</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain7</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant8</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain8</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>ant9</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>norm_gain9</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>clock_rate</key>
+ <value>0.0</value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>dev_addr</key>
+ <value>"addr=192.168.10.2"</value>
+ </param>
+ <param>
+ <key>dev_args</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(816, 104)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>uhd_usrp_sink_1</value>
+ </param>
+ <param>
+ <key>type</key>
+ <value>fc32</value>
+ </param>
+ <param>
+ <key>clock_source0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec0</key>
+ <value>"A:0 B:0"</value>
+ </param>
+ <param>
+ <key>time_source0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>nchan</key>
+ <value>2</value>
+ </param>
+ <param>
+ <key>num_mboards</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>samp_rate</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>hide_cmd_port</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>hide_lo_controls</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>stream_args</key>
+ <value></value>
+ </param>
+ <param>
+ <key>stream_chans</key>
+ <value>[]</value>
+ </param>
+ <param>
+ <key>sync</key>
+ <value></value>
+ </param>
+ <param>
+ <key>len_tag_name</key>
+ <value></value>
+ </param>
+ <param>
+ <key>otw</key>
+ <value></value>
+ </param>
+ </block>
+ <block>
+ <key>uhd_usrp_source</key>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>ant0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw0</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq0</key>
+ <value>rx_base</value>
+ </param>
+ <param>
+ <key>dc_offs_enb0</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb0</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain0</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain0</key>
+ <value>15</value>
+ </param>
+ <param>
+ <key>lo_export0</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source0</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant10</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb10</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb10</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain10</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain10</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export10</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source10</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant11</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb11</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb11</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain11</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain11</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export11</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source11</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant12</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb12</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb12</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain12</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain12</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export12</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source12</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant13</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb13</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb13</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain13</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain13</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export13</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source13</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant14</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb14</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb14</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain14</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain14</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export14</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source14</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant15</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb15</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb15</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain15</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain15</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export15</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source15</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant16</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb16</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb16</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain16</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain16</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export16</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source16</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant17</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb17</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb17</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain17</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain17</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export17</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source17</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant18</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb18</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb18</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain18</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain18</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export18</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source18</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant19</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb19</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb19</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain19</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain19</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export19</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source19</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw1</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq1</key>
+ <value>rx_base</value>
+ </param>
+ <param>
+ <key>dc_offs_enb1</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb1</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain1</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain1</key>
+ <value>15</value>
+ </param>
+ <param>
+ <key>lo_export1</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source1</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant20</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb20</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb20</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain20</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain20</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export20</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source20</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant21</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb21</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb21</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain21</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain21</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export21</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source21</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant22</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb22</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb22</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain22</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain22</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export22</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source22</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant23</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb23</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb23</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain23</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain23</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export23</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source23</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant24</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb24</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb24</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain24</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain24</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export24</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source24</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant25</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb25</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb25</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain25</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain25</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export25</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source25</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant26</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb26</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb26</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain26</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain26</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export26</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source26</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant27</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb27</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb27</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain27</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain27</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export27</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source27</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant28</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb28</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb28</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain28</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain28</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export28</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source28</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant29</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb29</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb29</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain29</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain29</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export29</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source29</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb2</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb2</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain2</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain2</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export2</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source2</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant30</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb30</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb30</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain30</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain30</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export30</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source30</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant31</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb31</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb31</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain31</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain31</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export31</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source31</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb3</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb3</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain3</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain3</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export3</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source3</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb4</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb4</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain4</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain4</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export4</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source4</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb5</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb5</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain5</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain5</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export5</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source5</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb6</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb6</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain6</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain6</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export6</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source6</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb7</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb7</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain7</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain7</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export7</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source7</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant8</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb8</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb8</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain8</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain8</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export8</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source8</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>ant9</key>
+ <value></value>
+ </param>
+ <param>
+ <key>bw9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>center_freq9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>dc_offs_enb9</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>iq_imbal_enb9</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>norm_gain9</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>gain9</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>lo_export9</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>lo_source9</key>
+ <value>internal</value>
+ </param>
+ <param>
+ <key>clock_rate</key>
+ <value>0.0</value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>dev_addr</key>
+ <value>"addr=192.168.10.2"</value>
+ </param>
+ <param>
+ <key>dev_args</key>
+ <value>""</value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(320, 376)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>uhd_usrp_source_0</value>
+ </param>
+ <param>
+ <key>maxoutbuf</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>clock_source0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec0</key>
+ <value>"A:0 B:0"</value>
+ </param>
+ <param>
+ <key>time_source0</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source1</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source2</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source3</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source4</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source5</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source6</key>
+ <value></value>
+ </param>
+ <param>
+ <key>clock_source7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>sd_spec7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>time_source7</key>
+ <value></value>
+ </param>
+ <param>
+ <key>minoutbuf</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>nchan</key>
+ <value>2</value>
+ </param>
+ <param>
+ <key>num_mboards</key>
+ <value>1</value>
+ </param>
+ <param>
+ <key>type</key>
+ <value>fc32</value>
+ </param>
+ <param>
+ <key>samp_rate</key>
+ <value>samp_rate</value>
+ </param>
+ <param>
+ <key>hide_cmd_port</key>
+ <value>False</value>
+ </param>
+ <param>
+ <key>hide_lo_controls</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>stream_args</key>
+ <value></value>
+ </param>
+ <param>
+ <key>stream_chans</key>
+ <value>[]</value>
+ </param>
+ <param>
+ <key>sync</key>
+ <value></value>
+ </param>
+ <param>
+ <key>otw</key>
+ <value></value>
+ </param>
+ </block>
+ <block>
+ <key>usrptest_phase_calc_ccf</key>
+ <param>
+ <key>alias</key>
+ <value></value>
+ </param>
+ <param>
+ <key>comment</key>
+ <value></value>
+ </param>
+ <param>
+ <key>affinity</key>
+ <value></value>
+ </param>
+ <param>
+ <key>_enabled</key>
+ <value>True</value>
+ </param>
+ <param>
+ <key>_coordinate</key>
+ <value>(568, 408)</value>
+ </param>
+ <param>
+ <key>_rotation</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>id</key>
+ <value>usrptest_phase_calc_ccf_0</value>
+ </param>
+ <param>
+ <key>maxoutbuf</key>
+ <value>0</value>
+ </param>
+ <param>
+ <key>minoutbuf</key>
+ <value>0</value>
+ </param>
+ </block>
+ <connection>
+ <source_block_id>analog_sig_source_x_1</source_block_id>
+ <sink_block_id>uhd_usrp_sink_1</sink_block_id>
+ <source_key>0</source_key>
+ <sink_key>0</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>analog_sig_source_x_1</source_block_id>
+ <sink_block_id>uhd_usrp_sink_1</sink_block_id>
+ <source_key>0</source_key>
+ <sink_key>1</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>uhd_usrp_source_0</source_block_id>
+ <sink_block_id>qtgui_time_sink_x_1</sink_block_id>
+ <source_key>0</source_key>
+ <sink_key>0</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>uhd_usrp_source_0</source_block_id>
+ <sink_block_id>usrptest_phase_calc_ccf_0</sink_block_id>
+ <source_key>0</source_key>
+ <sink_key>0</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>uhd_usrp_source_0</source_block_id>
+ <sink_block_id>qtgui_time_sink_x_1</sink_block_id>
+ <source_key>1</source_key>
+ <sink_key>1</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>uhd_usrp_source_0</source_block_id>
+ <sink_block_id>usrptest_phase_calc_ccf_0</sink_block_id>
+ <source_key>1</source_key>
+ <sink_key>1</sink_key>
+ </connection>
+ <connection>
+ <source_block_id>usrptest_phase_calc_ccf_0</source_block_id>
+ <sink_block_id>qtgui_time_sink_x_0</sink_block_id>
+ <source_key>0</source_key>
+ <sink_key>0</sink_key>
+ </connection>
+</flow_graph>
diff --git a/tools/gr-usrptest/grc/CMakeLists.txt b/tools/gr-usrptest/grc/CMakeLists.txt
new file mode 100644
index 000000000..47f494abf
--- /dev/null
+++ b/tools/gr-usrptest/grc/CMakeLists.txt
@@ -0,0 +1,23 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+install(FILES
+ usrptest_phase_calc_ccf.xml
+ DESTINATION share/gnuradio/grc/blocks
+)
diff --git a/tools/gr-usrptest/grc/usrptest_measurement_sink_f.xml b/tools/gr-usrptest/grc/usrptest_measurement_sink_f.xml
new file mode 100644
index 000000000..c681e457f
--- /dev/null
+++ b/tools/gr-usrptest/grc/usrptest_measurement_sink_f.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<block>
+ <name>measurement_sink_f</name>
+ <key>usrptest_measurement_sink_f</key>
+ <category>[usrptest]</category>
+ <import>import usrptest</import>
+ <make>usrptest.measurement_sink_f($num_samples, $runs)</make>
+ <!-- One 'param' node for every parameter settable from the GUI.
+ Sub-nodes: name, key (makes the value accessible as $keyname,
+ e.g. in the make node) and type -->
+ <param>
+ <name>Num Samples</name>
+ <key>num_samples</key>
+ <type>int</type>
+ </param>
+ <param>
+ <name>Runs</name>
+ <key>runs</key>
+ <type>int</type>
+ </param>
+
+ <!-- One 'sink' node per input. Sub-nodes: name, type, vlen, optional -->
+ <sink>
+ <name>in</name>
+ <type>float</type>
+ </sink>
+ <!-- The block consumes samples only (see measurement_sink_f_impl.cc), so no 'source' node is needed. -->
+</block>
diff --git a/tools/gr-usrptest/grc/usrptest_phase_calc_ccf.xml b/tools/gr-usrptest/grc/usrptest_phase_calc_ccf.xml
new file mode 100644
index 000000000..4c98f56eb
--- /dev/null
+++ b/tools/gr-usrptest/grc/usrptest_phase_calc_ccf.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<block>
+ <name>Calculate phase diff</name>
+ <key>usrptest_phase_calc_ccf</key>
+ <category>[usrptest]</category>
+ <import>import usrptest</import>
+ <make>usrptest.phase_calc_ccf()</make>
+ <sink>
+ <name>in1</name>
+ <type>complex</type>
+ </sink>
+ <sink>
+ <name>in2</name>
+ <type>complex</type>
+ </sink>
+ <source>
+ <name>out</name>
+ <type>float</type>
+ </source>
+</block>
diff --git a/tools/gr-usrptest/include/usrptest/CMakeLists.txt b/tools/gr-usrptest/include/usrptest/CMakeLists.txt
new file mode 100644
index 000000000..761a8bee4
--- /dev/null
+++ b/tools/gr-usrptest/include/usrptest/CMakeLists.txt
@@ -0,0 +1,26 @@
+# Copyright 2011,2012 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Install public header files
+########################################################################
+install(FILES
+ api.h
+ measurement_sink_f.h DESTINATION include/usrptest
+)
diff --git a/tools/gr-usrptest/include/usrptest/api.h b/tools/gr-usrptest/include/usrptest/api.h
new file mode 100644
index 000000000..0812606d5
--- /dev/null
+++ b/tools/gr-usrptest/include/usrptest/api.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2011 Free Software Foundation, Inc.
+ *
+ * This file is part of GNU Radio
+ *
+ * GNU Radio is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * GNU Radio is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GNU Radio; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef INCLUDED_USRPTEST_API_H
+#define INCLUDED_USRPTEST_API_H
+
+#include <gnuradio/attributes.h>
+
+#ifdef gnuradio_usrptest_EXPORTS
+# define USRPTEST_API __GR_ATTR_EXPORT
+#else
+# define USRPTEST_API __GR_ATTR_IMPORT
+#endif
+
+#endif /* INCLUDED_USRPTEST_API_H */
diff --git a/tools/gr-usrptest/include/usrptest/measurement_sink_f.h b/tools/gr-usrptest/include/usrptest/measurement_sink_f.h
new file mode 100644
index 000000000..aaf1fb9d1
--- /dev/null
+++ b/tools/gr-usrptest/include/usrptest/measurement_sink_f.h
@@ -0,0 +1,61 @@
+/* -*- c++ -*- */
+/*
+ * Copyright 2016 Ettus Research LLC.
+ *
+ * This is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this software; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef INCLUDED_USRPTEST_MEASUREMENT_SINK_F_H
+#define INCLUDED_USRPTEST_MEASUREMENT_SINK_F_H
+
+#include <usrptest/api.h>
+#include <gnuradio/sync_block.h>
+
+namespace gr {
+ namespace usrptest {
+
+ /*!
+ * \brief <+description of block+>
+ * \ingroup usrptest
+ *
+ */
+ class USRPTEST_API measurement_sink_f : virtual public gr::sync_block
+ {
+ public:
+ typedef boost::shared_ptr<measurement_sink_f> sptr;
+
+ /*!
+ * \brief Return a shared_ptr to a new instance of usrptest::measurement_sink_f.
+ *
+ * To avoid accidental use of raw pointers, usrptest::measurement_sink_f's
+ * constructor is in a private implementation
+ * class. usrptest::measurement_sink_f::make is the public interface for
+ * creating new instances.
+ */
+ static sptr make(int num_samples,int runs);
+ virtual std::vector<float> get_avg() const = 0;
+ virtual std::vector<float> get_stddev() const = 0;
+ virtual int get_run() const = 0;
+ virtual void start_run() = 0;
+
+ };
+
+ } // namespace usrptest
+} // namespace gr
+
+#endif /* INCLUDED_USRPTEST_MEASUREMENT_SINK_F_H */
+
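A minimal usage sketch of the interface declared in measurement_sink_f.h, assuming the usrptest Python bindings from this patch are built and installed; the vector source and the constant test data are illustrative only, not part of the patch:

    # Feed a constant stream into measurement_sink_f and read back the
    # per-run average and standard deviation.
    from gnuradio import gr, blocks
    import usrptest
    import time

    tb = gr.top_block()
    src = blocks.vector_source_f([1.0] * 10000, True)   # repeat the data forever
    sink = usrptest.measurement_sink_f(1000, 3)         # 1000 samples per run, 3 runs
    tb.connect(src, sink)

    tb.start()
    sink.start_run()               # arm the first measurement run
    while sink.get_run() < 1:      # wait until one run has completed
        time.sleep(0.1)
    tb.stop()
    tb.wait()

    print(sink.get_avg())          # one average per completed run
    print(sink.get_stddev())       # one standard deviation per completed run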
diff --git a/tools/gr-usrptest/lib/CMakeLists.txt b/tools/gr-usrptest/lib/CMakeLists.txt
new file mode 100644
index 000000000..10ba461e1
--- /dev/null
+++ b/tools/gr-usrptest/lib/CMakeLists.txt
@@ -0,0 +1,59 @@
+# Copyright 2011,2012,2016 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Setup library
+########################################################################
+include(GrPlatform) #define LIB_SUFFIX
+
+include_directories(${Boost_INCLUDE_DIR})
+link_directories(${Boost_LIBRARY_DIRS})
+
+list(APPEND usrptest_sources
+ measurement_sink_f_impl.cc
+)
+
+set(usrptest_sources "${usrptest_sources}" PARENT_SCOPE)
+if(NOT usrptest_sources)
+ MESSAGE(STATUS "No C++ sources... skipping lib/")
+ return()
+endif(NOT usrptest_sources)
+
+add_library(gnuradio-usrptest SHARED ${usrptest_sources})
+target_link_libraries(gnuradio-usrptest ${Boost_LIBRARIES} ${GNURADIO_ALL_LIBRARIES})
+set_target_properties(gnuradio-usrptest PROPERTIES DEFINE_SYMBOL "gnuradio_usrptest_EXPORTS")
+
+if(APPLE)
+ set_target_properties(gnuradio-usrptest PROPERTIES
+ INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib"
+ )
+endif(APPLE)
+
+########################################################################
+# Install built library files
+########################################################################
+include(GrMiscUtils)
+GR_LIBRARY_FOO(gnuradio-usrptest RUNTIME_COMPONENT "usrptest_runtime" DEVEL_COMPONENT "usrptest_devel")
+
+########################################################################
+# Print summary
+########################################################################
+message(STATUS "Using install prefix: ${CMAKE_INSTALL_PREFIX}")
+message(STATUS "Building for version: ${VERSION} / ${LIBVER}")
+
diff --git a/tools/gr-usrptest/lib/measurement_sink_f_impl.cc b/tools/gr-usrptest/lib/measurement_sink_f_impl.cc
new file mode 100644
index 000000000..58faccb9a
--- /dev/null
+++ b/tools/gr-usrptest/lib/measurement_sink_f_impl.cc
@@ -0,0 +1,124 @@
+/* -*- c++ -*- */
+/*
+ * Copyright 2016 Ettus Research LLC.
+ *
+ * This is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this software; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gnuradio/io_signature.h>
+#include "measurement_sink_f_impl.h"
+
+namespace gr {
+ namespace usrptest {
+
+ measurement_sink_f::sptr
+ measurement_sink_f::make(int num_samples, int runs)
+ {
+ return gnuradio::get_initial_sptr
+ (new measurement_sink_f_impl(num_samples, runs));
+
+ }
+
+ /*
+ * The private constructor
+ */
+ measurement_sink_f_impl::measurement_sink_f_impl(int num_samples, int runs)
+ : gr::sync_block("measurement_sink_f",
+ gr::io_signature::make(1, 1, sizeof(float)),
+ gr::io_signature::make(0, 0, 0)),
+ d_runs(runs),
+ d_nsamples(num_samples)
+ {
+ d_curr_run = 0; // number of completed measurement runs
+ d_curr_avg = 0.0f; // accumulated average
+ d_curr_M2 = 0.0f; // accumulated M2
+ d_run = false; // true if a measurement is currently recorded
+ d_curr_sample = 0; // current sample count
+ }
+
+ /*
+ * Our virtual destructor.
+ */
+ measurement_sink_f_impl::~measurement_sink_f_impl()
+ {
+ }
+
+ int
+ measurement_sink_f_impl::work(int noutput_items,
+ gr_vector_const_void_star &input_items,
+ gr_vector_void_star &output_items)
+ {
+ const float *in = (const float *) input_items[0];
+ if ((d_curr_run < d_runs)&&d_run){ //check if we need to record data
+ const int max_items = std::min(noutput_items, d_nsamples-d_curr_sample); // calculate number of samples we have to take into account
+ for (int item=0; item < max_items;++item){
+ ++d_curr_sample;
+ inc_both(in[item]);
+ }
+ if (d_curr_sample == d_nsamples) {
+ d_avg.push_back(d_curr_avg);
+ d_stddev.push_back(std::sqrt(d_curr_M2/(float)(d_curr_sample - 1)));
+ ++d_curr_run;
+ d_run = false;
+ d_curr_sample = 0;
+ d_curr_avg = 0.0f;
+ d_curr_M2 = 0.0f;
+ }
+ }
+ return noutput_items;
+ }
+
+
+ void
+ measurement_sink_f_impl::inc_both(const float new_val)
+ {
+ float delta = new_val - d_curr_avg;
+ d_curr_avg = d_curr_avg + delta/(float)(d_curr_sample);
+ d_curr_M2 = d_curr_M2 + delta*(new_val - d_curr_avg);
+ }
+
+
+ void
+ measurement_sink_f_impl::start_run()
+ {
+ d_run = true;
+ }
+
+ std::vector<float>
+ measurement_sink_f_impl::get_avg() const
+ {
+ return d_avg;
+ }
+
+ std::vector<float>
+ measurement_sink_f_impl::get_stddev() const
+ {
+ return d_stddev;
+ }
+
+ int
+ measurement_sink_f_impl::get_run() const
+ {
+ return d_curr_run;
+ }
+
+ } /* namespace usrptest */
+} /* namespace gr */
+
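The work()/inc_both() pair above keeps a running average and a running sum of squared differences (d_curr_M2), and reports the per-run standard deviation as sqrt(M2/(n-1)) once num_samples have been consumed. A standalone sketch of the same incremental update, checked against a plain batch computation (pure Python/NumPy, no GNU Radio required; variable names mirror the class members):

    import numpy as np

    def running_avg_stddev(samples):
        # Mirrors d_curr_sample, d_curr_avg and d_curr_M2 in measurement_sink_f_impl
        curr_sample = 0
        curr_avg = 0.0
        curr_m2 = 0.0
        for new_val in samples:
            curr_sample += 1                      # ++d_curr_sample happens before inc_both()
            delta = new_val - curr_avg
            curr_avg += delta / curr_sample
            curr_m2 += delta * (new_val - curr_avg)
        return curr_avg, np.sqrt(curr_m2 / (curr_sample - 1))

    data = np.random.normal(loc=0.5, scale=0.1, size=1000)
    avg, stddev = running_avg_stddev(data)
    # The streaming result agrees with the batch mean and sample standard deviation
    assert abs(avg - np.mean(data)) < 1e-9
    assert abs(stddev - np.std(data, ddof=1)) < 1e-9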
diff --git a/tools/gr-usrptest/lib/measurement_sink_f_impl.h b/tools/gr-usrptest/lib/measurement_sink_f_impl.h
new file mode 100644
index 000000000..4d63adfa2
--- /dev/null
+++ b/tools/gr-usrptest/lib/measurement_sink_f_impl.h
@@ -0,0 +1,62 @@
+/* -*- c++ -*- */
+/*
+ * Copyright 2016 Ettus Research LLC.
+ *
+ * This is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this software; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef INCLUDED_USRPTEST_MEASUREMENT_SINK_F_IMPL_H
+#define INCLUDED_USRPTEST_MEASUREMENT_SINK_F_IMPL_H
+
+#include <usrptest/measurement_sink_f.h>
+
+namespace gr {
+ namespace usrptest {
+
+ class measurement_sink_f_impl : public measurement_sink_f
+ {
+ private:
+ std::vector< float > d_avg;
+ std::vector< float > d_stddev;
+ bool d_run;
+ int d_runs;
+ int d_nsamples;
+ int d_curr_sample;
+ int d_curr_run;
+ float d_curr_avg;
+ float d_curr_M2;
+ void inc_both(const float new_val);
+
+
+ public:
+ measurement_sink_f_impl(int num_samples, int runs);
+ ~measurement_sink_f_impl();
+ std::vector<float> get_avg() const;
+ std::vector<float> get_stddev() const;
+ int get_run() const;
+ void start_run();
+
+ // Where all the action really happens
+ int work(int noutput_items,
+ gr_vector_const_void_star &input_items,
+ gr_vector_void_star &output_items);
+ };
+
+ } // namespace usrptest
+} // namespace gr
+
+#endif /* INCLUDED_USRPTEST_MEASUREMENT_SINK_F_IMPL_H */
+
diff --git a/tools/gr-usrptest/lib/qa_usrptest.cc b/tools/gr-usrptest/lib/qa_usrptest.cc
new file mode 100644
index 000000000..c8e6495ae
--- /dev/null
+++ b/tools/gr-usrptest/lib/qa_usrptest.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2012 Free Software Foundation, Inc.
+ *
+ * This file is part of GNU Radio
+ *
+ * GNU Radio is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * GNU Radio is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GNU Radio; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This class gathers together all the test cases for the gr-usrptest
+ * directory into a single test suite. As you create new test cases,
+ * add them here.
+ */
+
+#include "qa_usrptest.h"
+
+CppUnit::TestSuite *
+qa_usrptest::suite()
+{
+ CppUnit::TestSuite *s = new CppUnit::TestSuite("usrptest");
+
+ return s;
+}
diff --git a/tools/gr-usrptest/lib/qa_usrptest.h b/tools/gr-usrptest/lib/qa_usrptest.h
new file mode 100644
index 000000000..94ce3f33b
--- /dev/null
+++ b/tools/gr-usrptest/lib/qa_usrptest.h
@@ -0,0 +1,38 @@
+/* -*- c++ -*- */
+/*
+ * Copyright 2012 Free Software Foundation, Inc.
+ *
+ * This file is part of GNU Radio
+ *
+ * GNU Radio is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * GNU Radio is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GNU Radio; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _QA_USRPTEST_H_
+#define _QA_USRPTEST_H_
+
+#include <gnuradio/attributes.h>
+#include <cppunit/TestSuite.h>
+
+//! collect all the tests for the gr-usrptest directory
+
+class __GR_ATTR_EXPORT qa_usrptest
+{
+ public:
+ //! return suite of tests for all of gr-usrptest directory
+ static CppUnit::TestSuite *suite();
+};
+
+#endif /* _QA_USRPTEST_H_ */
diff --git a/tools/gr-usrptest/lib/test_usrptest.cc b/tools/gr-usrptest/lib/test_usrptest.cc
new file mode 100644
index 000000000..808638cdf
--- /dev/null
+++ b/tools/gr-usrptest/lib/test_usrptest.cc
@@ -0,0 +1,48 @@
+/* -*- c++ -*- */
+/*
+ * Copyright 2012 Free Software Foundation, Inc.
+ *
+ * This file is part of GNU Radio
+ *
+ * GNU Radio is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * GNU Radio is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GNU Radio; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <cppunit/TextTestRunner.h>
+#include <cppunit/XmlOutputter.h>
+
+#include <gnuradio/unittests.h>
+#include "qa_usrptest.h"
+#include <iostream>
+#include <fstream>
+
+int
+main (int argc, char **argv)
+{
+ CppUnit::TextTestRunner runner;
+ std::ofstream xmlfile(get_unittest_path("usrptest.xml").c_str());
+ CppUnit::XmlOutputter *xmlout = new CppUnit::XmlOutputter(&runner.result(), xmlfile);
+
+ runner.addTest(qa_usrptest::suite());
+ runner.setOutputter(xmlout);
+
+ bool was_successful = runner.run("", false);
+
+ return was_successful ? 0 : 1;
+}
diff --git a/tools/gr-usrptest/python/CMakeLists.txt b/tools/gr-usrptest/python/CMakeLists.txt
new file mode 100644
index 000000000..7bb812b0c
--- /dev/null
+++ b/tools/gr-usrptest/python/CMakeLists.txt
@@ -0,0 +1,49 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Include python install macros
+########################################################################
+include(GrPython)
+if(NOT PYTHONINTERP_FOUND)
+ return()
+endif()
+
+########################################################################
+# Install python sources
+########################################################################
+GR_PYTHON_INSTALL(
+ FILES
+ __init__.py
+ functions.py
+ phase_calc_ccf.py DESTINATION ${GR_PYTHON_DIR}/usrptest
+)
+
+add_subdirectory(flowgraphs)
+add_subdirectory(labview_control)
+add_subdirectory(rts_tests)
+
+########################################################################
+# Handle the unit tests
+########################################################################
+include(GrTest)
+
+set(GR_TEST_TARGET_DEPS gnuradio-usrptest)
+set(GR_TEST_PYTHON_DIRS ${CMAKE_BINARY_DIR}/swig)
+GR_ADD_TEST(qa_measurement_sink_f ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/qa_measurement_sink_f.py)
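GR_ADD_TEST above registers qa_measurement_sink_f.py with CTest. A minimal sketch of what such a gr_unittest QA script typically looks like; the actual qa_measurement_sink_f.py in this patch may differ in detail:

    from gnuradio import gr, gr_unittest, blocks
    import usrptest

    class qa_measurement_sink_f(gr_unittest.TestCase):
        def setUp(self):
            self.tb = gr.top_block()

        def tearDown(self):
            self.tb = None

        def test_001_constant_input(self):
            # One run of 1000 samples over a constant 0.5 input
            src = blocks.vector_source_f([0.5] * 2000, False)
            sink = usrptest.measurement_sink_f(1000, 1)
            self.tb.connect(src, sink)
            sink.start_run()
            self.tb.run()
            self.assertAlmostEqual(sink.get_avg()[0], 0.5, places=5)

    if __name__ == '__main__':
        gr_unittest.run(qa_measurement_sink_f, "qa_measurement_sink_f.xml")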
diff --git a/tools/gr-usrptest/python/__init__.py b/tools/gr-usrptest/python/__init__.py
new file mode 100644
index 000000000..f4265974a
--- /dev/null
+++ b/tools/gr-usrptest/python/__init__.py
@@ -0,0 +1,35 @@
+#
+# Copyright 2008,2009 Free Software Foundation, Inc.
+#
+# This application is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# This application is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+# The presence of this file turns this directory into a Python package
+
+'''
+This is the GNU Radio USRPTEST module. Place your Python package
+description here (python/__init__.py).
+'''
+
+# import swig generated symbols into the usrptest namespace
+try:
+ # this might fail if the module is python-only
+ from usrptest_swig import *
+except ImportError:
+ pass
+
+# import any pure python here
+from phase_calc_ccf import phase_calc_ccf
+#
diff --git a/tools/gr-usrptest/python/build_utils.py b/tools/gr-usrptest/python/build_utils.py
new file mode 100644
index 000000000..cf58a9763
--- /dev/null
+++ b/tools/gr-usrptest/python/build_utils.py
@@ -0,0 +1,226 @@
+#
+# Copyright 2004,2009,2012 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+
+"""Misc utilities used at build time
+"""
+
+import re, os, os.path
+from build_utils_codes import *
+
+
+# set srcdir to the directory that contains Makefile.am
+try:
+ srcdir = os.environ['srcdir']
+except KeyError, e:
+ srcdir = "."
+srcdir = srcdir + '/'
+
+# set do_makefile to either true or false depending on the environment
+try:
+ if os.environ['do_makefile'] == '0':
+ do_makefile = False
+ else:
+ do_makefile = True
+except KeyError, e:
+ do_makefile = False
+
+# set do_sources to either true or false depending on the environment
+try:
+ if os.environ['do_sources'] == '0':
+ do_sources = False
+ else:
+ do_sources = True
+except KeyError, e:
+ do_sources = True
+
+name_dict = {}
+
+def log_output_name (name):
+ (base, ext) = os.path.splitext (name)
+ ext = ext[1:] # drop the leading '.'
+
+ entry = name_dict.setdefault (ext, [])
+ entry.append (name)
+
+def open_and_log_name (name, dir):
+ global do_sources
+ if do_sources:
+ f = open (name, dir)
+ else:
+ f = None
+ log_output_name (name)
+ return f
+
+def expand_template (d, template_filename, extra = ""):
+ '''Given a dictionary D and a TEMPLATE_FILENAME, expand template into output file
+ '''
+ global do_sources
+ output_extension = extract_extension (template_filename)
+ template = open_src (template_filename, 'r')
+ output_name = d['NAME'] + extra + '.' + output_extension
+ log_output_name (output_name)
+ if do_sources:
+ output = open (output_name, 'w')
+ do_substitution (d, template, output)
+ output.close ()
+ template.close ()
+
+def output_glue (dirname):
+ output_makefile_fragment ()
+ output_ifile_include (dirname)
+
+def output_makefile_fragment ():
+ global do_makefile
+ if not do_makefile:
+ return
+# overwrite the source, which must be writable; this should have been
+# checked for beforehand in the top-level Makefile.gen.gen .
+ f = open (os.path.join (os.environ.get('gendir', os.environ.get('srcdir', '.')), 'Makefile.gen'), 'w')
+ f.write ('#\n# This file is machine generated. All edits will be overwritten\n#\n')
+ output_subfrag (f, 'h')
+ output_subfrag (f, 'i')
+ output_subfrag (f, 'cc')
+ f.close ()
+
+def output_ifile_include (dirname):
+ global do_sources
+ if do_sources:
+ f = open ('%s_generated.i' % (dirname,), 'w')
+ f.write ('//\n// This file is machine generated. All edits will be overwritten\n//\n')
+ files = name_dict.setdefault ('i', [])
+ files.sort ()
+ f.write ('%{\n')
+ for file in files:
+ f.write ('#include <%s>\n' % (file[0:-1] + 'h',))
+ f.write ('%}\n\n')
+ for file in files:
+ f.write ('%%include <%s>\n' % (file,))
+
+def output_subfrag (f, ext):
+ files = name_dict.setdefault (ext, [])
+ files.sort ()
+ f.write ("GENERATED_%s =" % (ext.upper ()))
+ for file in files:
+ f.write (" \\\n\t%s" % (file,))
+ f.write ("\n\n")
+
+def extract_extension (template_name):
+ # template name is something like: GrFIRfilterXXX.h.t
+ # we return everything between the penultimate . and .t
+ mo = re.search (r'\.([a-z]+)\.t$', template_name)
+ if not mo:
+ raise ValueError, "Incorrectly formed template_name '%s'" % (template_name,)
+ return mo.group (1)
+
+def open_src (name, mode):
+ global srcdir
+ return open (os.path.join (srcdir, name), mode)
+
+def do_substitution (d, in_file, out_file):
+ def repl (match_obj):
+ key = match_obj.group (1)
+ # print key
+ return d[key]
+
+ inp = in_file.read ()
+ out = re.sub (r"@([a-zA-Z0-9_]+)@", repl, inp)
+ out_file.write (out)
+
+
+
+copyright = '''/* -*- c++ -*- */
+/*
+ * Copyright 2003,2004 Free Software Foundation, Inc.
+ *
+ * This file is part of GNU Radio
+ *
+ * GNU Radio is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3, or (at your option)
+ * any later version.
+ *
+ * GNU Radio is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GNU Radio; see the file COPYING. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street,
+ * Boston, MA 02110-1301, USA.
+ */
+'''
+
+def is_complex (code3):
+ if i_code (code3) == 'c' or o_code (code3) == 'c':
+ return '1'
+ else:
+ return '0'
+
+
+def standard_dict (name, code3, package='gr'):
+ d = {}
+ d['NAME'] = name
+ d['NAME_IMPL'] = name+'_impl'
+ d['GUARD_NAME'] = 'INCLUDED_%s_%s_H' % (package.upper(), name.upper())
+ d['GUARD_NAME_IMPL'] = 'INCLUDED_%s_%s_IMPL_H' % (package.upper(), name.upper())
+ d['BASE_NAME'] = re.sub ('^' + package + '_', '', name)
+ d['SPTR_NAME'] = '%s_sptr' % name
+ d['WARNING'] = 'WARNING: this file is machine generated. Edits will be overwritten'
+ d['COPYRIGHT'] = copyright
+ d['TYPE'] = i_type (code3)
+ d['I_TYPE'] = i_type (code3)
+ d['O_TYPE'] = o_type (code3)
+ d['TAP_TYPE'] = tap_type (code3)
+ d['IS_COMPLEX'] = is_complex (code3)
+ return d
+
+
+def standard_dict2 (name, code3, package):
+ d = {}
+ d['NAME'] = name
+ d['BASE_NAME'] = name
+ d['GUARD_NAME'] = 'INCLUDED_%s_%s_H' % (package.upper(), name.upper())
+ d['WARNING'] = 'WARNING: this file is machine generated. Edits will be overwritten'
+ d['COPYRIGHT'] = copyright
+ d['TYPE'] = i_type (code3)
+ d['I_TYPE'] = i_type (code3)
+ d['O_TYPE'] = o_type (code3)
+ d['TAP_TYPE'] = tap_type (code3)
+ d['IS_COMPLEX'] = is_complex (code3)
+ return d
+
+def standard_impl_dict2 (name, code3, package):
+ d = {}
+ d['NAME'] = name
+ d['IMPL_NAME'] = name
+ d['BASE_NAME'] = name.rstrip("impl").rstrip("_")
+ d['GUARD_NAME'] = 'INCLUDED_%s_%s_H' % (package.upper(), name.upper())
+ d['WARNING'] = 'WARNING: this file is machine generated. Edits will be overwritten'
+ d['COPYRIGHT'] = copyright
+ d['FIR_TYPE'] = "fir_filter_" + code3
+ d['CFIR_TYPE'] = "fir_filter_" + code3[0:2] + 'c'
+ d['TYPE'] = i_type (code3)
+ d['I_TYPE'] = i_type (code3)
+ d['O_TYPE'] = o_type (code3)
+ d['TAP_TYPE'] = tap_type (code3)
+ d['IS_COMPLEX'] = is_complex (code3)
+ return d
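expand_template()/do_substitution() above do nothing more than replace @KEY@ markers in a .t template with entries from a dictionary such as the one returned by standard_dict(). A self-contained sketch of that substitution step; the one-line template string here is made up for illustration:

    import re

    # A made-up template fragment in the same @KEY@ style as the .t files
    template = "class @NAME@ : public @BASE_NAME@  // @WARNING@"

    d = {
        'NAME': 'usrptest_example_ff',
        'BASE_NAME': 'example_ff',
        'WARNING': 'WARNING: this file is machine generated. Edits will be overwritten',
    }

    def repl(match_obj):
        return d[match_obj.group(1)]

    print(re.sub(r"@([a-zA-Z0-9_]+)@", repl, template))
    # -> class usrptest_example_ff : public example_ff  // WARNING: ...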
diff --git a/tools/gr-usrptest/python/build_utils_codes.py b/tools/gr-usrptest/python/build_utils_codes.py
new file mode 100644
index 000000000..9ea96baae
--- /dev/null
+++ b/tools/gr-usrptest/python/build_utils_codes.py
@@ -0,0 +1,52 @@
+#
+# Copyright 2004 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+
+def i_code (code3):
+ return code3[0]
+
+def o_code (code3):
+ if len (code3) >= 2:
+ return code3[1]
+ else:
+ return code3[0]
+
+def tap_code (code3):
+ if len (code3) >= 3:
+ return code3[2]
+ else:
+ return code3[0]
+
+def i_type (code3):
+ return char_to_type[i_code (code3)]
+
+def o_type (code3):
+ return char_to_type[o_code (code3)]
+
+def tap_type (code3):
+ return char_to_type[tap_code (code3)]
+
+
+char_to_type = {}
+char_to_type['s'] = 'short'
+char_to_type['i'] = 'int'
+char_to_type['f'] = 'float'
+char_to_type['c'] = 'gr_complex'
+char_to_type['b'] = 'unsigned char'
diff --git a/tools/gr-usrptest/python/flowgraphs/CMakeLists.txt b/tools/gr-usrptest/python/flowgraphs/CMakeLists.txt
new file mode 100644
index 000000000..7ea94b505
--- /dev/null
+++ b/tools/gr-usrptest/python/flowgraphs/CMakeLists.txt
@@ -0,0 +1,28 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Install python sources
+########################################################################
+GR_PYTHON_INSTALL(
+ FILES
+ __init__.py
+ selftest_fg.py
+ phasealignment_fg.py DESTINATION ${GR_PYTHON_DIR}/usrptest/flowgraphs
+)
diff --git a/tools/gr-usrptest/python/flowgraphs/__init__.py b/tools/gr-usrptest/python/flowgraphs/__init__.py
new file mode 100644
index 000000000..048e5638a
--- /dev/null
+++ b/tools/gr-usrptest/python/flowgraphs/__init__.py
@@ -0,0 +1,14 @@
+"""
+usrptest.flowgraphs
+======================================
+
+Contents
+--------
+
+Subpackages
+-----------
+::
+
+The existence of this file turns the folder into a Python package.
+
+"""
diff --git a/tools/gr-usrptest/python/flowgraphs/phasealignment_fg.py b/tools/gr-usrptest/python/flowgraphs/phasealignment_fg.py
new file mode 100644
index 000000000..f120e2421
--- /dev/null
+++ b/tools/gr-usrptest/python/flowgraphs/phasealignment_fg.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python2
+
+from gnuradio import gr
+from gnuradio import uhd
+from gnuradio import analog
+from usrptest import phase_calc_ccf, measurement_sink_f
+from usrptest.functions import log_level
+from random import uniform
+from ast import literal_eval
+import copy
+import logging
+import sys
+
+
+class phasealignment_fg(gr.top_block):
+ def __init__(self, uhd_app):
+ gr.top_block.__init__(self, "Calculate dphi for all USRPs (Rx only)")
+ ##############################
+ # Block dicts
+ ##############################
+ self.log = logging.getLogger(__name__)
+ [self.log.removeHandler(h) for h in self.log.handlers]
+ self.log.addHandler(logging.StreamHandler(sys.stdout))
+ self.log.setLevel(log_level(uhd_app.args.log_level))
+ self.rx_streams = list()
+ self.phase_diff_calc = list()
+ self.measurement_sink = list()
+ self.uhd_app = copy.copy(uhd_app)
+ self.tx_app = copy.copy(uhd_app)
+ self.samp_rate = uhd_app.args.samp_rate
+ # Create all devices specified in --receiver
+ # Create all remaining blocks and connect devices to the first port and
+ # sink
+ self.uhd_app.args.num_chan = len(self.uhd_app.args.channels)
+ self.log.info('setting up usrp....')
+ ##############################
+ # Setup RX
+ ##############################
+ self.log.debug("RX-Setup with args: {}".format(self.uhd_app.args))
+ self.uhd_app.setup_usrp(uhd.usrp_source, self.uhd_app.args)
+ if self.uhd_app.args.measurement_setup is not None:
+ self.measurement_channels = self.uhd_app.args.measurement_setup.strip(
+ ).split(',')
+ # make sure every channel is listed in measurement_channels at least once
+ if len(set(
+ self.measurement_channels)) != self.uhd_app.args.num_chan:
+ self.uhd_app.vprint(
+ "[{prefix}] Number of measurement channels has to match the number of used channels."
+ )
+ self.uhd_app.exit(1)
+ self.measurement_channels = [self.uhd_app.args.channels.index(m) for m in self.measurement_channels]
+ else:
+ self.measurement_channels = range(self.uhd_app.args.num_chan)
+
+ self.measurement_channels_names = list()
+ for chan in self.measurement_channels:
+ usrp_info = self.uhd_app.usrp.get_usrp_info(chan)
+ self.measurement_channels_names.append("_".join(
+ [usrp_info['mboard_serial'], usrp_info['rx_serial']]))
+
+ #Connect channels to first port of d_phi_calc_block and to measurement_sink
+ for num, chan in enumerate(self.measurement_channels[:-1]):
+ self.phase_diff_calc.append(phase_calc_ccf())
+ self.measurement_sink.append(
+ measurement_sink_f(
+ int(self.uhd_app.args.samp_rate *
+ self.uhd_app.args.duration), self.uhd_app.args.runs))
+ self.connect((self.uhd_app.usrp, chan),
+ (self.phase_diff_calc[num], 0))
+ self.connect((self.phase_diff_calc[num], 0),
+ (self.measurement_sink[num], 0))
+ # Connect devices to second port of d_phi_block
+ for num, chan in enumerate(self.measurement_channels[1:]):
+ self.connect((self.uhd_app.usrp, chan),
+ (self.phase_diff_calc[num], 1))
+ ##############################
+ # Setup TX
+ ##############################
+ if self.uhd_app.args.tx_channels is not None:
+ self.tx_app.args.antenna = self.tx_app.args.tx_antenna
+ self.tx_app.args.channels = [
+ int(chan.strip())
+ for chan in self.tx_app.args.tx_channels.split(',')
+ ]
+ self.tx_app.usrp = None
+ self.log.debug("TX-Setup with args: {}".format(self.tx_app.args))
+ self.tx_app.setup_usrp(uhd.usrp_sink, self.tx_app.args)
+ self.siggen = analog.sig_source_c(self.samp_rate,
+ analog.GR_COS_WAVE,
+ self.tx_app.args.tx_offset, 1.0)
+ for chan in range(len(self.tx_app.channels)):
+ self.connect((self.siggen, 0), (self.tx_app.usrp, chan))
+
+
+ def get_samp_rate(self):
+ return self.samp_rate
+
+ def set_samp_rate(self, samp_rate):
+ self.samp_rate = samp_rate
+
+ def retune_frequency(self, band_num=1, bands=1):
+ ref_chan = self.uhd_app.channels[0]
+ freq_range = literal_eval(
+ self.uhd_app.usrp.get_freq_range(ref_chan).__str__(
+ )) # returns tuple with (start_freq, end_freq, step)
+
+ if bands > 1:
+ bw = (freq_range[1] - freq_range[0])/bands
+ freq_range = list(freq_range)
+ freq_range[0] = freq_range[0] + ((band_num-1) % bands)*bw
+ freq_range[1] = freq_range[0] + bw
+
+ retune_freq = uniform(freq_range[0], freq_range[1])
+ self.log.info('tune all channels to: {:f} MHz'.format(retune_freq /
+ 1e6))
+ self.uhd_app.set_freq(retune_freq)
+ self.log.info('tune all channels to: {:f} MHz'.format(
+ self.uhd_app.args.freq / 1e6))
+ self.uhd_app.set_freq(self.uhd_app.args.freq)
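retune_frequency() above splits the daughterboard's tuning range into equal bands and picks a random frequency inside the selected band. The same arithmetic isolated into a plain function (no UHD required; the example range is made up):

    from random import uniform

    def pick_retune_freq(freq_range, band_num=1, bands=1):
        # freq_range is (start_freq, end_freq, step), as parsed from get_freq_range()
        start, stop = freq_range[0], freq_range[1]
        if bands > 1:
            bw = (stop - start) / bands
            start = start + ((band_num - 1) % bands) * bw
            stop = start + bw
        return uniform(start, stop)

    # Example: the third of four bands across a hypothetical 70 MHz - 6 GHz range
    print(pick_retune_freq((70e6, 6e9, 1.0), band_num=3, bands=4))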
diff --git a/tools/gr-usrptest/python/flowgraphs/selftest_fg.py b/tools/gr-usrptest/python/flowgraphs/selftest_fg.py
new file mode 100644
index 000000000..cdfc35e74
--- /dev/null
+++ b/tools/gr-usrptest/python/flowgraphs/selftest_fg.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+from gnuradio import blocks
+from gnuradio import gr
+from gnuradio import uhd
+from usrptest import phase_calc_ccf
+from gnuradio.uhd.uhd_app import UHDApp
+import numpy as np
+
+class selftest_fg(gr.top_block):
+
+ def __init__(self, freq, samp_rate, dphase, devices=list()):
+ gr.top_block.__init__(self, "Generate Signal extract phase")
+
+ ##################################################
+ # Variables
+ ##################################################
+ self.samp_rate = samp_rate
+ self.freq = 10e3
+ self.devices = devices
+ self.dphase = dphase
+ self.tx_gain = 50
+ self.rx_gain = 50
+ self.center_freq = freq
+ self.omega = 2*np.pi*self.freq
+ self.steps = np.arange(
+ self.samp_rate)*float(self.omega)/float(self.samp_rate)
+ self.reference = self.complex_sine(self.steps)
+ self.test_signal = self.complex_sine(self.steps+0.5*np.pi)
+ self.device_test = False
+
+ ##################################################
+ # Block dicts
+ ##################################################
+ self.rx_devices = dict()
+ self.tx_devices = dict()
+ self.sink_dict = dict()
+ self.phase_dict = dict()
+ self.reference_source = blocks.vector_source_c(self.reference)
+
+ if len(self.devices):
+ ##################################################
+ # Devices
+ ##################################################
+ self.device_test = True
+ #To reuse existing setup_usrp() command
+ for device in self.devices:
+ # Create and configure all devices
+ self.rx_devices[device] = uhd.usrp_source(
+ device, uhd.stream_args(
+ cpu_format="fc32", channel=range(1)))
+ self.tx_devices[device] = uhd.usrp_sink(
+ device, uhd.stream_args(
+ cpu_format="fc32", channel=range(1)))
+ self.rx_devices[device].set_samp_rate(self.samp_rate)
+ self.rx_devices[device].set_center_freq(self.center_freq, 0)
+ self.rx_devices[device].set_gain(self.rx_gain, 0)
+ self.tx_devices[device].set_samp_rate(self.samp_rate)
+ self.tx_devices[device].set_center_freq(self.center_freq, 0)
+ self.tx_devices[device].set_gain(self.tx_gain, 0)
+ self.sink_dict[device] = blocks.vector_sink_f()
+ self.phase_dict[device] = phase_calc_ccf(
+ self.samp_rate, self.freq)
+ for device in self.tx_devices.values():
+ self.connect((self.reference_source, 0), (device, 0))
+
+ for device_key in self.rx_devices.keys():
+ self.connect(
+ (self.rx_devices[device_key], 0), (self.phase_dict[device_key], 0))
+ self.connect((self.reference_source, 0),
+ (self.phase_dict[device_key], 1))
+ self.connect(
+ (self.phase_dict[device_key], 0), (self.sink_dict[device_key], 0))
+ # Debug options
+ # self.sink_list.append(blocks.vector_sink_c())
+ #self.connect((device, 0), (self.sink_list[-1], 0))
+ # self.sink_list.append(blocks.vector_sink_c())
+ #self.connect((self.reference_source, 0), (self.sink_list[-1], 0))
+ else:
+ ##################################################
+ # Blocks
+ ##################################################
+ self.result = blocks.vector_sink_f(1)
+ self.test_source = blocks.vector_source_c(self.test_signal)
+ self.block_phase_calc = phase_calc_ccf(
+ self.samp_rate, self.freq)
+
+ ##################################################
+ # Connections
+ ##################################################
+ self.connect((self.reference_source, 0), (self.block_phase_calc, 1))
+ self.connect((self.test_source, 0), (self.block_phase_calc, 0))
+ self.connect((self.block_phase_calc, 0), (self.result, 0))
+
+    def complex_sine(self, steps):
+ return np.exp(1j*steps)
+
+ def get_samp_rate(self):
+ return self.samp_rate
+
+ def set_samp_rate(self, samp_rate):
+ self.samp_rate = samp_rate
+
+ def run(self):
+ self.start()
+ self.wait()
+ if self.device_test:
+ data = dict()
+ for device_key in self.sink_dict.keys():
+ curr_data = self.sink_dict[device_key].data()
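+                # Discard the first and last 0.2 s worth of samples so that
+                # startup transients do not bias the phase average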
+ curr_data = curr_data[int(0.2*self.samp_rate):-int(0.2*self.samp_rate)]
+ phase_avg = np.average(curr_data)
+ if (np.max(curr_data) < phase_avg+self.dphase*0.5) and (np.min(curr_data) > phase_avg-self.dphase*0.5):
+ data[device_key] = phase_avg
+ else:
+                    print("Error: phase not settled on device {}".format(device_key))
+
+ #Debug
+ # plt.ylim(-1, 1)
+ # plt.xlim(self.samp_rate/2.), (self.samp_rate/2.)+1000)
+ #for key in data:
+ # plt.plot(data[key])
+ return data
diff --git a/tools/gr-usrptest/python/functions.py b/tools/gr-usrptest/python/functions.py
new file mode 100644
index 000000000..2ce2ee451
--- /dev/null
+++ b/tools/gr-usrptest/python/functions.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python2
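+"""Helper functions for the gr-usrptest phase alignment tests: argument
+parser setup and measurement-run execution."""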
+import numpy as np
+import time
+import copy
+import logging
+
+
+def setup_phase_alignment_parser(parser):
+ test_group = parser.add_argument_group(
+ 'Phase alignment specific arguments')
+ test_group.add_argument(
+ '--runs',
+ default=10,
+ type=int,
+ help='Number of times to retune and measure d_phi')
+ test_group.add_argument(
+ '--duration',
+ default=5.0,
+ type=float,
+ help='Duration of a measurement run')
+ test_group.add_argument(
+ '--measurement-setup',
+ type=str,
+        help='Comma-separated list of channel ids. The phase difference will be calculated between consecutive channels. Default: (0,1,2,...,M-1), where M is the number of channels'
+ )
+ test_group.add_argument(
+ '--log-level',
+ type=str,
+ choices=["critical", "error", "warning", "info", "debug"],
+ default="info")
+ test_group.add_argument(
+ '--freq-bands',
+ type=int,
+ help="Number of frequency bands in daughterboard range to randomly retune to",
+ default=1)
+ return parser
+
+
+def setup_tx_phase_alignment_parser(parser):
+ tx_group = parser.add_argument_group(
+ 'TX Phase alignment specific arguments.')
+ tx_group.add_argument(
+ '--tx-channels', type=str, help='which channels to use')
+ tx_group.add_argument(
+ '--tx-antenna',
+ type=str,
+ help='comma-separated list of channel antennas for tx')
+ tx_group.add_argument(
+ '--tx-offset',
+ type=float,
+        help='frequency offset in Hz to add to the center frequency for transmission'
+ )
+ return parser
+
+
+def setup_rts_phase_alignment_parser(parser):
+ rts_group = parser.add_argument_group(
+ 'RTS Phase alignment specific arguments')
+ rts_group.add_argument(
+ '-pd',
+ '--phasedev',
+ type=float,
+ default=1.0,
+        help='maximum standard deviation of dphi within a run for it to be considered settled (in degrees)'
+ )
+ rts_group.add_argument(
+ '-dp',
+ '--dphi',
+ type=float,
+ default=2.0,
+ help='maximum allowed d_phase deviation between runs (in deg)')
+ rts_group.add_argument(
+ '--freqlist',
+ type=str,
+ help='comma-separated list of frequencies to test')
+ rts_group.add_argument(
+ '--lv-host',
+ type=str,
+ help='specify this argument if running tests with vst/switch')
+ rts_group.add_argument('--lv-vst-name', type=str, help='vst device name')
+ rts_group.add_argument(
+ '--lv-switch-name', type=str, help='executive switch name')
+ rts_group.add_argument(
+ '--lv-basepath',
+ type=str,
+ help='basepath for LabVIEW VIs on Windows')
+ rts_group.add_argument(
+ '--tx-offset',
+ type=float,
+ help='transmitter frequency offset in VST')
+ rts_group.add_argument(
+ '--lv-switch-ports', type=str, help='comma-separated switch-port pair')
+ return parser
+
+def setup_manual_phase_alignment_parser(parser):
+ manual_group = parser.add_argument_group(
+ 'Manual Phase alignment specific arguments')
+ manual_group.add_argument(
+ '--plot',
+ dest='plot',
+ action='store_true',
+ help='Set this argument to enable plotting results with matplotlib'
+ )
+ manual_group.add_argument(
+ '--auto',
+ action='store_true',
+ help='Set this argument to enable automatic selection of test frequencies'
+ )
+ manual_group.add_argument(
+ '--start-freq',
+ type=float,
+ default=0.0,
+ help='Start frequency for automatic selection'
+    )
+ manual_group.add_argument(
+ '--stop-freq',
+ type=float,
+ default=0.0,
+ help='Stop frequency for automatic selection')
+
+ parser.set_defaults(plot=False,auto=False)
+ return parser
+
+
+def process_measurement_sinks(top_block):
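+    """
+    Collect the results from all measurement sinks of top_block and return a
+    list of dicts with keys 'avg', 'stddev', 'first' and 'second', where
+    'first' and 'second' name the channels whose phase difference was measured.
+    """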
+ data = list()
+ curr_data = dict()
+ for num, chan in enumerate(top_block.measurement_channels[:-1]):
+ curr_data['avg'] = list(top_block.measurement_sink[num].get_avg())
+ curr_data['stddev'] = list(top_block.measurement_sink[num].get_stddev(
+ ))
+ curr_data['first'] = top_block.measurement_channels_names[num]
+ curr_data['second'] = top_block.measurement_channels_names[num + 1]
+ data.append(copy.copy(curr_data))
+ return data
+
+
+def run_test(top_block, ntimes):
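+    """
+    Execute ntimes measurement runs: retune to a random frequency and back,
+    trigger every measurement sink, wait until all sinks have finished the
+    current run, then collect the results via process_measurement_sinks().
+    """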
+ results = dict()
+ num_sinks = len(top_block.measurement_sink)
+ for i in xrange(ntimes):
+ #tune frequency to random position and back to specified frequency
+ top_block.retune_frequency(bands=top_block.uhd_app.args.freq_bands,band_num=i+1)
+ time.sleep(2)
+ #trigger start in all measurement_sinks
+ for sink in top_block.measurement_sink:
+ sink.start_run()
+ #wait until every measurement_sink is ready with the current run
+ while (sum([ms.get_run() for ms in top_block.measurement_sink]) < (
+ (i + 1) * num_sinks)):
+ time.sleep(1)
+ results = process_measurement_sinks(top_block)
+ return results
+
+
+def log_level(string):
+ return getattr(logging, string.upper())
diff --git a/tools/gr-usrptest/python/labview_control/CMakeLists.txt b/tools/gr-usrptest/python/labview_control/CMakeLists.txt
new file mode 100644
index 000000000..924df5479
--- /dev/null
+++ b/tools/gr-usrptest/python/labview_control/CMakeLists.txt
@@ -0,0 +1,28 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Install python sources
+########################################################################
+GR_PYTHON_INSTALL(
+ FILES
+ __init__.py
+    lv_control.py
+    DESTINATION ${GR_PYTHON_DIR}/usrptest/labview_control
+)
+
diff --git a/tools/gr-usrptest/python/labview_control/__init__.py b/tools/gr-usrptest/python/labview_control/__init__.py
new file mode 100644
index 000000000..28d2fe03b
--- /dev/null
+++ b/tools/gr-usrptest/python/labview_control/__init__.py
@@ -0,0 +1,14 @@
+"""
+usrptest.labview_control
+======================================
+
+Contents
+--------
+
+Subpackages
+-----------
+::
+
+The existence of this file turns the folder into a Python package.
+
+"""
diff --git a/tools/gr-usrptest/python/labview_control/lv_control.py b/tools/gr-usrptest/python/labview_control/lv_control.py
new file mode 100644
index 000000000..27407a07c
--- /dev/null
+++ b/tools/gr-usrptest/python/labview_control/lv_control.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
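+"""Thin wrappers around labview_automation's LabVIEWClient for remote control
+of a VST signal generator and an executive switch during phase tests."""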
+from labview_automation.client import LabVIEWClient
+
+class vst_siggen:
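+    """Controls a VST signal generator through LabVIEW VIs."""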
+ def __init__(self,host,vi_base_path,rio_device):
+ self._host = host
+ self._path = vi_base_path
+ self._rio = rio_device
+ self._caller = lv_caller(host,2552,vi_base_path)
+
+ def __del__(self):
+ self.disconnect()
+
+ def set_freq(self, freq):
+ self._caller.vst_set_freq(self._rio,freq)
+
+ def disconnect(self):
+ try:
+ self._caller.vst_disconnect(self._rio)
+        except Exception:
+ pass
+
+
+class executive_switch:
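+    """Controls an executive switch through LabVIEW VIs."""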
+ def __init__(self,host,vi_base_path,device_name):
+ self._host = host
+ self._path = vi_base_path
+ self._device = device_name
+ self._caller = lv_caller(host,2552,vi_base_path)
+
+ def __del__(self):
+ self.disconnect_all()
+
+ def connect_ports(self, port0, port1):
+ self._caller.switch_connect_ports(self._device,port0,port1)
+
+ def disconnect_all(self):
+ try:
+ self._caller.switch_disconnect_all(self._device)
+        except Exception:
+ pass
+
+
+
+class lv_caller:
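+    """Runs the individual LabVIEW VIs synchronously on the remote host."""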
+ def __init__(self, host, port, vi_base_path):
+ self._host = host
+ self._port = port
+ self._client = LabVIEWClient(host, port)
+ self._path = vi_base_path
+
+ def vst_disconnect(self, rio_device):
+ with self._client as c:
+ control_values = {
+ "rio_device": rio_device,
+ }
+ result = c.run_vi_synchronous("".join([self._path,"vst_disconnect.vi"]),control_values)
+ return result
+
+ def vst_set_freq(self, rio_device, freq):
+ with self._client as c:
+ control_values = {
+ "rio_device": rio_device,
+ "cw_freq": freq,
+ "power": 0,
+ }
+ result = c.run_vi_synchronous("".join([self._path,"vst_set_freq.vi"]),control_values)
+ return result
+
+ def switch_connect_ports(self, switch_device, port0, port1):
+ with self._client as c:
+ control_values = {
+ "virtual_switch": switch_device,
+ "chan0": port0,
+ "chan1": port1,
+ }
+ result = c.run_vi_synchronous("".join([self._path,"switch_connect_ports.vi"]),control_values)
+ return result
+
+
+ def switch_disconnect_all(self, switch_device):
+ with self._client as c:
+ control_values = {
+ "virtual_switch": switch_device,
+ }
+ result = c.run_vi_synchronous("".join([self._path,"switch_disconnect.vi"]),control_values)
+ return result
diff --git a/tools/gr-usrptest/python/phase_calc_ccf.py b/tools/gr-usrptest/python/phase_calc_ccf.py
new file mode 100644
index 000000000..fe3cf55a8
--- /dev/null
+++ b/tools/gr-usrptest/python/phase_calc_ccf.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright 2016 Ettus Research LLC.
+#
+# This is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# This software is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this software; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+
+from gnuradio import gr
+from gnuradio import blocks
+import numpy as np
+
+
+class phase_calc_ccf(gr.hier_block2):
+ """
+    Hier block that outputs the phase difference (in degrees) between its
+    two complex input streams. samp_rate and freq are accepted for
+    compatibility with the callers in selftest_fg but are not used by the
+    dataflow itself.
+    """
+
+    def __init__(self, samp_rate=None, freq=None):
+ gr.hier_block2.__init__(
+ self,
+ "phase_calc_ccf",
+ gr.io_signature(2, 2, gr.sizeof_gr_complex), # Input signature
+ gr.io_signature(1, 1, gr.sizeof_float)) # Output signature
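+        # Conjugate-multiply the two inputs, take the argument of the product
+        # and scale it from radians to degrees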
+ self.block = dict()
+ self.block['mult_conj'] = blocks.multiply_conjugate_cc()
+ self.block['arg'] = blocks.complex_to_arg()
+ self.block['mult_const'] = blocks.multiply_const_ff(180.0 / np.pi)
+
+ self.connect((self, 0), (self.block['mult_conj'], 0))
+ self.connect((self, 1), (self.block['mult_conj'], 1))
+ self.connect((self.block['mult_conj'], 0), (self.block['arg'], 0))
+ self.connect((self.block['arg'], 0), (self.block['mult_const'], 0))
+ self.connect((self.block['mult_const'], 0), (self, 0))
diff --git a/tools/gr-usrptest/python/qa_measurement_sink_f.py b/tools/gr-usrptest/python/qa_measurement_sink_f.py
new file mode 100755
index 000000000..ceb9913ae
--- /dev/null
+++ b/tools/gr-usrptest/python/qa_measurement_sink_f.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright 2016 Ettus Research LLC.
+#
+# This is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# This software is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this software; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+
+from gnuradio import gr, gr_unittest
+from gnuradio import blocks
+import usrptest_swig as usrptest
+
+class qa_measurement_sink_f (gr_unittest.TestCase):
+
+ def setUp (self):
+ self.tb = gr.top_block ()
+
+ def tearDown (self):
+ self.tb = None
+
+ def test_001_t (self):
+ # set up fg
+ self.tb.run ()
+ # check data
+
+
+if __name__ == '__main__':
+ gr_unittest.run(qa_measurement_sink_f, "qa_measurement_sink_f.xml")
diff --git a/tools/gr-usrptest/python/rts_tests/CMakeLists.txt b/tools/gr-usrptest/python/rts_tests/CMakeLists.txt
new file mode 100644
index 000000000..03111a9bb
--- /dev/null
+++ b/tools/gr-usrptest/python/rts_tests/CMakeLists.txt
@@ -0,0 +1,33 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Install python sources
+########################################################################
+GR_PYTHON_INSTALL(
+ FILES
+ __init__.py
+    test_phasealignment.py
+    DESTINATION ${GR_PYTHON_DIR}/usrptest/rts_tests
+)
+
+GR_PYTHON_INSTALL(
+ PROGRAMS
+ test_phasealignment.py
+ DESTINATION bin
+)
diff --git a/tools/gr-usrptest/python/rts_tests/__init__.py b/tools/gr-usrptest/python/rts_tests/__init__.py
new file mode 100644
index 000000000..048e5638a
--- /dev/null
+++ b/tools/gr-usrptest/python/rts_tests/__init__.py
@@ -0,0 +1,14 @@
+"""
+usrptest.rts_tests
+======================================
+
+Contents
+--------
+
+Subpackages
+-----------
+::
+
+The existence of this file turns the folder into a Python package.
+
+"""
diff --git a/tools/gr-usrptest/python/rts_tests/test_phasealignment.py b/tools/gr-usrptest/python/rts_tests/test_phasealignment.py
new file mode 100755
index 000000000..12856bbdb
--- /dev/null
+++ b/tools/gr-usrptest/python/rts_tests/test_phasealignment.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright 2016 Ettus Research LLC.
+#
+# This is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# This software is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this software; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+#
+import unittest
+from tinydb import TinyDB, Query
+from usrptest.flowgraphs import phasealignment_fg
+from usrptest.functions import setup_phase_alignment_parser, setup_tx_phase_alignment_parser, setup_rts_phase_alignment_parser, run_test, log_level
+from usrptest.labview_control import lv_control
+from gnuradio.uhd.uhd_app import UHDApp
+import logging
+import sys
+import numpy as np
+import argparse
+import time
+import copy
+
+
+class gr_usrp_test(unittest.TestCase):
+ def __init__(self, methodName='runTest', args=None):
+ super(gr_usrp_test, self).__init__(methodName)
+ self.args = args
+
+
+class qa_phasealignment(gr_usrp_test):
+ def setUp(self):
+ time.sleep(15) #Wait for devices to settle, just in case
+ if self.args.lv_host is not None:
+ self.siggen = lv_control.vst_siggen(self.args.lv_host,
+ self.args.lv_basepath,
+ self.args.lv_vst_name)
+ self.switch = lv_control.executive_switch(self.args.lv_host,
+ self.args.lv_basepath,
+ self.args.lv_switch_name)
+ self.siggen.set_freq(self.args.freq + self.args.tx_offset)
+ self.switch.connect_ports(
+ *self.args.lv_switch_ports.strip().split(','))
+
+ self.uhd_app = UHDApp(args=self.args)
+ self.log = logging.getLogger("test_phasealignment")
+ self.tb = phasealignment_fg.phasealignment_fg(self.uhd_app)
+ self.db = TinyDB('phase_db.json')
+
+ def tearDown(self):
+ self.uhd_app = None
+ self.tb = None
+        if self.args.lv_host is not None:
+ self.siggen.disconnect()
+ self.switch.disconnect_all()
+
+ def test_001(self):
+ self.tb.start()
+ time.sleep(2)
+ results = run_test(
+ self.tb,
+ self.args.runs) # dict key:dev, value: dict{dphase:[],stddev:[]}
+ time.sleep(1)
+ #self.tb.stop()
+ #self.tb.wait()
+ self.time_stamp = time.strftime('%Y%m%d%H%M')
+ self.passed = True
+ for result in results:
+ fdev = result['first']
+ sdev = result['second']
+ self.log.info('Comparing values for phase difference between {} and {}'.
+ format(fdev, sdev))
+ dphase_list = result['avg']
+ dev_list = result['stddev']
+ dphase = np.average(dphase_list)
+ dev = np.average(dev_list)
+ ref_meas = get_reference_meas(self.db, fdev, sdev, self.args.freq)
+            passed = True
+            for dphase_i in dphase_list:
+                if abs(dphase_i - dphase) > self.args.dphi and passed:
+                    self.log.info(
+                        '\t dPhase of a measurement_run differs from average dphase. dphase_run: {}, dphase_avg: {}'.
+ format(dphase_i, dphase))
+ passed = False
+ if dev > self.args.phasedev:
+ self.log.info('\t dPhase deviates during measurement. stddev: {}'.
+ format(dev))
+ passed = False
+ if ref_meas:
+ if abs(ref_meas['dphase'] - dphase) > self.args.dphi:
+ self.log.info(
+ '\t dPhase differs from reference measurement. Now: {}, reference: {}'.
+ format(dphase, ref_meas['dphase']))
+ if not passed:
+ self.passed = False
+ else:
+ self.db.insert({
+ 'dev1': fdev,
+ 'dev2': sdev,
+ 'timestamp': self.time_stamp,
+ 'dphase': dphase,
+ 'dphase_dev': dev,
+ 'freq': self.args.freq
+ })
+ self.tb.stop()
+ self.tb.wait()
+ self.assertTrue(self.passed)
+
+
+def get_previous_meas(db, dev1, dev2, freq):
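+    """Return the most recent stored measurement for this device pair and
+    frequency, or an empty dict if none exists."""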
+ meas = Query()
+ results = db.search((meas.dev1 == dev1) & (meas.dev2 == dev2) & (meas.freq
+ == freq))
+ prev_result = dict()
+ if results:
+ prev_result = results[0]
+ for result in results:
+ if result['timestamp'] > prev_result['timestamp']:
+ prev_result = result
+ return prev_result
+
+
+def get_reference_meas(db, dev1, dev2, freq):
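+    """Return the oldest stored (reference) measurement for this device pair
+    and frequency, or an empty dict if none exists."""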
+ meas = Query()
+ results = db.search((meas.dev1 == dev1) & (meas.dev2 == dev2) & (meas.freq
+ == freq))
+ ref_result = dict()
+ if results:
+ ref_result = results[0]
+ for result in results:
+ if result['timestamp'] < ref_result['timestamp']:
+ ref_result = result
+ return ref_result
+
+
+if __name__ == '__main__':
+ # parse all arguments
+ parser = argparse.ArgumentParser(conflict_handler='resolve')
+ parser = setup_phase_alignment_parser(parser)
+ parser = setup_tx_phase_alignment_parser(parser)
+ parser = setup_rts_phase_alignment_parser(parser)
+ logging.basicConfig(stream=sys.stdout)
+ UHDApp.setup_argparser(parser=parser)
+ args = parser.parse_args()
+ logging.getLogger("test_phasealignment").setLevel(log_level(args.log_level))
+
+ freqlist = args.freqlist.strip().split(',')
+
+ def make_suite(testcase_class, freqlist):
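+        # Build a suite that runs every test method once per frequency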
+ testloader = unittest.TestLoader()
+ testnames = testloader.getTestCaseNames(testcase_class)
+ suite = unittest.TestSuite()
+ for name in testnames:
+ for freq in freqlist:
+ test_args = copy.deepcopy(args)
+ test_args.freq = float(freq)
+ suite.addTest(testcase_class(name, args=test_args))
+ return suite
+
+ # Add tests.
+ alltests = unittest.TestSuite()
+ alltests.addTest(make_suite(qa_phasealignment, freqlist))
+ result = unittest.TextTestRunner(verbosity=2).run(alltests) # Run tests.
+ sys.exit(not result.wasSuccessful())
diff --git a/tools/gr-usrptest/python/setup.py b/tools/gr-usrptest/python/setup.py
new file mode 100644
index 000000000..6969f413d
--- /dev/null
+++ b/tools/gr-usrptest/python/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+setup(name='usrptest_automation',
+ version='0.0.1',
+ description='usrptest integration into RTS and Labview',
+ packages=['usrptest_automation'],
+ install_requires=['labview-automation>=15.0.0.dev1','hoplite>=15.0.0.dev1']
+ )
diff --git a/tools/gr-usrptest/swig/CMakeLists.txt b/tools/gr-usrptest/swig/CMakeLists.txt
new file mode 100644
index 000000000..ae42be628
--- /dev/null
+++ b/tools/gr-usrptest/swig/CMakeLists.txt
@@ -0,0 +1,65 @@
+# Copyright 2011 Free Software Foundation, Inc.
+#
+# This file is part of GNU Radio
+#
+# GNU Radio is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GNU Radio is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Radio; see the file COPYING. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street,
+# Boston, MA 02110-1301, USA.
+
+########################################################################
+# Check if there is C++ code at all
+########################################################################
+if(NOT usrptest_sources)
+ MESSAGE(STATUS "No C++ sources... skipping swig/")
+ return()
+endif(NOT usrptest_sources)
+
+########################################################################
+# Include swig generation macros
+########################################################################
+find_package(SWIG)
+find_package(PythonLibs 2)
+if(NOT SWIG_FOUND OR NOT PYTHONLIBS_FOUND)
+ return()
+endif()
+include(GrSwig)
+include(GrPython)
+
+########################################################################
+# Setup swig generation
+########################################################################
+foreach(incdir ${GNURADIO_RUNTIME_INCLUDE_DIRS})
+ list(APPEND GR_SWIG_INCLUDE_DIRS ${incdir}/gnuradio/swig)
+endforeach(incdir)
+
+set(GR_SWIG_LIBRARIES gnuradio-usrptest)
+set(GR_SWIG_DOC_FILE ${CMAKE_CURRENT_BINARY_DIR}/usrptest_swig_doc.i)
+set(GR_SWIG_DOC_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../include)
+
+GR_SWIG_MAKE(usrptest_swig usrptest_swig.i)
+
+########################################################################
+# Install the built swig module
+########################################################################
+GR_SWIG_INSTALL(TARGETS usrptest_swig DESTINATION ${GR_PYTHON_DIR}/usrptest)
+
+########################################################################
+# Install swig .i files for development
+########################################################################
+install(
+ FILES
+ usrptest_swig.i
+ ${CMAKE_CURRENT_BINARY_DIR}/usrptest_swig_doc.i
+ DESTINATION ${GR_INCLUDE_DIR}/usrptest/swig
+)
diff --git a/tools/gr-usrptest/swig/usrptest_swig.i b/tools/gr-usrptest/swig/usrptest_swig.i
new file mode 100644
index 000000000..273b7c866
--- /dev/null
+++ b/tools/gr-usrptest/swig/usrptest_swig.i
@@ -0,0 +1,16 @@
+/* -*- c++ -*- */
+
+#define USRPTEST_API
+
+%include "gnuradio.i" // the common stuff
+
+//load generated python docstrings
+%include "usrptest_swig_doc.i"
+
+%{
+#include "usrptest/measurement_sink_f.h"
+%}
+
+
+%include "usrptest/measurement_sink_f.h"
+GR_SWIG_BLOCK_MAGIC2(usrptest, measurement_sink_f);
diff --git a/tools/kitchen_sink/kitchen_sink.cpp b/tools/kitchen_sink/kitchen_sink.cpp
index e17ee6437..818146927 100644
--- a/tools/kitchen_sink/kitchen_sink.cpp
+++ b/tools/kitchen_sink/kitchen_sink.cpp
@@ -35,7 +35,6 @@
#include <boost/thread/mutex.hpp>
#include <boost/thread/condition_variable.hpp>
#include <csignal>
-#include <uhd/utils/msg.hpp>
namespace po = boost::program_options;