from pyMez import *
If a summary of the imported modules and the time taken to import them appears, then the variables in pyMez/__init__.py
VERBOSE_IMPORT=True
TIMED_IMPORT=True
are set to True. To remove this diagnostic output, set these two variables to False.
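For example, a minimal sketch of turning the diagnostics off, assuming the flags are plain module-level constants near the top of pyMez/__init__.py as shown above:
VERBOSE_IMPORT=False   # suppress the module-by-module import listing
TIMED_IMPORT=False     # suppress the per-module import timing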
However, if a specific function or class is required, then a direct import also works (this form still imports the full API)
from pyMez.Code.DataHandlers.XMLModels import XMLBase
If you want the import to skip the API, add the pyMez folder to sys.path and then import beginning with Code
import sys
sys.path.append(r"C:\ProgramData\Anaconda2\Lib\site-packages\pyMez")
from Code.DataHandlers.XMLModels import XMLBase
# Here is the import statement for the base API. I have tried to include the most common things and exclude any
# slow-loading modules.
from pyMez import *
# That is by no means all of the available modules. To get a listing of all of them, print the constant API_MODULES
keys=sorted(API_MODULES.keys())
for key in keys:
    print("{0}:{1}".format(key,API_MODULES[key]))
# The other modules that I tend to use a lot but import slowly are
# Simple fits
from pyMez.Code.Analysis.Fitting import *
# Interpolation
from pyMez.Code.Analysis.Interpolation import *
# Scattering parameter analysis
from pyMez.Code.Analysis.SParameter import *
# Data transformations mostly for sparameters / waveparameters
from pyMez.Code.Analysis.Transformations import *
# Data translations
from pyMez.Code.DataHandlers.Translations import *
# The metadata structures based on directed graphs
from pyMez.Code.DataHandlers.GraphModels import *
# The StatistiCAL and MUF wrappers
from pyMez.Code.DataHandlers.MUFModels import *
from pyMez.Code.DataHandlers.StatistiCALModels import *
There are many modules in the DataHandlers subpackage, which lives in the pyMez/Code/DataHandlers directory. The primary motivation of this subpackage is to create models for data manipulation and aggregation. The major data types of interest are ASCII-based tables, XML, HTML, touchstone files, and specialty models such as zip, StatistiCAL, and MUF files. There are several major ideas:
In the history of computing in science there have been countless formats for data, and each has its own merits; the best format is the one that accomplishes its goals for the particular task.
For ASCII tables we use the class pyMez.Code.DataHandlers.GeneralModels.AsciiDataTable
The basic structure is: <img src="./pyMez_Tour_Files/AsciiDataTable_Structure.png" width=50%/> It has many options and a lot of ways of building it.
# AsciiDataTable is a data type that is meant to be self-documenting and to handle common data that is small and non-uniform
# It has places for metadata and configurable formatting
# to build it from scratch we can pass an options dictionary containing the data, column names, and column types
options={}
options["data"]=[[i+j for i in range(3)] for j in range(3) ]
options["column_types"]=["int","float","str"]
options["column_names"]=["a","b","c"]
options["header"]=["Data For Demonstatration","A 3x3 Matrix"]
options["footer"]=["Don't use footers they suck"]
options["metadata"]={"notes":"This is an example table"}
data_table=AsciiDataTable(**options)
# now we can access the text version as a string using print
print(data_table)
# we can save the data table and reopen it.
# the default location is the current working directory with an auto-named file
# the full path is stored in the attribute path
data_table.path
# now we have a metadata dictionary that follows the table around
data_table.metadata["notes"]
# to save as a different file, we just pass the path we want to the save method
data_table.save(os.path.join(os.getcwd(),"pyMez_Tour_Files/test_data_table.txt"))
# to open it again we can use the class and the path
reopen=AsciiDataTable(os.path.join(os.getcwd(),"pyMez_Tour_Files/test_data_table.txt"))
print(reopen)
# The metadata dictionary is still there
reopen.metadata["notes"]
# now we can format the file any way we want
reopen.options["column_names_begin_token"]="!"
reopen.options["data_delimiter"]="\t"
reopen.options["comment_begin"]="!"
reopen.options["comment_end"]=""
reopen.options["header_line_types"]=["normal","comment"]
print(reopen)
# I personally think footers are not the best idea so we can move the footer to the header
reopen.move_footer_to_header()
reopen.options["header_line_types"].append("comment")
print(reopen)
# now we can save and reopen again
reopen.save("./pyMez_Tour_Files/Reopen.txt")
rereopen=AsciiDataTable("./pyMez_Tour_Files/Reopen.txt")
print(rereopen)
# our metadata is still there because it is getting saved in the schema or reopen.options["metadata"]
rereopen.metadata
# it works somewhat like a pandas DataFrame, but the fundamental data type is a list, not a numpy array
reopen["a"]
# this means that adding columns together concatenates the lists, like this
new_list=reopen["a"]+reopen["b"]+reopen["c"]
print(new_list)
# all the data is in the data attribute
rereopen.data
# you can also get the data as a list of dictionaries
rereopen.get_data_dictionary_list()
# or get a row
rereopen.get_row(2)
# or get the unique values in a column
rereopen.get_unique_column_values("a")
# there is the ability to use a row formatter
row_formatter="{0} Bannnas are {1:03.3f} but not {2}"
rereopen.options["row_formatter_string"]=row_formatter
print(rereopen)
# to make your own class, inherit from AsciiDataTable and add special methods
class NewDataClass(AsciiDataTable):
    """Same as an AsciiDataTable but plots the first column using .show()"""
    def show(self):
        plt.plot(self.get_column(column_index=0))
        plt.show()
new_data=NewDataClass(os.path.join(os.getcwd(),"pyMez_Tour_Files/test_data_table.txt"))
new_data.show()
# If you do data analysis using pandas, we can transform the AsciiDataTable to a pandas DataFrame, or, if we
# want to preserve the header, to a dictionary of pandas DataFrames
from pyMez.Code.DataHandlers.Translations import *
pandas_df=AsciiDataTable_to_DataFrame(data_table)
pandas_dictionary=AsciiDataTable_to_DataFrameDictionary(data_table)
pandas_df
pandas_dictionary["Data"]
pandas_dictionary["Header"]
pandas_dictionary["Footer"]
The touchstone formats are for scattering parameters of any number of ports. There are 3 basic classes,
S1PV1, S2PV1, and SNP, and they are in the module pyMez.Code.DataHandlers.TouchstoneModels
. You can use the SNP class to open any touchstone file with 2 or more ports.
s1p=S1PV1("./pyMez_Tour_Files/load.s1p")
s2p=S2PV1("./pyMez_Tour_Files/thru.s2p")
s4p=SNP("./pyMez_Tour_Files/Solution_0.s4p")
# the touchstone family has a data format option ("RI","MA","DB")
# the classes have a very similar interface to AsciiDataTable
# except that they all have a show method that plots the data
s1p.show();
s1p.column_names
s1p["reS11"]
# because of the large number of parameters for multi-port devices, the traces are clustered
s4p.show(display_legend=False);
# There are two attributes that contain the sparameters: .data holds them in tabular form,
# and .sparameter_complex stores them as complex numbers
print(s1p.sparameter_complex)
print(s1p.data)
# to change the data format use
s1p.change_data_format("MA")
print(s1p)
s1p.change_data_format("RI")
print(s1p)
XML and HTML are markup languages used extensively on the web. We use them for reporting and data storage. The modules pyMez.Code.DataHandlers.XMLModels
and pyMez.Code.DataHandlers.HTMLModels
have most of these models; they are loaded in the base API. However, two analysis modules, pyMez.Code.Analysis.Reports
and pyMez.Code.Analysis.ProgramAnalysis
, contain related classes and functions. The folder pyMez/Code/DataHandlers/XSL has all of the style sheets.
# A simple xml log
xml_log=XMLLog()
# it can add an entry
xml_log.add_entry("This is an entry")
print(xml_log)
xml_log.save("./pyMez_Tour_Files/log.xml")
xml_reopen_log=XMLLog("./pyMez_Tour_Files/log.xml")
print(xml_reopen_log)
# This is an XML data table created from the AsciiDataTable
xml_data_table=AsciiDataTable_to_XmlDataTable(data_table)
print(xml_data_table)
# now we can change this to html using an XSL style sheet
html_data_table=xml_data_table.to_HTML(os.path.join(TESTS_DIRECTORY,"../XSL/DEFAULT_MEASUREMENT_STYLE.xsl"))
print(html_data_table)
# if we want to store it we can use save_HTML or load it into the HTMLBase class
html_data_table_2=HTMLBase(html_text=html_data_table)
html_data_table_2.show()
# or for a more interactive example, we can use a translation from s2p to xml and then to html and show it
xml_s2p=S2PV1_to_XmlDataTable(s2p,format="MA")
html_s2p=HTMLBase(html_text=xml_s2p.to_HTML(os.path.join(TESTS_DIRECTORY,"../XSL/S2P_MA_STYLE.xsl")))
html_s2p.show()
# the HTMLReport Class is a descendant of HTMLBase but has the added ability to embed images
from pyMez.Code.Analysis.Reports import HTMLReport
html_report=HTMLReport(None,html_text=html_data_table)
# now we want to add the image to the report
html_report.embedd_image_figure(image=s2p.show(silent=True),image_mode="MatplotlibFigure",
caption="A Plot of Table Data",figure_id="Figure1")
html_report.show()
help(html_report.embedd_image_figure)
# we can also add elements one by one
html_report.append_to_body("<h1>Title</h1>")
html_report.show()
# or add a log
html_log=HTMLBase(html_text=xml_log.to_HTML(os.path.join(TESTS_DIRECTORY,"../XSL/DEFAULT_LOG_STYLE.xsl")))
html_report+html_log
html_report.show()
Meta Models are based on a series of translations found in pyMez.Code.DataHandlers.Translations
and the models are in pyMez.Code.DataHandlers.GraphModels
. They are meant to be integrated into a Universal Data Translator.
# these modules are not loaded in the base API
from pyMez.Code.DataHandlers.GraphModels import *
# the meta models I use the most are TableGraph, MetadataGraph and ImageGraph
image_graph=ImageGraph()
%matplotlib inline
# for a visualization of the formats available to the meta model, use the show method
# The green nodes are a one-way path; use jump_to_external_node to reach them. The state of the graph will
# then be left in the closest node. The blue node is the current node
plt.close()
image_graph.show()
# The PIL Image class has a show method
image_pil=image_graph.data
image_pil.show()
%matplotlib wx
# This means that we can take data in any of these formats and turn it into any of the others
image_graph.move_to_node("MatplotlibFigure")
figure=image_graph.data
figure.show()
%matplotlib inline
# now the graph is at a different node
image_graph.show()
metadata_graph=MetadataGraph()
%matplotlib inline
metadata_graph.show()
# metadata is taken to be any key-value pair. The most natural way to express this in Python is a dictionary
metadata_dictionary={"Device":"42","Time":datetime.datetime.now(),"Notes":"A Test of metadata"}
metadata_graph.set_state(node_name="Dictionary",node_data=metadata_dictionary)
# now we can express this in any format in the graph
# for example we move to the AsciiDataTable node and print it
metadata_graph.move_to_node("AsciiDataTable")
meta_table=metadata_graph.data
print(meta_table)
# or in a pandas data frame
metadata_graph.move_to_node("DataFrame")
meta_df=metadata_graph.data
print(meta_df)
# or as HTML metadata
metadata_graph.move_to_node("HtmlMetaString")
meta_df=metadata_graph.data
print(meta_df)
# or as a header list
metadata_graph.move_to_node("HeaderList")
meta_df=metadata_graph.data
print(meta_df)
# or as an XML fragment
metadata_graph.move_to_node("XmlString")
meta_df=metadata_graph.data
print(meta_df)
# or as json
metadata_graph.move_to_node("JsonString")
meta_df=metadata_graph.data
print(meta_df)
table_graph=TableGraph()
table_graph.show()
The Analysis subpackage contains modules dedicated to the most common analysis tasks. It has an elaborate module for scattering parameters and more basic functionality for fits and other common tasks such as comparing tables with uncertainties.
The fitting and interpolation modules allow for the creation and manipulation of data. They are not in the base API and must be loaded separately. Fitting for functions of a single variable works using a sympy/scipy composite function.
from pyMez.Code.Analysis.Fitting import *
from pyMez.Code.Analysis.Interpolation import *
# Now say we want to create a table of data
time_list=np.linspace(0,5,1000)
sine_wave=FunctionalModel(variables=["t"],parameters=["A","phi"],equation="A*sin(2*pi*t+phi)")
f_list=[1,1.2,1.4]
multisine=Multicosine(f_list)
# Now that we have some functions, we can set the parameters and use them to plot
sine_wave.set_parameters({"A":1.0,"phi":0})
multisine.set_parameters({"A_1":1.,"A_2":.3,"A_3":.5,"phi_1":0,"phi_2":np.pi/2.,"phi_3":0})
plt.plot(time_list,sine_wave(time_list),label="Sine Wave")
plt.plot(time_list,multisine(time_list),label="Multisine Wave")
plt.legend()
plt.show()
# If we want synthetic data we can use a data simulator or just add random noise
sythetic_data=DataSimulator(model=multisine,output_noise_center=0.,output_noise_width=.1,output_noise_type="normal")
sythetic_data.set_parameters({"A_1":1.,"A_2":.3,"A_3":.5,"phi_1":0,"phi_2":np.pi/2.,"phi_3":0})
sythetic_data.set_x(0,5,1000)
sythetic_data.get_data()
plt.close()
plt.plot(time_list,sine_wave(time_list),label="Sine Wave")
plt.plot(time_list,sythetic_data.data,label="Synthetic Data")
plt.legend()
plt.show()
# now we can use the multisine to fit the synthetic data
multisine.fit_data(time_list,sythetic_data.data)
# the fit method just sets the parameter values to the least-squares values
multisine.parameter_values
plt.close()
plt.plot(time_list,sythetic_data.data,label="Synthetic Data")
plt.plot(time_list,multisine(time_list),label="Multisine Fit ")
plt.legend()
plt.show()
# now say we want to build a table with the data
# we start with a table that just has the time column
new_table=AsciiDataTable(None,column_types=["float"],column_names=["Time"],data=[[x] for x in time_list.tolist()])
# we can add columns
new_table.add_column(column_name="Sine_Model",column_data=sine_wave(time_list).tolist(),column_type="float")
# ideal data
multisine.set_parameters({"A_1":1.,"A_2":.3,"A_3":.5,"phi_1":0,"phi_2":np.pi/2.,"phi_3":0})
new_table.add_column(column_name="Multisine_Model",column_data=multisine(time_list).tolist(),column_type="float")
# data with noise
new_table.add_column(column_name="Synthetic_Data",column_data=sythetic_data.data.tolist(),column_type="float")
#fit of data with noise
multisine.fit_data(time_list,sythetic_data.data)
new_table.add_column(column_name="Synthetic_Fit",column_data=multisine(time_list).tolist(),column_type="float")
# now we have built the table with these columns
new_table.column_names
new_table.save("./pyMez_Tour_Files/Fitting_Table.txt")
help(interpolate_table)
# now if we want to interpolate we can just use interpolate_table
new_time_list=np.linspace(1,2,1000).tolist()
interpolated_new_table=interpolate_table(new_table,new_time_list)
plt.plot(time_list,sythetic_data.data,label="Original Data")
plt.plot(interpolated_new_table["Time"],interpolated_new_table["Synthetic_Data"],label="Interpolated Data")
plt.legend()
plt.show()
In the Analysis subpackage there are classes and functions to create and compare uncertainties. They are contained in pyMez.Code.Analysis.Uncertainty
, pyMez.Code.Analysis.SParameter
, and pyMez.Code.Analysis.NISTUncertainty
. In addition, there are several data types in the DataHandlers subpackage that deal with the output of error calculators such as StatistiCAL, and the Microwave Uncertainty Framework.
# these modules are not in the Base API so you need to load them separately.
from pyMez.Code.Analysis.Uncertainty import *
from pyMez.Code.Analysis.SParameter import *
# Now if you have two tables, at least one with uncertainties, we can create a standard error table
help(standard_error_data_table)
# The standard error table has a lot of possibilities: any set of column names can be used as the values,
# and the errors can be a function, a percentage, a constant, or a table. In addition, the error for table 2 can be specified
# or left unspecified. Note that the resulting table only has values at the independent_variable locations of table_1
# as an example we can take a raw file scattering parameter measurement from cal services, calculate the uncertainty using
# the calrep program, and then compare it to a results file
raw_scattering_parameters=TwoPortRawModel("./pyMez_Tour_Files/CTN206.A35_092805")
raw_scattering_parameters.column_names
# Now we can estimate the errors using the calrep program. It creates a series of error estimates based on
# the six-port error analysis
calrep_scattering_parameters=calrep(raw_scattering_parameters)
calrep_scattering_parameters.column_names
# Now we can load a file that was created as a mean of good measurements
mean_scattering_parameters=ResultFileModel("./pyMez_Tour_Files/CTN206.Results")
mean_scattering_parameters.column_names
# now we can specify all of the options. The standard error data table can handle any two AsciiDataTable descendants
error_options={"independent_variable_column_name":"Frequency",
"value_column_names":['magS11','argS11','magS21',
'argS21','magS22','argS22'],
"table_1_uncertainty_column_names":['uMgS11','uAgS11',
'uMgS21','uAgS21','uMgS22','uAgS22'],
"table_2_uncertainty_column_names":['uMgS11','uAgS11',
'uMgS21','uAgS21','uMgS22','uAgS22'],
"uncertainty_table_1":None,
"uncertainty_table_2":None,
"uncertainty_function_table_1":None,
"uncertainty_function_table_2":None,
"uncertainty_function":None,
"uncertainty_type":None,
"table_1_uncertainty_type":"table",
"table_2_uncertainty_type":None,
"expansion_factor":1,
'debug':False}
standard_error_scattering_parameters=standard_error_data_table(calrep_scattering_parameters,
mean_scattering_parameters,**error_options)
# now we can save, plot and use the standard error table
standard_error_scattering_parameters.column_names
standard_error_scattering_parameters.save("./pyMez_Tour_Files/Standard_Error.txt")
standard_error_scattering_parameters.show();
# we can use a special function to look at the calrep and results comparison
plot_calrep_results_comparison(calrep_model=calrep_scattering_parameters,results_model=mean_scattering_parameters);
plot_calrep_results_difference_comparison(calrep_model=calrep_scattering_parameters,results_model=mean_scattering_parameters);
There are many functions for the manipulation of scattering parameter and wave parameter data, normally taken using a vector network analyzer. The functions and classes that deal with scattering parameters reside in several modules. In the base API there is pyMez.Code.DataHandlers.TouchstoneModels
for dealing with snp-style files. The add-on modules pyMez.Code.DataHandlers.StatistiCALModels
and pyMez.Code.DataHandlers.MUFModels
have classes and functions that interact with VNA calibration software from NIST. The modules pyMez.Code.DataHandlers.Translations
and pyMez.Code.Analysis.Transformations
have conversion functions to transform scattering parameters to other data types and wave parameters to scattering parameters. Finally, the module pyMez.Code.Analysis.SParameter
has functions for analyzing frequency-dependent data, calculating uncertainties, applying corrections, and plotting.
# Let's open three connects (measurements) of a single device
connect_1=SNP(r"./pyMez_Tour_Files/Line_4909_WR15_20180313_001.s2p")
connect_2=SNP(r"./pyMez_Tour_Files/Line_4909_WR15_20180313_002.s2p")
connect_3=SNP(r"./pyMez_Tour_Files/Line_4909_WR15_20180313_003.s2p")
# now if we want to plot them together we can use
compare_s2p_plots([connect_1,connect_2,connect_3]);
# we can calculate the mean of the three in multiple ways. If we convert to AsciiDataTables we can add the files
# and then use frequency_model_collapse_multiple_measurements
first_file=Snp_to_AsciiDataTable(connect_1)
joined_data_table=first_file.copy()
for connect in [connect_2,connect_3]:
    joined_data_table=joined_data_table+Snp_to_AsciiDataTable(connect)
mean_table=frequency_model_collapse_multiple_measurements(joined_data_table)
std_table=frequency_model_collapse_multiple_measurements(joined_data_table,method="STD")
# now we can plot the tables using the general function plot_frequency_model
plot_frequency_model(mean_table,plot_format="r-");
plot_frequency_model(std_table,plot_format="r-");
# we can also correct the data given a sixteen term correction in s4p format
correction=SNP("./pyMez_Tour_Files/Solution_WR15.s4p")
# The correction is made to the complex data so that it is essentially format free (just a list of lists of complex numbers)
corrected_complex_data=correct_sparameters_sixteen_term(sixteen_term_correction=correction.sparameter_complex,
sparameters_complex=connect_1.sparameter_complex)
# we can use the S2PV1 model to encapsulate the data
corrected_connect_1=S2PV1(sparameter_complex=corrected_complex_data)
corrected_connect_1.show();
# to check we can uncorrect the s2p
uncorrected_complex_data=uncorrect_sparameters_sixteen_term(sixteen_term_correction=correction.sparameter_complex,
sparameters_complex=corrected_connect_1.sparameter_complex)
# we can use the S2PV1 model to encapsulate the data
uncorrected_connect_1=S2PV1(sparameter_complex=uncorrected_complex_data,column_types=["float" for i in range(9)])
uncorrected_connect_1.show();
pyMez has several other analysis modules and will be extended over time. For instance, pyMez.Code.Analysis.ProgramAnalysis
provides tools for creating an SVG example of a function, linking a text form of the input, a copy of the code, and a form of the output in a single SVG diagram.
Instrument Control in pyMez is primarily contained in the module pyMez.Code.InstrumentControl.Instruments
. The subpackage pyMez.Code.InstrumentControl
is meant to expand over time, as more instrument drivers are added. The module pyMez.Code.InstrumentControl.Experiments
houses the combination of several instruments and data management. Once an experiment reaches the point where it will be replicated several times, the experiment class can be bound to a GUI.
There are a few issues that the package pyvisa did not address adequately for our lab, so the Instrument class (VisaInstrument) adds several capabilities on top of it.
For the most part we deal with instruments using VISA (through the class VisaInstrument). The main interface has some common features, sketched below.
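A minimal sketch of that common interface, using only calls that appear later in this tour (the address "GPIB::16" is just an example; query performs a write followed by a read):
instrument=VisaInstrument("GPIB::16")   # connect by address, or by a unique name from an instrument sheet
instrument.write("SOUR:POW 5")          # send a command over the bus
response=instrument.read()              # read the buffer
identity=instrument.query("*IDN?")      # a write followed by a read
print(instrument.history)               # every transaction is recorded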
To provide information regarding an instrument, including the default state commands, we have developed instrument sheets. Essentially, they are XML documents that hold a collection of information about the instrument. The instrument sheet data handler class lives in pyMez.Code.DataHandlers.XMLModels
and is a parent of the VisaInstrument class. If creating an instrument does not find a matching sheet in the instrument_description_directory passed at creation of the class, it behaves as if the description is empty.
When a VisaInstrument is created, it looks for any unique string that identifies the instrument and then deduces the GPIB address from the instrument sheet. The sheet can be extended to include any information that the user needs or wants.
An Example Instrument Sheet
<?xml version="1.0" encoding="utf-8"?>
<?xml-stylesheet type="text/xsl" href="DEFAULT_INSTRUMENT_STYLE.xsl"?>
<!-- Written by Aric Sanders 04/2017 -->
<Instrument>
<!-- Information Specific To My Power Meter-->
<Specific_Information>
<Name>NRPPowerMeter</Name>
<Alias>RS_NRP_Power_Meter_4639_01</Alias>
<Location>Building 1, 4639</Location>
<Manual href="../Documentation/Manuals/RS_NRP2_Manual.pdf"/>
<Image href="./RS_NRP_Power_Meter_4639_01_Images/NRP.jpg"/>
<Price></Price>
<Serial></Serial>
<IDN>Rohde&amp;Schwarz,NRP,102508,06.01</IDN>
<Instrument_Type>GPIB</Instrument_Type>
<Instrument_Address>GPIB::14</Instrument_Address>
<Purchase_Date></Purchase_Date>
<NIST_Tag>935166</NIST_Tag>
</Specific_Information>
<!-- Information Common To All NRP Power meters-->
<General_Information>
<Manufacturer>Rohde and Schwarz</Manufacturer>
<Manufacturer_Website href="https://www.rohde-schwarz.com/us/home_48230.html" />
<Commands_Description>
<Command>This is the command sent over the GPIB bus.</Command>
<Type>Whether or not it returns a value or just sets something.</Type>
<Argument>The parameter the command passes to the instrument. Optional parameters
are denoted with an *. The types of parameters are int=integer, float=floating point number,
string=string, and None=NULL.</Argument>
<Returns> What gets returned by the function. </Returns>
<Description>A one line describing the purpose of the function, for more detailed info look in the manual. </Description>
</Commands_Description>
<Commands>
</Commands>
<Command_Parameter_Definitions>
</Command_Parameter_Definitions>
<State_Commands>
<Tuple Set="SENS:FUNC" Query="SENS:FUNC?"/>
<Tuple Set="UNIT:POW" Query="UNIT:POW?"/>
</State_Commands>
</General_Information>
</Instrument>
# for instance the NRPPowerMeter is a Rohde and Schwarz power meter
power_meter=VisaInstrument("NRPPowerMeter")
# Since "NRPPowerMeter" was a unique string in the xml instrument sheet, it finds all the information in that sheet and loads it
# into attributes that mirror the tag names (all lower case)
power_meter.idn
# this lets the user define information that is domain or user specific that is added to the control class on creation
power_meter.nist_tag
# it also allows the user to define "State_Commands" in the general description, which creates a
# default state query dictionary constant for the instrument; this can be changed after creation
power_meter.DEFAULT_STATE_QUERY_DICTIONARY
# this controls the behavior of the .get_state and .save_state methods when called without a state dictionary or table
power_meter.get_state()
# In addition, the instrument sheet has an XSL style sheet that transforms it to html
html_instrument_sheet=HTMLBase(html_text=power_meter.to_HTML(os.path.join(TESTS_DIRECTORY,"../XSL/DEFAULT_INSTRUMENT_STYLE.xsl")))
html_instrument_sheet.show()
A major thrust of pyMez is to provide dynamic metadata for instruments, that is, the ability to store the state of an instrument at a given moment in a tabular or xml form and then recall it. For the xml version of states we use classes found in pyMez.Code.DataHandlers.XMLModels
which is part of the Base API. This is the default style; however, it is easy to convert to a regular AsciiDataTable or similar. When dealing with states it is important to realize that a state dictionary without an index will be written to the instrument in random order; if order is important, a state table with an index is available. The default state behavior is determined by the instrument sheet if one is found. The VisaInstrument class will save the states for instruments in the state_directory specified at creation, or default to the current working directory.
# first we create an instrument; in this case we will use the base class VisaInstrument, but any descendant will work
# In our instrument_sheet_directory we have a vna with GPIB address 16
# we can also set the state_directory at the time of creation or later
vna=VisaInstrument("GPIB::16")
# now we can save the current state to the desired location
# if you do not specify a location it will auto name it
vna.save_state(state_path="./pyMez_Tour_Files/Right_Now_State.xml")
# now the state is an xml file containing the set commands and the values
# these have been read from the InstrumentSheet, but can be provided
xml_state=InstrumentState("./pyMez_Tour_Files/Right_Now_State.xml")
xml_state.document.getElementsByTagName('State_Description')[0]
print(xml_state)
<Instrument_State>
<State>
<Tuple Set="SENS:AVER" Value="Buffer Read at 2018-12-10T04:59:22.274000"/>
<Tuple Set="SENS:BAND" Value="Buffer Read at 2018-12-10T04:59:22.275000"/>
<Tuple Set="SOUR:POW" Value="Buffer Read at 2018-12-10T04:59:22.278000"/>
<Tuple Set="SENS:SWE:TYPE" Value="Buffer Read at 2018-12-10T04:59:22.276000"/>
<Tuple Set="SOUR:POW:CORR:STAT" Value="Buffer Read at 2018-12-10T04:59:22.279000"/>
<Tuple Set="SOUR:POW:SLOP" Value="Buffer Read at 2018-12-10T04:59:22.280000"/>
<Tuple Set="SENS:CORR:STAT" Value="Buffer Read at 2018-12-10T04:59:22.277000"/>
</State>
<State_Description>
<State_Timestamp>2018-12-10T04:59:22.288000</State_Timestamp>
<Instrument_Description>C:\ProgramData\Anaconda2\lib\site-packages\pyMez\Code\InstrumentControl\..\..\Instruments\E8361A_PNA_01.xml</Instrument_Description>
<State_Timestamp>2018-12-10T05:00:58.017000</State_Timestamp>
</State_Description>
</Instrument_State>
# again this xml data can be transformed to html using a style sheet
html_state=HTMLBase(html_text=xml_state.to_HTML(os.path.join(TESTS_DIRECTORY,"../XSL/DEFAULT_STATE_STYLE.xsl")))
html_state.show()
# if you create a series of GPIB commands you can index them so that they are written in order
state_table=[{"Index":0,"Set":"SOUR:POW:SLOP","Query":"SOUR:POW:SLOP?"},
{"Index":1,"Set":"SOUR:POW","Query":"SOUR:POW?"}]
vna.save_state(state_path="./pyMez_Tour_Files/Reduced_Right_Now_State.xml",state_table=state_table)
xml_reduced_state=InstrumentState("./pyMez_Tour_Files/Reduced_Right_Now_State.xml")
print(xml_reduced_state)
Instruments are typically descendants of the VisaInstrument class and follow a few design rules, illustrated by the example below:
# Example of creating a new instrument class
class MyInstrument(VisaInstrument):
    def get_frequency(self):
        """Gets the instrument's frequency"""
        frequency=self.query("Command_To_Get_Frequency")
        return frequency
    def set_frequency(self,frequency):
        """Sets the instrument's frequency"""
        self.write("Command_To_Set_Frequency {0}".format(frequency))
instrument=MyInstrument("FakeAddress")
instrument.get_frequency()
instrument.set_frequency(1000)
instrument.history
# currently there are classes for VNA operation, HighSpeedOscilloscope, Picoammeter-Source, and Power Meters
help(VNA)
When you create an instrument and it cannot be loaded, because the GPIB bus is not connected or there is no instrument at the specified address, the instrument class enters an emulation mode where the commands sent to the instrument are recorded. This sets the attribute instrument.emulation_mode=True, which is useful for debugging commands and performance.
emulated_instrument_with_sheet=VisaInstrument("GPIB::16")
emulated_instrument=VisaInstrument("GPIB::20000")
# now if the instrument sheet is found there is information about the instrument from the xml
emulated_instrument_with_sheet.idn
emulated_instrument_with_sheet.commands
# if no instrument sheet is found in the instrument_description_directory, then it functions the same but without
# the attributes .idn and .commands defined
emulated_instrument.commands
# now if we write to the bus
emulated_instrument.write("SOUR:POW 5")
# we can see the history here
emulated_instrument.history
# reads from the bus look like this
emulated_instrument.read()
# Now we can see the read results; query or ask commands perform a write and then a read
emulated_instrument.history
# it also sets the emulation_mode to true
emulated_instrument.emulation_mode
# now to see how the set_state command works
state_to_set=[{"Index":0,"Set":"GPIB Set Command 1","Query":"Read Command 1","Value":2},
{"Index":1,"Set":"GPIB Set Command 2","Query":"Read Command 2","Value":"MyValue2"}]
# we are using the table form instead of the dictionary form
emulated_instrument.set_state(state_table=state_to_set)
# Now we can see that the Set Command has the Value inserted into it
emulated_instrument.history
# the get_state command returns a "Set" and a "Value" from the instrument
emulated_instrument.get_state(state_query_table=state_to_set)
emulated_instrument.history
The subpackage pyMez.Code.Utils
is designed to be a place to store functions and classes that are helpful to each of the other subpackages. For instance, the functions that auto-generate help files, the auto-naming function and timing decorators all reside in this package.
The subpackage pyMez.Code.Utils.HelpUtils
contains the functions used to auto-generate the pyMez documentation. This documentation is multi-tiered and contains API help based on html produced by the pdoc package available on pip. This package reads a python package and, using introspection, generates html. It was chosen for its simplicity and its linking of source code. The pdoc package uses a custom template located in pyMez/Documentation/templates. In addition to the API help, a list of all the functions and classes organized by sub-package is generated using the create_index_html_script. It reads the code in a package, finds any pattern that matches a class or function, and then links to the html help created by pdoc. Finally, this example page and the main page of the documentation are jupyter notebooks that have been converted to html using nbconvert. The create_examples_html_script works by reading all notebook files in pyMez/Documentation/Examples/jupyter and converting them to a similar file structure in pyMez/Documentation/Examples/html. These files then have the .ipynb
suffix changed to .html in the links using change_links_examples_script. This requires manual editing of the Examples_Home notebook to include links to the jupyter examples.
# we can get html help for any live object using return_help. This returns help for the full module.
from pyMez.Code.Utils.HelpUtils import *
html_help=HTMLBase(html_text=str(return_help(SNP)))
html_help.show()
The subpackage pyMez.Code.Utils.Names
contains the functions used to auto-generate names and pyMez.Code.Utils.Alias
contains functions for creating method aliases. Auto-naming follows the template {specific_descriptor}_{general_descriptor}_{isodate}_{iterator}.{extension}
where the iterator is decided by the files already present in the directory.
# for example
from pyMez.Code.Utils.Names import *
auto_name(directory="./pyMez_Tour_Files",specific_descriptor="Scope",general_descriptor="Measurement",extension="dat")
auto_name(directory="./pyMez_Tour_Files",specific_descriptor="Scope",general_descriptor="Measurement",extension="txt")
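# the returned names follow the template above, e.g. Scope_Measurement_<isodate>_001.dat, where <isodate> is today's date
# and the iterator increments if a file with that name is already present in the directory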
# the module Alias is used to create Aliases for methods
from pyMez.Code.Utils.Alias import *
class MyClass(object):
    def __init__(self):
        self.littleAttribute=[]
        self.i_like_underscores=[]
        # this calls and executes the alias function
        for command in alias(self):
            exec(command)
    def my_method(self):
        pass
dir(MyClass)
test_class=MyClass()
dir(test_class)
In addition to the utilities covered here, there are ones to time code, create html from HP Basic programs, and fix other small issues
from pyMez.Code.Utils.PerformanceUtils import *
@timer
def run_loop(length=20000):
    for i in range(length):
        time.sleep(.001)
run_loop()
# tools to extract system and other metadata
from pyMez.Code.Utils.GetMetadata import *
get_system_metadata("./pyMez_Tour_Files/Solution_0.s4p")
get_metadata("./pyMez_Tour_Files/Code_Structure.png")