Important
This library has been archived and replaced by Semantic Link Labs. Everything in the fabric_cat_tools library has been moved to the Semantic Link Labs library and will be maintained there going forward. Semantic Link Labs is fully open-sourced and is part of Microsoft's official GitHub repositories.
- clear_cache
- create_semantic_model_from_bim
- get_semantic_model_bim
- get_measure_dependencies
- get_model_calc_dependencies
- measure_dependency_tree
- refresh_semantic_model
- cancel_dataset_refresh
- run_dax
- get_object_level_security
- translate_semantic_model
- list_semantic_model_objects
- report_rebind
- report_rebind_all
- create_report_from_reportjson
- get_report_json
- export_report
- clone_report
- list_dashboards
- launch_report
- generate_embedded_filter
- create_pqt_file
- create_blank_semantic_model
- migrate_field_parameters
- migrate_tables_columns_to_semantic_model
- migrate_calc_tables_to_semantic_model
- migrate_model_objects_to_semantic_model
- migrate_calc_tables_to_lakehouse
- refresh_calc_tables
- show_unsupported_direct_lake_objects
- update_direct_lake_partition_entity
- update_direct_lake_model_lakehouse_connection
- migration_validation
- check_fallback_reason
- control_fallback
- direct_lake_schema_compare
- direct_lake_schema_sync
- get_direct_lake_lakehouse
- get_directlake_guardrails_for_sku
- get_direct_lake_guardrails
- get_shared_expression
- get_direct_lake_sql_endpoint
- get_sku_size
- list_direct_lake_model_calc_tables
- warm_direct_lake_cache_perspective
- warm_direct_lake_cache_isresident
- get_lakehouse_tables
- get_lakehouse_columns
- list_lakehouses
- export_model_to_onelake
- create_shortcut_onelake
- delete_shortcut
- list_shortcuts
- optimize_lakehouse_tables
- create_warehouse
- update_item
- list_dataflow_storage_accounts
- list_warehouses
- save_as_delta_table
- resolve_dataset_id
- resolve_dataset_name
- resolve_lakehouse_id
- resolve_lakehouse_name
- resolve_report_id
- resolve_report_name
- add_calculated_column
- add_calculated_table
- add_calculated_table_column
- add_calculation_group
- add_calculation_item
- add_data_column
- add_entity_partition
- add_expression
- add_field_parameter
- add_hierarchy
- add_m_partition
- add_measure
- add_perspective
- add_relationship
- add_role
- add_table
- add_translation
- used_in_relationships
- used_in_hierarchies
- used_in_levels
- used_in_sort_by
- used_in_rls
- used_in_calc_item
- depends_on
- referenced_by
- fully_qualified_measures
- unqualified_columns
- remove_vertipaq_annotations
- set_vertipaq_annotations
- row_count
- used_size
- data_size
- dictionary_size
- total_size
- cardinality
- get_extended_properties
- set_extended_property
- get_extended_property_value
- remove_extended_property
- clear_extended_properties
Cancels the refresh of a semantic model which was executed via the Enhanced Refresh API.
import fabric_cat_tools as fct
fct.cancel_dataset_refresh(
dataset = 'MyReport',
#request_id = None,
#workspace = None
)
dataset str
Required; Name of the semantic model.
request_id str
Optional; The request id of a semantic model refresh. Defaults to finding the latest active refresh of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.check_fallback_reason(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
Pandas dataframe showing the tables in the semantic model and their fallback reason.
import fabric_cat_tools as fct
fct.clear_cache(
dataset = 'AdventureWorks',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.clone_report(
report = 'MyReport',
cloned_report = 'MyNewReport',
#workspace = None,
#target_workspace = None,
#target_dataset = None
)
report str
Required; Name of the report to be cloned.
cloned_report str
Required; Name of the new report.
workspace str
Optional; The workspace where the original report resides.
target_workspace str
Optional; The workspace where the new report will reside. Defaults to using the workspace in which the original report resides.
target_dataset str
Optional; The semantic model from which the new report will be connected. Defaults to using the semantic model used by the original report.
A printout stating the success/failure of the operation.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.control_fallback(
dataset = 'AdventureWorks',
direct_lake_behavior = 'DirectLakeOnly',
#workspace = None
)
dataset str
Required; Name of the semantic model.
direct_lake_behavior str
Required; Setting for Direct Lake Behavior. Options: ('Automatic', 'DirectLakeOnly', 'DirectQueryOnly').
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.create_blank_semantic_model(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
compatibility_level int
Optional; Setting for the compatibility level of the semantic model. Default value: 1605.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Dynamically generates a Power Query Template file based on the semantic model. The .pqt file is saved within the Files section of your lakehouse.
import fabric_cat_tools as fct
fct.create_pqt_file(
dataset = 'AdventureWorks',
#file_name = 'PowerQueryTemplate',
#workspace = None
)
dataset str
Required; Name of the import/DirectQuery semantic model.
file_name str
Optional; Name of the Power Query Template (.pqt) file to be created.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.create_report_from_reportjson(
report = 'MyReport',
dataset = 'AdventureWorks',
report_json = '',
#theme_json = '',
#workspace = None
)
report str
Required; Name of the report.
dataset str
Required; Name of the semantic model to connect to the report.
report_json str
Required; The report.json file to be used to create the report.
theme_json str
Optional; The theme.json file to be used for the theme of the report.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.create_semantic_model_from_bim(
dataset = 'AdventureWorks',
bim_file = '',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
bim_file str
Required; The model.bim file to be used to create the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Creates a shortcut to a delta table in OneLake.
import fabric_cat_tools as fct
fct.create_shortcut_onelake(
table_name = 'DimCalendar',
source_lakehouse = 'Lakehouse1',
source_workspace = 'Workspace1',
destination_lakehouse = 'Lakehouse2',
#destination_workspace = '',
shortcut_name = 'Calendar'
)
table_name str
Required; The table name for which a shortcut will be created.
source_lakehouse str
Required; The lakehouse in which the table resides.
source_workspace str
Required; The workspace where the source lakehouse resides.
destination_lakehouse str
Required; The lakehouse where the shortcut will be created.
destination_workspace str
Optional; The workspace in which the shortcut will be created. Defaults to the 'source_workspace' parameter value.
shortcut_name str
Optional; The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.create_warehouse(
warehouse = 'MyWarehouse',
workspace = None
)
warehouse str
Required; Name of the warehouse.
description str
Optional; Description of the warehouse.
workspace str
Optional; The workspace where the warehouse will reside.
A printout stating the success/failure of the operation.
Deletes a OneLake shortcut.
import fabric_cat_tools as fct
fct.delete_shortcut(
shortcut_name = 'DimCalendar',
lakehouse = 'Lakehouse1',
workspace = 'Workspace1'
)
shortcut_name str
Required; The name of the OneLake shortcut to delete.
lakehouse str
Optional; The lakehouse in which the shortcut resides.
workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Checks that all the tables in a Direct Lake semantic model map to tables in their corresponding lakehouse and that the columns in each table exist.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.direct_lake_schema_compare(
dataset = 'AdventureWorks',
workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
lakehouse str
Optional; The lakehouse used by the Direct Lake semantic model.
lakehouse_workspace str
Optional; The workspace in which the lakehouse resides.
Shows tables/columns which exist in the semantic model but do not exist in the corresponding lakehouse.
Shows/adds columns which exist in the lakehouse but do not exist in the semantic model (only for tables in the semantic model).
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.direct_lake_schema_sync(
dataset = 'AdvWorks',
add_to_model = True,
#workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the semantic model.
add_to_model bool
Optional; Adds columns which exist in the lakehouse but do not exist in the semantic model. No new tables are added. Default value: False.
workspace str
Optional; The workspace where the semantic model resides.
lakehouse str
Optional; The lakehouse used by the Direct Lake semantic model.
lakehouse_workspace str
Optional; The workspace in which the lakehouse resides.
A list of columns which exist in the lakehouse but not in the Direct Lake semantic model. If 'add_to_model' is set to True, a printout stating the success/failure of the operation is returned.
Exports a semantic model's tables to delta tables in the lakehouse. Creates shortcuts to the tables if a lakehouse is specified.
Important
This function requires:
XMLA read/write to be enabled on the Fabric capacity.
OneLake Integration feature to be enabled within the semantic model settings.
import fabric_cat_tools as fct
fct.export_model_to_onelake(
dataset = 'AdventureWorks',
workspace = None,
destination_lakehouse = 'Lakehouse2',
destination_workspace = 'Workspace2'
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
destination_lakehouse str
Optional; The lakehouse where shortcuts will be created to access the delta tables created by the export. If the lakehouse specified does not exist, one will be created with that name. If no lakehouse is specified, shortcuts will not be created.
destination_workspace str
Optional; The workspace in which the lakehouse resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.export_report(
report = 'AdventureWorks',
export_format = 'PDF',
#file_name = None,
#bookmark_name = None,
#page_name = None,
#visual_name = None,
#workspace = None
)
import fabric_cat_tools as fct
fct.export_report(
report = 'AdventureWorks',
export_format = 'PDF',
#file_name = 'Exports\MyReport',
#bookmark_name = None,
#page_name = 'ReportSection293847182375',
#visual_name = None,
#workspace = None
)
import fabric_cat_tools as fct
fct.export_report(
report = 'AdventureWorks',
export_format = 'PDF',
#page_name = 'ReportSection293847182375',
#report_filter = "'Product Category'[Color] in ('Blue', 'Orange') and 'Calendar'[CalendarYear] <= 2020",
#workspace = None
)
import fabric_cat_tools as fct
fct.export_report(
report = 'AdventureWorks',
export_format = 'PDF',
#page_name = ['ReportSection293847182375', 'ReportSection4818372483347'],
#workspace = None
)
import fabric_cat_tools as fct
fct.export_report(
report = 'AdventureWorks',
export_format = 'PDF',
#page_name = ['ReportSection293847182375', 'ReportSection4818372483347'],
#visual_name = ['d84793724739', 'v834729234723847'],
#workspace = None
)
report str
Required; Name of the semantic model.
export_format str
Required; The format in which to export the report. See this link for valid formats: https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat. For image formats, enter the file extension in this parameter, not 'IMAGE'.
file_name str
Optional; The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults to the 'report' parameter value.
bookmark_name str
Optional; The name (GUID) of a bookmark within the report.
page_name str or list of str
Optional; The name (GUID) of the report page.
visual_name str or list of str
Optional; The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter.
report_filter str
Optional; A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples.
workspace str
Optional; The workspace where the report resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.generate_embedded_filter(
filter = "'Product'[Product Category] = 'Bikes' and 'Geography'[Country Code] in (3, 6, 10)"
)
filter str
A string converting the filter into an embedded filter
Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query based on Microsoft's online documentation.
import fabric_cat_tools as fct
fct.get_direct_lake_guardrails()
None
A table showing the Direct Lake guardrails by SKU.
Use the result of the 'get_sku_size' function as an input for this function's 'sku_size' parameter.
import fabric_cat_tools as fct
fct.get_directlake_guardrails_for_sku(
sku_size = ''
)
sku_size str
Required; Sku size of a workspace/capacity
A table showing the Direct Lake guardrails for the given SKU.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.get_direct_lake_lakehouse(
dataset = 'AdventureWorks',
#workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
lakehouse str
Optional; Name of the lakehouse used by the semantic model.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.get_direct_lake_sql_endpoint(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A string containing the SQL Endpoint ID for a Direct Lake semantic model.
import fabric_cat_tools as fct
fct.get_lakehouse_columns(
lakehouse = 'AdventureWorks',
#workspace = None
)
lakehouse str
Optional; The lakehouse name.
workspace str
Optional; The workspace where the lakehouse resides.
A pandas dataframe showing the tables/columns within a lakehouse and their properties.
Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.
import fabric_cat_tools as fct
fct.get_lakehouse_tables(
lakehouse = 'MyLakehouse',
workspace = 'NewWorkspace',
extended = True,
count_rows = True)
lakehouse str
Optional; The lakehouse name.
workspace str
Optional; The workspace where the lakehouse resides.
extended bool
Optional; Adds the following additional table properties ['Files', 'Row Groups', 'Table Size', 'Parquet File Guardrail', 'Row Group Guardrail', 'Row Count Guardrail']. Also indicates the SKU for the workspace and whether guardrails are hit. Default value: False.
count_rows bool
Optional; Adds an additional column showing the row count of each table. Default value: False.
export bool
Optional; If specified as True, the resulting dataframe will be exported to a delta table in your lakehouse.
A pandas dataframe showing the delta tables within a lakehouse and their properties.
import fabric_cat_tools as fct
fct.get_measure_dependencies(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A pandas dataframe showing all dependencies for all measures in the semantic model.
import fabric_cat_tools as fct
fct.get_model_calc_dependencies(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A pandas dataframe showing all dependencies for all objects in the semantic model.
import fabric_cat_tools as fct
fct.get_object_level_security(
dataset = 'AdventureWorks',
workspace = '')
dataset str
Optional; The semantic model name.
workspace str
Optional; The workspace where the semantic model resides.
A pandas dataframe showing the columns used in object level security within a semantic model.
import fabric_cat_tools as fct
fct.get_report_json(
report = 'MyReport',
#workspace = None
)
import fabric_cat_tools as fct
fct.get_report_json(
report = 'MyReport',
#workspace = None,
save_to_file_name = 'MyFileName'
)
report str
Required; Name of the report.
workspace str
Optional; The workspace where the report resides.
save_to_file_name str
Optional; Specifying this parameter will save the report.json file to your lakehouse with the file name of this parameter.
The report.json file for a given Power BI report.
import fabric_cat_tools as fct
fct.get_semantic_model_bim(
dataset = 'AdventureWorks',
#workspace = None
)
import fabric_cat_tools as fct
fct.get_semantic_model_bim(
dataset = 'AdventureWorks',
#workspace = None,
save_to_file_name = 'MyFileName'
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
save_to_file_name str
Optional; Specifying this parameter will save the model.bim file to your lakehouse with the file name of this parameter.
The model.bim file for a given semantic model.
import fabric_cat_tools as fct
fct.get_shared_expression(
lakehouse = '',
#workspace = ''
)
lakehouse str
Optional; The lakehouse name.
workspace str
Optional; The workspace where the lakehouse resides.
A string showing the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
import fabric_cat_tools as fct
fct.get_sku_size(
workspace = ''
)
workspace str
Optional; The workspace where the semantic model resides.
A string containing the SKU size for a workspace.
import fabric_cat_tools as fct
fct.import_vertipaq_analyzer(
folder_path = '/lakehouse/default/Files/VertipaqAnalyzer',
file_name = 'Workspace Name-DatasetName.zip'
)
folder_path str
Required; Folder within your lakehouse in which the .zip file containing the vertipaq analyzer info has been saved.
file_name str
Required; File name of the file which contains the vertipaq analyzer info.
import fabric_cat_tools as fct
fct.launch_report(
report = 'MyReport',
#workspace = None
)
report str
Required; The name of the report.
workspace str
Optional; The name of the workspace in which the report resides.
import fabric_cat_tools as fct
fct.list_dashboards(
#workspace = ''
)
workspace str
Optional; The workspace name.
A pandas dataframe showing the dashboards which exist in the workspace.
import fabric_cat_tools as fct
fct.list_dataflow_storage_accounts()
None
A pandas dataframe showing the accessible dataflow storage accounts.
Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.list_direct_lake_model_calc_tables(
dataset = 'AdventureWorks',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
import fabric_cat_tools as fct
fct.list_lakehouses(
workspace = None
)
workspace str
Optional; The workspace where the lakehouse resides.
A pandas dataframe showing the properties of all lakehouses in a workspace.
import fabric_cat_tools as fct
fct.list_semantic_model_objects(
dataset = 'AdvWorks',
workspace = None
)
dataset str
Required; Name of the import/DirectQuery semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A dataframe showing a list of objects in the semantic model
Shows the shortcuts within a lakehouse (note: the API behind this function is not yet available. The function will work as expected once the API is officially released)
import fabric_cat_tools as fct
fct.list_shortcuts(
lakehouse = 'MyLakehouse',
#workspace = ''
)
lakehouse str
Optional; Name of the lakehouse.
workspace str
Optional; The workspace where the lakehouse resides.
A pandas dataframe showing the shortcuts which exist in a given lakehouse and their properties.
import fabric_cat_tools as fct
fct.list_warehouses(
#workspace = None
)
workspace str
Optional; The workspace name.
A pandas dataframe showing the warehouses which exist in a given workspace and their properties.
import fabric_cat_tools as fct
fct.measure_dependency_tree(
dataset = 'AdventureWorks',
measure_name = 'Sales Amount',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
measure_name str
Required; Name of the measure to use for building a dependency tree.
workspace str
Optional; The workspace where the semantic model resides.
A tree view showing the dependencies for a given measure within the semantic model.
Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model. The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations.
Note
This function is specifically relevant for import/DirectQuery migration to Direct Lake
import fabric_cat_tools as fct
fct.migrate_calc_tables_to_lakehouse(
dataset = 'AdventureWorks',
new_dataset = 'AdventureWorksDL',
#workspace = '',
#new_dataset_workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
lakehouse str
Optional; The lakehouse to be used by the Direct Lake semantic model.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Creates new tables in the Direct Lake semantic model based on the lakehouse tables created using the 'migrate_calc_tables_to_lakehouse' function.
Note
This function is specifically relevant for import/DirectQuery migration to Direct Lake
import fabric_cat_tools as fct
fct.migrate_calc_tables_to_semantic_model(
dataset = 'AdventureWorks',
new_dataset = 'AdventureWorksDL',
#workspace = '',
#new_dataset_workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
lakehouse str
Optional; The lakehouse to be used by the Direct Lake semantic model.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Note
This function is specifically relevant for import/DirectQuery migration to Direct Lake
import fabric_cat_tools as fct
fct.migrate_field_parameters(
dataset = 'AdventureWorks',
new_dataset = '',
#workspace = '',
#new_dataset_workspace = ''
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
A printout stating the success/failure of the operation.
Adds the rest of the model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model.
Note
This function is specifically relevant for import/DirectQuery migration to Direct Lake
import fabric_cat_tools as fct
fct.migrate_model_objects_to_semantic_model(
dataset = 'AdventureWorks',
new_dataset = '',
#workspace = '',
#new_dataset_workspace = ''
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
A printout stating the success/failure of the operation.
Adds tables/columns to the new Direct Lake semantic model based on an import/DirectQuery semantic model.
Note
This function is specifically relevant for import/DirectQuery migration to Direct Lake
import fabric_cat_tools as fct
fct.migrate_tables_columns_to_semantic_model(
dataset = 'AdventureWorks',
new_dataset = 'AdventureWorksDL',
#workspace = '',
#new_dataset_workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
lakehouse str
Optional; The lakehouse to be used by the Direct Lake semantic model.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Shows the objects in the original semantic model and whether they were migrated successfully or not.
import fabric_cat_tools as fct
fct.migration_validation(
dataset = 'AdvWorks',
new_dataset = 'AdvWorksDL',
workspace = None,
new_dataset_workspace = None
)
dataset str
Required; Name of the import/DirectQuery semantic model.
new_dataset str
Required; Name of the Direct Lake semantic model.
workspace str
Optional; The workspace where the semantic model resides.
new_dataset_workspace str
Optional; The workspace to be used by the Direct Lake semantic model.
A dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully.
Shows the default Best Practice Rules for the semantic model used by the run_model_bpa function
import fabric_cat_tools as fct
fct.model_bpa_rules()
A pandas dataframe showing the default semantic model best practice rules.
Runs the OPTIMIZE function over the specified lakehouse tables.
import fabric_cat_tools as fct
fct.optimize_lakehouse_tables(
tables = ['Sales', 'Calendar'],
#lakehouse = None,
#workspace = None
)
import fabric_cat_tools as fct
fct.optimize_lakehouse_tables(
tables = None,
#lakehouse = 'MyLakehouse',
#workspace = None
)
tables str or list of str
Required; Name(s) of the lakehouse delta table(s) to optimize. If 'None' is entered, all of the delta tables in the lakehouse will be queued to be optimized.
lakehouse str
Optional; Name of the lakehouse.
workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Recreates the delta tables in the lakehouse based on the DAX expressions stored as model annotations in the Direct Lake semantic model.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.refresh_calc_tables(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.refresh_semantic_model(
dataset = 'AdventureWorks',
refresh_type = 'full',
workspace = None
)
import fabric_cat_tools as fct
fct.refresh_semantic_model(
dataset = 'AdventureWorks',
tables = ['Sales', 'Geography'],
workspace = None
)
import fabric_cat_tools as fct
fct.refresh_semantic_model(
dataset = 'AdventureWorks',
partitions = ["'Sales'[Sales - 2024]", "'Sales'[Sales - 2023]"],
workspace = None
)
import fabric_cat_tools as fct
fct.refresh_semantic_model(
dataset = 'AdventureWorks',
tables = ['Geography'],
partitions = ["'Sales'[Sales - 2024]", "'Sales'[Sales - 2023]"],
workspace = None
)
dataset str
Required; Name of the semantic model. If no tables/partitions are specified, the entire semantic model is refreshed.
tables str or list of str
Optional; Tables to refresh.
partitions str or list of str
Optional; Partitions to refresh. Must be in "'Table'[Partition]" format.
refresh_type str
Optional; Type of processing to perform. Options: ('full', 'automatic', 'dataOnly', 'calculate', 'clearValues', 'defragment'). Default value: 'full'.
retry_count int
Optional; Number of retry attempts. Default is 0.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.report_rebind(
report = '',
dataset = '',
#report_workspace = '',
#dataset_workspace = ''
)
report str
Required; Name of the report.
dataset str
Required; Name of the semantic model to rebind to the report.
report_workspace str
Optional; The workspace where the report resides.
dataset_workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Rebinds all reports in a workspace which are bound to a specific semantic model to a new semantic model.
import fabric_cat_tools as fct
fct.report_rebind_all(
dataset = '',
new_dataset = '',
#dataset_workspace = '' ,
#new_dataset_workspace = '' ,
#report_workspace = ''
)
dataset str
Required; Name of the semantic model currently bound to the reports.
new_dataset str
Required; Name of the semantic model to rebind to the reports.
dataset_workspace str
Optional; The workspace where the original semantic model resides.
new_dataset_workspace str
Optional; The workspace where the new semantic model resides.
report_workspace str
Optional; The workspace where the reports reside.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.resolve_lakehouse_name(
lakehouse_id = '',
#workspace = ''
)
lakehouse_id UUID
Required; UUID object representing a lakehouse.
workspace str
Optional; The workspace where the lakehouse resides.
A string containing the lakehouse name.
import fabric_cat_tools as fct
fct.resolve_lakehouse_id(
lakehouse = 'MyLakehouse',
#workspace = ''
)
lakehouse str
Required; Name of the lakehouse.
workspace str
Optional; The workspace where the lakehouse resides.
A string containing the lakehouse ID.
import fabric_cat_tools as fct
fct.resolve_dataset_id(
dataset = 'MyReport',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A string containing the semantic model ID.
import fabric_cat_tools as fct
fct.resolve_dataset_name(
dataset_id = '',
#workspace = ''
)
dataset_id UUID
Required; UUID object representing a semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A string containing the semantic model name.
import fabric_cat_tools as fct
fct.resolve_report_id(
report = 'MyReport',
#workspace = ''
)
report str
Required; Name of the report.
workspace str
Optional; The workspace where the report resides.
A string containing the report ID.
import fabric_cat_tools as fct
fct.resolve_report_name(
report_id = '',
#workspace = ''
)
report_id UUID
Required; UUID object representing a report.
workspace str
Optional; The workspace where the report resides.
A string containing the report name.
import fabric_cat_tools as fct
fct.run_dax(
dataset = 'AdventureWorks',
dax_query = 'Internet Sales',
user_name = '[email protected]',
#workspace = None
)
dataset str
Required; Name of the semantic model.
dax_query str
Required; The DAX query to be executed.
user_name str
Optional; The user name (UPN) to impersonate when executing the DAX query.
workspace str
Optional; The workspace where the semantic model resides.
A pandas dataframe with the results of the DAX query.
import fabric_cat_tools as fct
fct.run_model_bpa(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
rules_dataframe
Optional; A pandas dataframe including rules to be analyzed.
workspace str
Optional; The workspace where the semantic model resides.
return_dataframe bool
Optional; Returns a pandas dataframe instead of the visualization.
export bool
Optional; Exports the results to a delta table in the lakehouse.
A visualization showing objects which violate each Best Practice Rule by rule category.
Severity: Info: ℹ️ Warning:
import fabric_cat_tools as fct
fct.save_as_delta_table(
dataframe = df,
delta_table_name = 'MyNewTable',
write_mode = 'overwrite',
lakehouse = None,
workspace = None
)
import fabric_cat_tools as fct
fct.save_as_delta_table(
dataframe = df,
delta_table_name = 'MyNewTable',
write_mode = 'append',
lakehouse = None,
workspace = None
)
dataframe DataFrame
Required; The dataframe to save as a delta table.
delta_table_name str
Required; The name of the delta table to save the dataframe.
write_mode str
Required; Options: 'append' or 'overwrite'.
lakehouse str
Optional; The name of the lakehouse in which the delta table will be saved. Defaults to the default lakehouse attached to the notebook.
workspace str
Optional; The workspace where the lakehouse resides. Defaults to the workspace in which the notebook resides.
A printout stating the success/failure of the operation.
Returns a list of a semantic model's objects which are not supported by Direct Lake based on official documentation.
import fabric_cat_tools as fct
fct.show_unsupported_direct_lake_objects(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
3 pandas dataframes showing objects (tables/columns/relationships) within the semantic model which are currently not supported by Direct Lake mode.
import fabric_cat_tools as fct
fct.translate_semantic_model(
dataset = 'AdventureWorks',
languages = ['it-IT', 'fr-FR'],
#workspace = None
)
import fabric_cat_tools as fct
fct.translate_semantic_model(
dataset = 'AdventureWorks',
languages = ['it_IT', 'fr-FR'],
exclude_characters = '_-',
#workspace = None
)
dataset str
Required; Name of the semantic model.
languages str or list of str
Required; Language code(s) to translate.
exclude_characters str
Optional; Any character in this string will be replaced by a space when given to the AI translator.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.update_direct_lake_model_lakehouse_connection(
dataset = '',
#lakehouse = '',
#workspace = ''
)
dataset str
Required; Name of the semantic model.
lakehouse str
Optional; Name of the lakehouse.
workspace str
Optional; The workspace where the semantic model resides.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.update_direct_lake_partition_entity(
dataset = 'AdventureWorks',
table_name = 'Internet Sales',
entity_name = 'FACT_InternetSales',
#workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
import fabric_cat_tools as fct
fct.update_direct_lake_partition_entity(
dataset = 'AdventureWorks',
table_name = ['Internet Sales', 'Geography'],
entity_name = ['FACT_InternetSales', 'DimGeography'],
#workspace = '',
#lakehouse = '',
#lakehouse_workspace = ''
)
dataset str
Required; Name of the semantic model.
table_name str or list of str
Required; Name of the table in the semantic model.
entity_name str or list of str
Required; Name of the lakehouse table to be mapped to the semantic model table.
workspace str
Optional; The workspace where the semantic model resides.
lakehouse str
Optional; Name of the lakehouse.
lakehouse_workspace str
Optional; The workspace where the lakehouse resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.update_item(
item_type = 'Lakehouse',
current_name = 'MyLakehouse',
new_name = 'MyNewLakehouse',
#description = 'This is my new lakehouse',
#workspace = None
)
item_type str
Required; Type of item to update. Valid options: 'DataPipeline', 'Eventstream', 'KQLDatabase', 'KQLQueryset', 'Lakehouse', 'MLExperiment', 'MLModel', 'Notebook', 'Warehouse'.
current_name str
Required; Current name of the item.
new_name str
Required; New name of the item.
description str
Optional; New description of the item.
workspace str
Optional; The workspace where the item resides.
A printout stating the success/failure of the operation.
import fabric_cat_tools as fct
fct.vertipaq_analyzer(
dataset = 'AdventureWorks',
#workspace = '',
export = None
)
import fabric_cat_tools as fct
fct.vertipaq_analyzer(
dataset = 'AdventureWorks',
#workspace = '',
export = 'zip'
)
import fabric_cat_tools as fct
fct.vertipaq_analyzer(
dataset = 'AdventureWorks',
#workspace = '',
export = 'table'
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
export str
Optional; Specifying 'zip' will export the results to a zip file in your lakehouse (which can be imported using the import_vertipaq_analyzer function). Specifying 'table' will export the results to delta tables (appended) in your lakehouse. Default value: None.
lakehouse_workspace str
Optional; The workspace in which the lakehouse used by a Direct Lake semantic model resides.
read_stats_from_data bool
Optional; Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse).
A visualization of the Vertipaq Analyzer statistics.
Warms the cache of a Direct Lake semantic model by running a simple DAX query against the columns in a perspective.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.warm_direct_lake_cache_perspective(
dataset = 'AdventureWorks',
perspective = 'WarmCache',
add_dependencies = True,
#workspace = None
)
dataset str
Required; Name of the semantic model.
perspective str
Required; Name of the perspective which contains objects to be used for warming the cache.
add_dependencies bool
Optional; Includes object dependencies in the cache warming process.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
Performs a refresh on the semantic model and puts the columns which were in memory prior to the refresh back into memory.
Note
This function is only relevant to semantic models in Direct Lake mode.
import fabric_cat_tools as fct
fct.warm_direct_lake_cache_isresident(
dataset = 'AdventureWorks',
#workspace = None
)
dataset str
Required; Name of the semantic model.
workspace str
Optional; The workspace where the semantic model resides.
A printout stating the success/failure of the operation.
with connect_semantic_model(dataset ='AdventureWorks', workspace = None, readonly = True) as tom:
with connect_semantic_model(dataset ='AdventureWorks', workspace = None, readonly = False) as tom:
dataset str
Required; The name of the semantic model.
workspace str
Optional; The name of the workspace in which the semantic model resides. Defaults to the workspace in which the notebook resides.
readonly bool
Optional; Setting this to true uses a read only mode of TOM. Setting this to false enables read/write and saves any changes made to the semantic model. Default value: True.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_calculated_column(
table_name = 'Segment',
column_name = 'Business Segment',
expression = '',
data_type = 'String'
)
table_name str
Required; The name of the table where the column will be added.
column_name str
Required; The name of the calculated column.
expression str
Required; The DAX expression for the calculated column.
data_type str
Required; The data type of the calculated column.
format_string str
Optional; The format string for the column.
hidden bool
Optional; Sets the column to be hidden if True. Default value: False.
description str
Optional; The description of the column.
display_folder str
Optional; The display folder for the column.
data_category str
Optional; The data category of the column.
key bool
Optional; Marks the column as the primary key of the table. Default value: False.
summarize_by str
Optional; Sets the value for the Summarize By property of the column.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_calculated_table(
name = 'Segment',
expression = ''
)
name str
Required; The name of the table.
expression str
Required; The DAX expression for the table.
description str
Optional; The description of the table.
data_category str
Optional; The data category of the table.
hidden bool
Optional; Sets the table to be hidden if True. Default value: False.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_calculated_table_column(
table_name = 'Segment',
column_name = 'Business Segment',
source_column = '',
data_type = 'String'
)
table_name str
Required; The name of the table in which the column will reside.
column_name str
Required; The name of the column.
source_column str
Required; The source column for the column.
data_type str
Required; The data type of the column.
format_string str
Optional; The format string of the column.
hidden bool
Optional; Sets the column to be hidden if True. Default value: False.
description str
Optional; The description of the column.
display_folder str
Optional; The display folder for the column.
data_category str
Optional; The data category of the column.
key bool
Optional; Marks the column as the primary key of the table. Default value: False.
summarize_by str
Optional; Sets the value for the Summarize By property of the column.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_calculation_group(
name = 'Segment',
precedence = 1
)
name str
Required; The name of the calculation group.
precedence int
Optional; The precedence of the calculation group.
description str
Optional; The description of the calculation group.
hidden bool
Optional; Sets the calculation group to be hidden if True. Default value: False.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_calculation_item(
table_name = 'Segment',
calculation_item_name = 'YTD',
expression = "CALCULATE(SELECTEDMEASURE(), DATESYTD('Date'[Date]))"
)
table_name str
Required; The name of the table.
calculation_item_name str
Required; The name of the calculation item.
expression str
Required; The DAX expression encapsulating the logic of the calculation item.
ordinal int
Optional; The ordinal (display order) of the calculation item.
format_string_expression str
Optional; The format string expression for the calculation item.
description str
Optional; The description of the calculation item.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_data_column(
table_name = 'Segment',
column_name = 'Business Segment',
source_column = '',
data_type = 'String'
)
table_name str
Required; The name of the table in which the column will exist.
column_name str
Required; The name of the column.
source_column str
Required; The name of the column in the source.
data_type str
Required; The data type of the column.
format_string str
Optional; The format string of the column.
hidden bool
Optional; Sets the column to be hidden if True. Default value: False.
description str
Optional; The description of the column.
display_folder str
Optional; The display folder for the column.
data_category str
Optional; The data category of the column.
key bool
Optional; Marks the column as the primary key of the table. Default value: False.
summarize_by str
Optional; Sets the value for the Summarize By property of the column.
Adds an entity partition to a table in a semantic model. Entity partitions are used for tables within Direct Lake semantic models.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_entity_partition(
table_name = 'Sales',
entity_name = 'Fact_Sales'
)
table_name str
Required; The name of the table in which to place the entity partition.
entity_name str
Required; The name of the lakehouse table.
expression str
Optional; The expression to use for the partition. This defaults to using the existing 'DatabaseQuery' expression within the Direct Lake semantic model.
description str
Optional; The description of the partition.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_expression(
name = 'DatabaseQuery',
expression = 'let...'
)
name str
Required; The name of the expression.
expression str
Required; The M-code encapsulating the logic for the expression.
description str
Optional; The description of the expression.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_field_parameter(
table_name = 'Segment',
objects = ["'Product'[Product Category]", "[Sales Amount]", "'Geography'[Country]"]
)
table_name str
Required; The name of the field parameter.
objects list of str
Required; A list of columns/measures to place in the field parameter. Columns must be fully qualified (i.e. "'Table Name'[Column Name]") and measures must be unqualified (i.e. "[Measure Name]").
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_hierarchy(
table_name = 'Geography',
hierarchy_name = 'Geo Hierarchy',
columns = ['Continent', 'Country', 'City']
)
table_name str
Required; The name of the table in which the hierarchy will reside.
hierarchy_name str
Required; The name of the hierarchy.
columns list of str
Required; A list of columns to use in the hierarchy. Must be ordered from the top of the hierarchy down (i.e. ["Continent", "Country", "City"]).
levels list of str
Optional; A list of levels to use in the hierarchy. These will be the displayed names (instead of the column names). If omitted, the levels will default to showing the column names.
hierarchy_description str
Optional; The description of the hierarchy.
hierarchy_hidden bool
Optional; Sets the hierarchy to be hidden if True. Default value: False.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_m_partition(
table_name = 'Segment',
partition_name = 'Segment',
expression = 'let...',
mode = 'Import'
)
table_name str
Required; The name of the table in which the partition will reside.
partition_name str
Required; The name of the M partition.
expression str
Required; The M-code encapsulating the logic of the partition.
mode str
Optional; The storage mode for the partition. Default value: 'Import'.
description str
Optional; The description of the partition.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_measure(
table_name = 'Sales',
measure_name = 'Sales Amount',
expression = "SUM('Sales'[SalesAmount])",
format_string = '$,00'
)
table_name str
Required; The name of the table in which the measure will reside.
measure_name str
Required; The name of the measure.
expression str
Required; The DAX expression encapsulating the logic of the measure.
format_string str
Optional; The format string of the measure.
hidden bool
Optional; Sets the measure to be hidden if True. Default value: False.
description str
Optional; The description of the measure.
display_folder str
Optional; The display folder for the measure.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_perspective(
perspective_name = 'Marketing'
)
perspective_name str
Required; The name of the perspective.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_relationship(
from_table = 'Sales',
from_column = 'ProductKey',
to_table = 'Product',
to_column = 'ProductKey',
from_cardinality = 'Many',
to_cardinality = 'One',
is_active = True
)
from_table str
Required; The name of the table on the 'from' side of the relationship.
from_column str
Required; The name of the column on the 'from' side of the relationship.
to_table str
Required; The name of the table on the 'to' side of the relationship.
to_column str
Required; The name of the column on the 'to' side of the relationship.
from_cardinality str
Required; The cardinality of the 'from' side of the relationship. Options: ['Many', 'One', 'None'].
to_cardinality str
Required; The cardinality of the 'to' side of the relationship. Options: ['Many', 'One', 'None'].
cross_filtering_behavior str
Optional; Setting for the cross filtering behavior of the relationship. Options: ('Automatic', 'OneDirection', 'BothDirections'). Default value: 'Automatic'.
is_active bool
Optional; Setting for whether the relationship is active or not. Default value: True.
security_filtering_behavior str
Optional; Setting for the security filtering behavior of the relationship. Options: ('None', 'OneDirection', 'BothDirections'). Default value: 'OneDirection'.
rely_on_referential_integrity bool
Optional; Setting for whether the relationship relies on referential integrity. Default value: False.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_role(
role_name = 'Reader'
)
role_name str
Required; The name of the role.
model_permission str
Optional; The model permission of the role. Default value: 'Reader'.
description str
Optional; The description of the role.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_table(
name = 'Sales',
description = 'This is the sales table.',
hidden = False
)
name str
Required; The name of the table.
description str
Optional; The description of the table.
data_category str
Optional; The data category of the table.
hidden bool
Optional; Sets the table to be hidden if True. Default value: False.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_to_perspective(
object = tom.model.Tables['Sales'].Measures['Sales Amount'],
perspective_name = 'Marketing'
)
object
Required; The TOM object.
perspective_name str
Required; The name of the perspective.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.add_translation(
language = 'it-IT'
)
language str
Required; The language code to add to the semantic model.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for c in tom.all_calculation_items():
print(c.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for c in tom.all_columns():
print(c.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for h in tom.all_hierarchies():
print(h.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for l in tom.all_levels():
print(l.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for m in tom.all_measures():
print(m.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for p in tom.all_partitions():
print(p.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
for r in tom.all_rls():
print(r.Name)
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.cardinality(column = tom.model.Tables['Product'].Columns['Color'])
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.clear_annotations(object = tom.model.Tables['Product'].Columns['Color'])
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.clear_annotations(object = tom.model.Tables['Product'])
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.clear_extended_properties(object = tom.model.Tables['Product'].Columns['Color'])
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.data_size(column = tom.model.Tables['Product'].Columns['Color'])
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.depends_on(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.dictionary_size(column = tom.model.Tables['Product'].Columns['Color'])
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.fully_qualified_measures(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.get_annotation_value(
object = tom.model.Tables['Product'].Columns['Color'],
name = 'MyAnnotation'
)
object
Required; The TOM object.
name str
Required; The name of the annotation.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.get_annotations(
object = tom.model.Tables['Product'].Columns['Color']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.get_extended_properties(
object = tom.model.Tables['Product'].Columns['Color']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.get_extended_property_value(
object = tom.model.Tables['Product'].Columns['Color'],
name = 'MyExtendedProperty'
)
object
Required; The TOM object.
name str
Required; The name of the extended property.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.in_perspective(
object = tom.model.Tables['Product'].Columns['Color'],
perspective_name = 'Marketing'
)
object
Required; The TOM object.
perspective_name str
Required; The name of the perspective.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
print(tom.is_direct_lake())
None
True/False
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
print(tom.is_field_parameter(
table_name = 'Parameter'
))
table_name str
Required; The name of the table.
True/False
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.records_per_segment(
object = tom.model.Tables['Sales'].Partitions['Sales - 2024']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.referenced_by(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_annotation(
object = tom.model.Tables['Product'].Columns['Color'],
name = 'MyAnnotation'
)
object
Required; The TOM object.
name str
Required; The name of the annotation.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_extended_property(
object = tom.model.Tables['Product'].Columns['Color'],
name = 'MyExtendedProperty'
)
object
Required; The TOM object.
name str
Required; The name of the extended property.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_from_perspective(
object = tom.model.Tables['Product'].Columns['Color'],
perspective_name = 'Marketing'
)
object
Required; The TOM object.
perspective_name str
Required; The name of the perspective.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_object(
object = tom.model.Tables['Product'].Columns['Color']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_translation(
object = tom.model.Tables['Product'].Columns['Color'],
language = 'it-IT'
)
object
Required; The TOM object.
language str
Required; The language code.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.remove_vertipaq_annotations()
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.row_count(
object = tom.model.Tables['Product']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_annotation(
object = tom.model.Tables['Product'].Columns['Color'],
name = 'MyAnnotation',
value = '1'
)
object
Required; The TOM object.
name str
Required; The annotation name.
value str
Required; The annotation value.
Sets the DirectLakeBehavior property for a Direct Lake semantic model.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_direct_lake_behavior(
direct_lake_behavior = 'DirectLakeOnly'
)
direct_lake_behavior str
Required; The DirectLakeBehavior value.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_extended_property(
object = tom.model.Tables['Product'].Columns['Color'],
extended_property_type = 'Json',
name = 'MyExtendedProperty',
value = '{...}'
)
object
Required; The TOM object.
extended_property_type str
Required; The type of extended property to set. Options: ['Json', 'String'].
name str
Required; The name of the extended property.
value str
Required; The value of the extended property.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_is_available_in_mdx(
table_name = 'Sales',
column_name = 'SalesAmount',
value = False
)
table_name str
Required; The name of the table in which the column resides.
column_name str
Required; The name of the column.
value bool
Required; The value to set for the IsAvailableInMDX property.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_ols(
role_name = 'Reader',
table_name = 'Geography',
column_name = 'Country',
permission = 'None'
)
role_name str
Required; The name of the role.
table_name str
Required; The name of the table.
column_name str
Required; The name of the column.
permission str
Required; The permission for a given column. Options: ['Read', 'None', 'Default'].
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_rls(
role_name = 'Reader',
table_name = 'UserGeography',
filter_expression = "'UserGeography'[UserEmail] = USERPRINCIPALNAME()"
)
role_name str
Required; The name of the role.
table_name str
Required; The name of the table to place row level security.
filter_expression str
Required; The DAX expression containing the row level security logic.
Sets the Summarize By property on a column in a semantic model.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_summarize_by(
table_name = 'Geography',
column_name = 'Country',
value = 'None'
)
table_name str
Required; The name of the table in which the column resides.
column_name str
Required; The name of the column.
value str
Required; The summarize by property of the column.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_translation(
object = tom.model.Tables['Geography'],
language = 'it-IT',
property = 'Name',
value = 'Geografia'
)
object
Required; The TOM object.
language str
Required; The language code in which to translate the object property.
property str
Required; The property to translate. One of the following values: ['Name', 'Description', 'Display Folder'].
value str
Required; The translation value.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = False) as tom:
tom.set_vertipaq_annotations()
None
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.total_size(
object = tom.model.Tables['Sales'].Columns['SalesAmount']
)
object
Required; The TOM object.
The total size (in bytes) of the object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.unqualified_columns(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.used_in_calc_item(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_in_hierarchies(
column = tom.model.Tables['Geography'].Columns['City']
)
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_in_levels(
column = tom.model.Tables['Geography'].Columns['City']
)
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_in_relationships(
object = tom.model.Tables['Geography'].Columns['GeographyID']
)
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_in_relationships(
object = tom.model.Tables['Geography']
)
object
Required; The TOM object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
dep = fct.get_model_calc_dependencies(dataset = 'AdventureWorks', workspace = None)
tom.used_in_rls(
object = tom.model.Tables['Product'].Columns['Color'],
dependencies = dep
)
object
Required; The TOM object.
dependencies
Required; A dataframe showing the model's calculation dependencies.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_in_sort_by(
column = tom.model.Tables['Geography'].Columns['City']
)
column
Required; The TOM column object.
import fabric_cat_tools as fct
from fabric_cat_tools.TOM import connect_semantic_model
with connect_semantic_model(dataset = 'AdventureWorks', workspace = None, readonly = True) as tom:
tom.used_size(
object = tom.model.Tables['Geography'].Hierarchies['Geo Hierarchy']
)
object
Required; The TOM object.
Fabric CAT Tools has been decommissioned. Please use Semantic Link Labs going forward.