Table
This page documents experimental interfaces for working with the Synapse Python Client. Unless otherwise noted, these interfaces are subject to change at any time. Use at your own risk.
Example Script
Working with tables
"""The purpose of this script is to demonstrate how to use the new OOP interface for tables.
The following actions are shown in this script:
1. Creating a table
2. Storing a table
3. Getting a table
4. Storing rows in a table
5. Querying for data from a table
6. Deleting a row from a table
7. Deleting a table
"""
import csv
import os
import random
import string
from datetime import date, datetime, timedelta, timezone
import synapseclient
from synapseclient.models import Column, ColumnType, Table
PROJECT_ID = "syn52948289"
ROWS_TO_WRITE = 10
syn = synapseclient.Synapse(debug=True)
syn.login()
def write_random_csv_with_data(path: str):
randomized_data_columns = {
"my_string_column": str,
"my_integer_column": int,
"my_double_column": float,
"my_boolean_column": bool,
}
# Generate randomized data
data = {}
for name, type in randomized_data_columns.items():
if type == int:
data[name] = [random.randint(0, 100) for _ in range(ROWS_TO_WRITE + 1)]
elif type == float:
data[name] = [random.uniform(0, 100) for _ in range(ROWS_TO_WRITE + 1)]
elif type == bool:
data[name] = [bool(random.getrandbits(1)) for _ in range(ROWS_TO_WRITE + 1)]
elif type == str:
data[name] = [
"".join(random.choices(string.ascii_uppercase + string.digits, k=5))
for _ in range(ROWS_TO_WRITE + 1)
]
with open(path, "w", newline="", encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
# Write column names
writer.writerow(data.keys())
# Write data
for i in range(ROWS_TO_WRITE + 1):
writer.writerow([values[i] for values in data.values()])
def store_table():
# Creating annotations for my table ==================================================
annotations_for_my_table = {
"my_single_key_string": "a",
"my_key_string": ["b", "a", "c"],
"my_key_bool": [False, False, False],
"my_key_double": [1.2, 3.4, 5.6],
"my_key_long": [1, 2, 3],
"my_key_date": [date.today(), date.today() - timedelta(days=1)],
"my_key_datetime": [
datetime.today(),
datetime.today() - timedelta(days=1),
datetime.now(tz=timezone(timedelta(hours=-5))),
datetime(2023, 12, 7, 13, 0, 0, tzinfo=timezone(timedelta(hours=0))),
datetime(2023, 12, 7, 13, 0, 0, tzinfo=timezone(timedelta(hours=-7))),
],
}
# Creating columns for my table ======================================================
columns = [
Column(id=None, name="my_string_column", column_type=ColumnType.STRING),
Column(id=None, name="my_integer_column", column_type=ColumnType.INTEGER),
Column(id=None, name="my_double_column", column_type=ColumnType.DOUBLE),
Column(id=None, name="my_boolean_column", column_type=ColumnType.BOOLEAN),
]
# Creating a table ===============================================================
table = Table(
name="my_first_test_table_ksidubhgfkjsdgf",
columns=columns,
parent_id=PROJECT_ID,
annotations=annotations_for_my_table,
)
table = table.store()
print("Table created:")
print(table)
# Getting a table =================================================================
copy_of_table = Table(id=table.id)
copy_of_table = copy_of_table.get()
print("Table retrieved:")
print(copy_of_table)
# Updating annotations on my table ===============================================
copy_of_table.annotations["my_key_string"] = ["new", "values", "here"]
stored_table = copy_of_table.store()
print("Table updated:")
print(stored_table)
# Storing data to a table =========================================================
name_of_csv = "my_csv_file_with_random_data"
path_to_csv = os.path.join(os.path.expanduser("~/temp"), f"{name_of_csv}.csv")
write_random_csv_with_data(path_to_csv)
copy_of_table.store_rows(values=path_to_csv)
print("Stored data to table from CSV")
# Querying for data from a table =================================================
table_id_to_query = copy_of_table.id
dataframe_from_query = Table.query(query=f"SELECT * FROM {table_id_to_query}")
print(f"Got results: {dataframe_from_query}")
# Deleting a row from the table =====================================================
copy_of_table.delete_rows(query=f"SELECT * from {table_id_to_query} LIMIT 1")
# Deleting a table ===============================================================
table_to_delete = Table(
name="my_test_table_I_want_to_delete",
parent_id=PROJECT_ID,
).store()
table_to_delete.delete()
store_table()
API Reference
synapseclient.models.Table (dataclass)
Bases: AccessControllable, TableBase, TableStoreRowMixin, TableDeleteRowMixin, DeleteMixin, ColumnMixin, GetMixin, QueryMixin, TableUpsertMixin, TableStoreMixin, TableSynchronousProtocol, BaseJSONSchema
A Table represents the metadata of a table.
ATTRIBUTE | DESCRIPTION |
---|---|
id | The unique immutable ID for this table. A new ID will be generated for new Tables. Once issued, this ID is guaranteed to never change or be re-issued. |
name | The name of this table. Must be 256 characters or less. Names may only contain: letters, numbers, spaces, underscores, hyphens, periods, plus signs, apostrophes, and parentheses. |
description | The description of this entity. Must be 1000 characters or less. |
parent_id | The ID of the Entity that is the parent of this table. |
columns | The columns of this table. This is an ordered dictionary where the key is the name of the column and the value is the Column object. When creating a new instance of a Table object you may pass any of the following as the columns argument: a list of Column objects, a dictionary of {name: Column}, or an OrderedDict of {name: Column} (see the examples below). The order of the columns will be the order they are stored in Synapse. If you need to reorder the columns the recommended approach is to use the reorder_column method. You may modify the attributes of the Column object to change the column type, name, or other attributes. For example, suppose you'd like to change a column from an INTEGER to a DOUBLE: change the column_type attribute of the Column object, and the next time you store the table the column will be updated in Synapse with the new type (a short sketch follows this table). Note that the keys in this dictionary should match the column names as they are in Synapse. However, the name attribute of the Column object is used for all interactions with the Synapse API; the OrderedDict key is purely for the usage of this interface. For example, if you wish to rename a column you may do so by changing the name attribute of the Column object. The key in the OrderedDict does not need to be changed; the next time you store the table the column will be updated in Synapse with the new name and the key in the OrderedDict will be updated. |
etag | Synapse employs an Optimistic Concurrency Control (OCC) scheme to handle concurrent updates. Since the E-Tag changes every time an entity is updated it is used to detect when a client's current representation of an entity is out-of-date. |
created_on | The date this table was created. |
created_by | The ID of the user that created this table. |
modified_on | The date this table was last modified. In YYYY-MM-DD-Thh:mm:ss.sssZ format. |
modified_by | The ID of the user that last modified this table. |
version_number | (Read Only) The version number issued to this version of the object. Use the snapshot method to create a new version of the table. |
version_label | (Read Only) The version label for this table. Use the snapshot method to set the label on a new version. |
version_comment | (Read Only) The version comment for this table. Use the snapshot method to set the comment on a new version. |
is_latest_version | (Read Only) If this is the latest version of the object. |
is_search_enabled | When creating or updating a table or view specifies if full text search should be enabled. Note that enabling full text search might slow down the indexing of the table or view. |
activity | The Activity model represents the main record of Provenance in Synapse. It is analogous to the Activity defined in the W3C Specification on Provenance. Activity cannot be removed during a store operation by setting it to None. You must use: synapseclient.models.Activity.delete_async or synapseclient.models.Activity.disassociate_from_entity_async. |
annotations | Additional metadata associated with the table. The key is the name of your desired annotations. The value is an object containing a list of values (use an empty list to represent no values for a key) and the value type associated with all values in the list. To remove all annotations set this to an empty dict. |
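For example, the in-place column type change described above can be done in a few lines. This is a minimal sketch assuming a hypothetical table syn1234 that already has an INTEGER column named my_integer_column; it only uses the get/store pattern and the column_type attribute shown elsewhere on this page.
from synapseclient import Synapse
from synapseclient.models import ColumnType, Table

syn = Synapse()
syn.login()

# Retrieve the table with its columns so they can be modified in place
table = Table(id="syn1234").get(include_columns=True)

# Change the column from INTEGER to DOUBLE; the change is applied on the next store
table.columns["my_integer_column"].column_type = ColumnType.DOUBLE
table.store()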
Create a table with data without specifying columns
This API is set up to allow the data to define which columns are created on the Synapse table automatically. The limitation of this behavior is that the columns created will only be of the following types:
- STRING
- LARGETEXT
- INTEGER
- DOUBLE
- BOOLEAN
- DATE
The column type is determined from the data that is passed in, using the pandas function infer_dtype. If you need a more specific column type, or need to add options to the columns, follow the examples below.
import pandas as pd
from synapseclient import Synapse
from synapseclient.models import Table, SchemaStorageStrategy
syn = Synapse()
syn.login()
my_data = pd.DataFrame(
    {
        "my_string_column": ["a", "b", "c", "d"],
        "my_integer_column": [1, 2, 3, 4],
        "my_double_column": [1.0, 2.0, 3.0, 4.0],
        "my_boolean_column": [True, False, True, False],
    }
)

table = Table(
    name="my_table",
    parent_id="syn1234",
).store()

table.store_rows(values=my_data, schema_storage_strategy=SchemaStorageStrategy.INFER_FROM_DATA)

# Prints out the stored data about this specific column
print(table.columns["my_string_column"])
Rename an existing column
This example shows how you may retrieve a table from Synapse, rename a column, and then store the table back in Synapse.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
table = Table(
    name="my_table",
    parent_id="syn1234",
).get()

# You may also get the table by id:
table = Table(id="syn4567").get()

table.columns["my_old_column"].name = "my_new_column"

# Before the data is stored in Synapse you'll still be able to use the old key to access the column entry
print(table.columns["my_old_column"])

table.store()

# After the data is stored in Synapse you'll be able to use the new key to access the column entry
print(table.columns["my_new_column"])
Create a table with a list of columns
A list of columns may be passed in when creating a new table. The order of the columns in the list will be the order they are stored in Synapse. If the table already exists and you create the Table instance in this way, the columns will be appended to the end of the existing columns.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
columns = [
    Column(name="my_string_column", column_type=ColumnType.STRING),
    Column(name="my_integer_column", column_type=ColumnType.INTEGER),
    Column(name="my_double_column", column_type=ColumnType.DOUBLE),
    Column(name="my_boolean_column", column_type=ColumnType.BOOLEAN),
]

table = Table(
    name="my_table",
    parent_id="syn1234",
    columns=columns,
)
table.store()
Creating a table with a dictionary of columns
When specifying columns via a dict, setting the name attribute on the Column object is optional. When it is not specified, it will be pulled from the key of the dict.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
columns = {
    "my_string_column": Column(column_type=ColumnType.STRING),
    "my_integer_column": Column(column_type=ColumnType.INTEGER),
    "my_double_column": Column(column_type=ColumnType.DOUBLE),
    "my_boolean_column": Column(column_type=ColumnType.BOOLEAN),
}

table = Table(
    name="my_table",
    parent_id="syn1234",
    columns=columns,
)
table.store()
Creating a table with an OrderedDict of columns
When specifying columns via an OrderedDict, setting the name attribute on the Column object is optional. When it is not specified, it will be pulled from the key of the dict.
from collections import OrderedDict
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
columns = OrderedDict({
    "my_string_column": Column(column_type=ColumnType.STRING),
    "my_integer_column": Column(column_type=ColumnType.INTEGER),
    "my_double_column": Column(column_type=ColumnType.DOUBLE),
    "my_boolean_column": Column(column_type=ColumnType.BOOLEAN),
})

table = Table(
    name="my_table",
    parent_id="syn1234",
    columns=columns,
)
table.store()
Functions
get
get(include_columns: bool = True, include_activity: bool = False, *, synapse_client: Optional[Synapse] = None) -> Self
Get the metadata about the table from Synapse.
PARAMETER | DESCRIPTION |
---|---|
include_columns | If True, will include fully filled column objects in the .columns attribute. Defaults to True. TYPE: bool |
include_activity | If True the activity will be included in the table if it exists. Defaults to False. TYPE: bool |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
Self | The Table instance stored in Synapse. |
Getting metadata about a table using id
Get a table by ID and print out the columns and activity. include_columns defaults to True and include_activity defaults to False. When you need to update existing columns or activity these need to be set to True during the get call, then you'll make the changes, and finally call the .store() method.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
table = Table(id="syn4567").get(include_activity=True)
print(table)
# Columns are retrieved by default
print(table.columns)
print(table.activity)
Getting metadata about a table using name and parent_id
Get a table by name/parent_id and print out the columns and activity. include_columns defaults to True and include_activity defaults to False. When you need to update existing columns or activity these need to be set to True during the get call, then you'll make the changes, and finally call the .store() method.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
table = Table(name="my_table", parent_id="syn1234").get(include_columns=True, include_activity=True)
print(table)
print(table.columns)
print(table.activity)
store
Store non-row information about a table including the columns and annotations.
Note the following behavior for the order of columns:

- If a column is added via the add_column method it will be added at the index you specify, or at the end of the columns list.
- If column(s) are added during the construction of your Table instance, i.e. Table(columns=[Column(name="foo")]), they will be added at the beginning of the columns list.
- If you use the store_rows method and the schema_storage_strategy is set to INFER_FROM_DATA the columns will be added at the end of the columns list.
PARAMETER | DESCRIPTION |
---|---|
dry_run | If True, will not actually store the table but will log to the console what would have been stored. TYPE: bool |
job_timeout | The maximum amount of time to wait for a job to complete. This is used when updating the table schema. If the timeout is reached a SynapseTimeoutError will be raised. TYPE: int |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
Self | The Table instance stored in Synapse. |
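Storing a table
A minimal sketch of storing a new table: create it with a single column, preview the change with dry_run, then store it. The table name and project ID (syn1234) are hypothetical; only the parameters documented above are used.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table

syn = Synapse()
syn.login()

table = Table(
    name="my_table",
    parent_id="syn1234",
    columns=[Column(name="my_string_column", column_type=ColumnType.STRING)],
)

# Log what would be stored without making any changes in Synapse
table.store(dry_run=True)

# Actually store the table
table = table.store()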
delete
Delete the entity from Synapse. This is not version specific. If you'd like to delete a specific version of the entity you must use the synapseclient.api.delete_entity function directly.
PARAMETER | DESCRIPTION |
---|---|
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
None | None |
Deleting a table
Deleting a table is only supported by the ID of the table.
from synapseclient import Synapse
from synapseclient.models import Table

syn = Synapse()
syn.login()

Table(id="syn4567").delete()
query (staticmethod)
query(query: str, include_row_id_and_row_version: bool = True, convert_to_datetime: bool = False, download_location=None, quote_character='"', escape_character='\\', line_end=str(linesep), separator=',', header=True, *, synapse_client: Optional[Synapse] = None, **kwargs) -> Union[DATA_FRAME_TYPE, str]
Query for data on a table stored in Synapse. The results will always be returned as a Pandas DataFrame unless you specify a download_location, in which case the results will be downloaded to that location. There are a number of arguments that you may pass to this function depending on whether you are getting the results back as a DataFrame or downloading the results to a file.
PARAMETER | DESCRIPTION |
---|---|
query | The query to run. The query must be valid syntax that Synapse can understand. See this document that describes the expected syntax of the query: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/web/controller/TableExamples.html TYPE: str |
include_row_id_and_row_version | If True the ROW_ID and ROW_VERSION columns will be included in the results. These columns are required if you intend to use the results to update or delete rows. TYPE: bool |
convert_to_datetime | (DataFrame only) If set to True, will convert all Synapse DATE columns from UNIX timestamp integers into UTC datetime objects. TYPE: bool |
download_location | (CSV only) If set to a path the results will be downloaded to that directory. The results will be downloaded as a CSV file. A path to the downloaded file will be returned instead of a DataFrame. DEFAULT: None |
quote_character | (CSV only) The character to use to quote fields. The default is a double quote. |
escape_character | (CSV only) The character to use to escape special characters. The default is a backslash. |
line_end | (CSV only) The character to use to end a line. The default is the system's line separator. |
separator | (CSV only) The character to use to separate fields. The default is a comma. |
header | (CSV only) If set to True the first row will be used as the header row. The default is True. |
**kwargs | (DataFrame only) Additional keyword arguments to pass to pandas.read_csv. See https://pandas.pydata.org/docs/reference/api/pandas.read_csv.html for the complete list of supported arguments. This is exposed because internally the query downloads a CSV from Synapse and then loads it into a DataFrame. |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
Union[DATA_FRAME_TYPE, str] | The results of the query as a Pandas DataFrame, or a path to the downloaded query results if download_location is set. |
Querying for data
This example shows how you may query for data in a table and print out the results.
from synapseclient import Synapse
from synapseclient.models import query
syn = Synapse()
syn.login()
results = query(query="SELECT * FROM syn1234")
print(results)
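Downloading query results to a CSV file
To download the results instead of loading them into memory, set download_location on the staticmethod documented above; the returned value is then the path to the file. A minimal sketch with a hypothetical table ID and destination directory:
from synapseclient import Synapse
from synapseclient.models import Table

syn = Synapse()
syn.login()

# Returns the path to the downloaded CSV rather than a DataFrame
path_to_results = Table.query(
    query="SELECT * FROM syn1234",
    download_location="/tmp",
)
print(path_to_results)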
query_part_mask (staticmethod)
query_part_mask(query: str, part_mask: int, *, synapse_client: Optional[Synapse] = None, **kwargs) -> QueryResultOutput
Query for data on a table stored in Synapse. This is a more advanced use case of the query function that allows you to determine what additional metadata about the table or query should also be returned. If you do not need this additional information then you are better off using the query function.
The query for this method uses this REST API: https://rest-docs.synapse.org/rest/POST/entity/id/table/query/async/start.html
PARAMETER | DESCRIPTION |
---|---|
query | The query to run. The query must be valid syntax that Synapse can understand. See this document that describes the expected syntax of the query: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/web/controller/TableExamples.html TYPE: str |
part_mask | The bitwise OR of the part mask values you want to return in the results. The following part masks are implemented to be returned in the results: Query Results (queryResults) = 0x1, Query Count (queryCount) = 0x2, Sum of the file sizes (sumFileSizesBytes) = 0x40, Last updated on date of the table (lastUpdatedOn) = 0x80. TYPE: int |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
QueryResultOutput | The results of the query along with the requested metadata. |
Querying for data with a part mask
This example shows how to use the bitwise OR of Python to combine the part mask values and then use that to query for data in a table and print out the results. In this case we are getting the results of the query, the count of rows, and the last updated on date of the table.
from synapseclient import Synapse
from synapseclient.models import query_part_mask
syn = Synapse()
syn.login()
QUERY_RESULTS = 0x1
QUERY_COUNT = 0x2
LAST_UPDATED_ON = 0x80
# Combine the part mask values using bitwise OR
part_mask = QUERY_RESULTS | QUERY_COUNT | LAST_UPDATED_ON
result = query_part_mask(query="SELECT * FROM syn1234", part_mask=part_mask)
print(result)
store_rows
store_rows(values: Union[str, Dict[str, Any], DATA_FRAME_TYPE], schema_storage_strategy: SchemaStorageStrategy = None, column_expansion_strategy: ColumnExpansionStrategy = None, dry_run: bool = False, additional_changes: List[Union[TableSchemaChangeRequest, UploadToTableRequest, AppendableRowSetRequest]] = None, *, insert_size_bytes: int = 900 * MB, csv_table_descriptor: Optional[CsvTableDescriptor] = None, read_csv_kwargs: Optional[Dict[str, Any]] = None, to_csv_kwargs: Optional[Dict[str, Any]] = None, job_timeout: int = 600, synapse_client: Optional[Synapse] = None) -> None
Add or update rows in Synapse from the sources defined below. In most cases the result of this function call will append rows to the table. In the case of an update this method works on a full row replacement. What this means is that you may not do a partial update of a row. If you want to update a row you must pass in all the data for that row, or the data for the columns not provided will be set to null.
If you'd like to update a row see the example "Updating rows in a table" below. If you'd like to perform an upsert or partial update of a row you may use the .upsert_rows() method. See that method for more information.
Note the following behavior for the order of columns:

- If a column is added via the add_column method it will be added at the index you specify, or at the end of the columns list.
- If column(s) are added during the construction of your Table instance, i.e. Table(columns=[Column(name="foo")]), they will be added at the beginning of the columns list.
- If you use the store_rows method and the schema_storage_strategy is set to INFER_FROM_DATA the columns will be added at the end of the columns list.
Limitations:

- Synapse limits the size of a single request to a 1GB CSV file. If you are storing a CSV file that is larger than this limit the data will be chunked into smaller requests. This is done by reading the file once to determine the row and byte boundaries and calculating the MD5 hash of each portion, then reading the file again to send the data to Synapse. This ensures that the data is not corrupted during the upload process; in addition, Synapse requires the MD5 hash of the data to be sent in the request along with the number of bytes being sent.
- The limit of 1GB is also enforced when storing a dictionary or a DataFrame. The data will be converted to CSV format using the .to_csv() pandas function. If you are storing more than 1GB of data it is recommended that you store it as a CSV and use the file path to upload the data, since the DataFrame chunking process is slower than reading portions of a file on disk and calculating the MD5 hash of each portion.
The following is a Sequence Diagram that describes the process noted in the limitations above. It shows how the data is chunked into smaller requests when the data exceeds the limit of 1GB, and how portions of the data are read from the CSV file on disk while being uploaded to Synapse.
sequenceDiagram
    participant User
    participant Table
    participant FileSystem
    participant Synapse

    User->>Table: store_rows(values)
    alt CSV size > 1GB
        Table->>Synapse: Apply schema changes before uploading
        Note over Table, FileSystem: Read CSV twice
        Table->>FileSystem: Read entire CSV (First Pass)
        FileSystem-->>Table: Compute chunk sizes & MD5 hashes
        loop Read and Upload CSV chunks (Second Pass)
            Table->>FileSystem: Read next chunk from CSV
            FileSystem-->>Table: Return bytes
            Table->>Synapse: Upload CSV chunk
            Synapse-->>Table: Return `file_handle_id`
            Table->>Synapse: Send `TableUpdateTransaction` to append/update rows
            Synapse-->>Table: Transaction result
        end
    else
        Table->>Synapse: Upload CSV without splitting & any additional schema changes
        Synapse-->>Table: Return `file_handle_id`
        Table->>Synapse: Send `TableUpdateTransaction` to append/update rows
        Synapse-->>Table: Transaction result
    end
    Table-->>User: Upload complete
The following is a Sequence Diagram that describes the process noted in the limitations above for DataFrames. It shows how the data is chunked into smaller requests when the data exceeds the limit of 1GB, and how portions of the data are read from the DataFrame while being uploaded to Synapse.
sequenceDiagram
    participant User
    participant Table
    participant MemoryBuffer
    participant Synapse

    User->>Table: store_rows(DataFrame)
    loop For all rows in DataFrame in 100 row increments
        Table->>MemoryBuffer: Convert DataFrame rows to CSV in-memory
        MemoryBuffer-->>Table: Compute chunk sizes & MD5 hashes
    end
    alt Multiple chunks detected
        Table->>Synapse: Apply schema changes before uploading
    end
    loop For all chunks found in first loop
        loop For all parts in chunk byte boundary
            Table->>MemoryBuffer: Read small (< 8MB) part of the chunk
            MemoryBuffer-->>Table: Return bytes (with correct offset)
            Table->>Synapse: Upload part
            Synapse-->>Table: Upload response
        end
        Table->>Synapse: Complete upload
        Synapse-->>Table: Return `file_handle_id`
        Table->>Synapse: Send `TableUpdateTransaction` to append/update rows
        Synapse-->>Table: Transaction result
    end
    Table-->>User: Upload complete
PARAMETER | DESCRIPTION |
---|---|
values | Supports storing data from the following sources: a string holding the path to a CSV file, a dictionary of lists where each key is a column name, or a pandas DataFrame. |
schema_storage_strategy | Determines how to automate the creation of columns based on the data that is being stored. If you want to have full control over the schema you may set this to None (the default). The limitation of this behavior is that the columns created may only be of the following types: STRING, LARGETEXT, INTEGER, DOUBLE, BOOLEAN, DATE. The determination is based on how this pandas function infers the data type: infer_dtype. This may also only set the name and type of the column. The usage of this feature will never delete a column, shrink a column, or change the type of a column that already exists. If you need to change any of these attributes you must do so after getting the table via a .get() call, modifying the columns, and calling .store(). TYPE: SchemaStorageStrategy |
column_expansion_strategy | Determines how to automate the expansion of columns based on the data that is being stored. The options given allow cells with a limit on the length of content (such as strings) to be expanded to a larger size if the data being stored exceeds the limit. If you want to have full control over the schema you may set this to None (the default). TYPE: ColumnExpansionStrategy |
dry_run | Log the actions that would be taken, but do not actually perform the actions. This will not print out the data that would be stored or modified as a result of this action. It will print out the actions that would be taken, such as creating a new column, updating a column, or updating table metadata. This is useful for debugging and understanding what actions would be taken without actually performing them. TYPE: bool |
additional_changes | Additional changes to the table that should execute within the same transaction as appending or updating rows. This is used as a part of the upsert_rows method. |
insert_size_bytes | The maximum size of data that will be stored to Synapse within a single transaction. The API has a limit of 1GB, but the default is set to 900 MB to allow for some overhead in the request. The implication of this limit is that when you are storing a CSV that is larger than this limit the data will be chunked into smaller requests by reading the file once to determine the row and byte boundaries and calculating the MD5 hash of each portion, then reading the file again to send the data to Synapse. This process ensures that the data is not corrupted during the upload process; in addition, Synapse requires the MD5 hash of the data to be sent in the request along with the number of bytes being sent. This argument is also used when storing a dictionary or a DataFrame, in which case the data is converted to CSV format using the .to_csv() pandas function. TYPE: int |
csv_table_descriptor | When passing in a CSV file this will allow you to specify the format of the CSV file. This is only used when the values argument is a path to a CSV file (see the sketch after the examples below). TYPE: Optional[CsvTableDescriptor] |
read_csv_kwargs | Additional arguments to pass to the pandas.read_csv function when reading data from a CSV file. |
to_csv_kwargs | Additional arguments to pass to the pandas .to_csv() function when writing data to a CSV file. |
job_timeout | The maximum amount of time to wait for a job to complete. This is used when inserting and updating rows of data. Each individual request to Synapse will be sent as an independent job. If the timeout is reached a SynapseTimeoutError will be raised. TYPE: int |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
None | None |
Inserting rows into a table that already has columns
This example shows how you may insert rows into a table.
Suppose we have a table with the following columns:
col1 | col2 | col3 |
---|---|---|
The following code will insert rows into the table:
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
data_to_insert = {
    'col1': ['A', 'B', 'C'],
    'col2': [1, 2, 3],
    'col3': [1, 2, 3],
}

Table(id="syn1234").store_rows(values=data_to_insert)
The resulting table will look like this:
col1 | col2 | col3 |
---|---|---|
A | 1 | 1 |
B | 2 | 2 |
C | 3 | 3 |
Inserting rows into a table that does not have columns
This example shows how you may insert rows into a table that does not have columns. The columns will be inferred from the data that is being stored.
from synapseclient import Synapse
from synapseclient.models import Table, SchemaStorageStrategy
syn = Synapse()
syn.login()
data_to_insert = {
    'col1': ['A', 'B', 'C'],
    'col2': [1, 2, 3],
    'col3': [1, 2, 3],
}

Table(id="syn1234").store_rows(
    values=data_to_insert,
    schema_storage_strategy=SchemaStorageStrategy.INFER_FROM_DATA
)
The resulting table will look like this:
col1 | col2 | col3 |
---|---|---|
A | 1 | 1 |
B | 2 | 2 |
C | 3 | 3 |
Using the dry_run option with a SchemaStorageStrategy of INFER_FROM_DATA
This example shows how you may use the dry_run option with the SchemaStorageStrategy set to INFER_FROM_DATA. This will show you the actions that would be taken, but not actually perform the actions.
from synapseclient import Synapse
from synapseclient.models import Table, SchemaStorageStrategy
syn = Synapse()
syn.login()
data_to_insert = {
    'col1': ['A', 'B', 'C'],
    'col2': [1, 2, 3],
    'col3': [1, 2, 3],
}

Table(id="syn1234").store_rows(
    values=data_to_insert,
    dry_run=True,
    schema_storage_strategy=SchemaStorageStrategy.INFER_FROM_DATA
)
Running this will print the actions that would be taken to the console without actually performing them.
Updating rows in a table
This example shows how you may query for data in a table, update the data, and then store the updated rows back in Synapse.
Suppose we have a table that has the following data:
col1 | col2 | col3 |
---|---|---|
A | 1 | 1 |
B | 2 | 2 |
C | 3 | 3 |
Behind the scenes the table also has ROW_ID and ROW_VERSION columns which are used to identify the row that is being updated. These columns are not shown in the table above, but are included in the data that is returned when querying the table. If you add data that does not have these columns it will be treated as new rows to be inserted.
from synapseclient import Synapse
from synapseclient.models import Table, query
syn = Synapse()
syn.login()
query_results = query(query="select * from syn1234 where col1 in ('A', 'B')")
# Update `col2` of the row where `col1` is `A` to `22`
query_results.loc[query_results['col1'] == 'A', 'col2'] = 22
# Update `col3` of the row where `col1` is `B` to `33`
query_results.loc[query_results['col1'] == 'B', 'col3'] = 33
Table(id="syn1234").store_rows(values=query_results)
The resulting table will look like this:
col1 | col2 | col3 |
---|---|---|
A | 22 | 1 |
B | 2 | 33 |
C | 3 | 3 |
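Storing rows from a non-comma-separated CSV file
When the values argument is the path to a CSV file that is not comma-separated, the csv_table_descriptor parameter describes the file's format. This is a hedged sketch: the file path is hypothetical, and it assumes CsvTableDescriptor is importable from synapseclient.models and accepts a separator field.
from synapseclient import Synapse
from synapseclient.models import CsvTableDescriptor, Table

syn = Synapse()
syn.login()

# Hypothetical tab-separated file; separator is an assumed CsvTableDescriptor field
Table(id="syn1234").store_rows(
    values="/path/to/my_data.tsv",
    csv_table_descriptor=CsvTableDescriptor(separator="\t"),
)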
upsert_rows
upsert_rows(values: DATA_FRAME_TYPE, primary_keys: List[str], dry_run: bool = False, *, rows_per_query: int = 50000, update_size_bytes: int = 1.9 * MB, insert_size_bytes: int = 900 * MB, job_timeout: int = 600, wait_for_eventually_consistent_view: bool = False, wait_for_eventually_consistent_view_timeout: int = 600, synapse_client: Optional[Synapse] = None, **kwargs) -> None
This method allows you to perform an upsert (update and insert) for row(s). This means that you may update a row with only the data that you want to change. When supplied with a row that does not match the given primary_keys a new row will be inserted.

Using the primary_keys argument you may specify which columns to use to determine if a row already exists. If a row exists with the same values in the columns specified in this list the row will be updated. If a row does not exist it will be inserted.
Limitations:

- The request to update and the request to insert data do not occur in a single transaction. This means that the update of data may succeed while the insert of data fails. Additionally, as noted in the limitation below, if data is chunked up into multiple requests you may find that a portion of your data is updated while another portion is not.
- The number of rows that may be upserted in a single call should be kept to a minimum (< 50,000). There is significant overhead in the request to Synapse for each row that is upserted. If you are upserting a large number of rows a better approach may be to query for the data you want to update, update the data, then use the store_rows method to update the data in Synapse. Any rows you want to insert may be added to the DataFrame that is passed to the store_rows method.
- When upserting many rows the requests to Synapse will be chunked into smaller requests. The limit is 2MB per request. This chunking happens automatically and should not be a concern for most users. If you are having issues with the request being too large you may lower the number of rows you are trying to upsert, or note the above limitation.
- The primary_keys argument must contain at least one column.
- The primary_keys argument cannot contain columns that are a LIST type.
- The primary_keys argument cannot contain columns that are a JSON type.
- The values used as the primary_keys must be unique in the table. If there are multiple rows with the same values in the primary_keys an exception will be raised.
- The columns used in primary_keys cannot contain updated values. Since the values in these columns are used to determine if a row exists, they cannot be updated in the same transaction.
The following is a Sequence Diagram that describes the upsert process at a high level:
sequenceDiagram
    participant User
    participant Table
    participant Synapse

    User->>Table: upsert_rows()
    loop Query and Process Updates in Chunks (rows_per_query)
        Table->>Synapse: Query existing rows using primary keys
        Synapse-->>Table: Return matching rows
        Note over Table: Create partial row updates
        loop For results from query
            Note over Table: Sum row/chunk size
            alt Chunk size exceeds update_size_bytes
                Table->>Synapse: Push update chunk
                Synapse-->>Table: Acknowledge update
            end
            Table->>Table: Add row to chunk
        end
        alt Remaining updates exist
            Table->>Synapse: Push final update chunk
            Synapse-->>Table: Acknowledge update
        end
    end
    alt New rows exist
        Table->>Table: Identify new rows for insertion
        Table->>Table: Call `store_rows()` function
    end
    Table-->>User: Upsert complete
PARAMETER | DESCRIPTION |
---|---|
values | Supports storing data from the following sources: a pandas DataFrame, or a dictionary of lists where each key is a column name (as shown in the examples below). TYPE: DATA_FRAME_TYPE |
primary_keys | The columns to use to determine if a row already exists. If a row exists with the same values in the columns specified in this list the row will be updated. If a row does not exist it will be inserted. |
dry_run | If set to True the data will not be updated in Synapse. A message will be printed to the console with the number of rows that would have been updated and inserted. If you would like to see the data that would be updated and inserted you may set the log level to DEBUG (for example by constructing your Synapse instance with debug=True). TYPE: bool |
rows_per_query | The number of rows that will be queried from Synapse per request. Since we need to query for the data that is being updated this will determine the number of rows that are queried at a time. The default is 50,000 rows. TYPE: int |
update_size_bytes | The maximum size of the request that will be sent to Synapse when updating rows of data. The default is 1.9MB. TYPE: int |
insert_size_bytes | The maximum size of the request that will be sent to Synapse when inserting rows of data. The default is 900MB. TYPE: int |
job_timeout | The maximum amount of time to wait for a job to complete. This is used when inserting and updating rows of data. Each individual request to Synapse will be sent as an independent job. If the timeout is reached a SynapseTimeoutError will be raised. TYPE: int |
wait_for_eventually_consistent_view | Only used if the table is a view. If set to True this will wait for the view to reflect any changes that you've made to the view. This is useful if you need to query the view after making changes to the data. TYPE: bool |
wait_for_eventually_consistent_view_timeout | The maximum amount of time to wait for a view to be eventually consistent. The default is 600 seconds. TYPE: int |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
**kwargs | Additional arguments that are passed through when converting the values into a DataFrame. |
Updating 2 rows and inserting 1 row
In this given example we have a table with the following data:
col1 | col2 | col3 |
---|---|---|
A | 1 | 1 |
B | 2 | 2 |
The following code will update the first row's col2 to 22, update the second row's col3 to 33, and insert a new row:
from synapseclient import Synapse
from synapseclient.models import Table
import pandas as pd

syn = Synapse()
syn.login()

table = Table(id="syn123").get(include_columns=True)

df = pd.DataFrame({
    'col1': ['A', 'B', 'C'],
    'col2': [22, 2, 3],
    'col3': [1, 33, 3],
})

table.upsert_rows(values=df, primary_keys=["col1"])
The resulting table will look like this:
col1 | col2 | col3 |
---|---|---|
A | 22 | 1 |
B | 2 | 33 |
C | 3 | 3 |
Deleting data from a specific cell
In this given example we have a table with the following data:
col1 | col2 | col3 |
---|---|---|
A | 1 | 1 |
B | 2 | 2 |
The following code will clear the first row's col2 and the second row's col3 by upserting None values for those cells:
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
table = Table(id="syn123").get(include_columns=True)
df = {
    'col1': ['A', 'B'],
    'col2': [None, 2],
    'col3': [1, None],
}

table.upsert_rows(values=df, primary_keys=["col1"])
The resulting table will look like this:
col1 | col2 | col3 |
---|---|---|
A | | 1 |
B | 2 | |
delete_rows
Delete rows from a table given a query to select rows. The query at a minimum must select the ROW_ID and ROW_VERSION columns. If you want to inspect the data that will be deleted ahead of time you may use the .query method to get the data.
PARAMETER | DESCRIPTION |
---|---|
query | The query to select the rows to delete. The query at a minimum must select the ROW_ID and ROW_VERSION columns. TYPE: str |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
RETURNS | DESCRIPTION |
---|---|
DATA_FRAME_TYPE | The results of your query for the rows that were deleted from the table. |
Selecting a row to delete
This example shows how you may select a row to delete from a table.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
Table(id="syn1234").delete_rows(query="SELECT ROW_ID, ROW_VERSION FROM syn1234 WHERE foo = 'asdf'")
Selecting all rows that contain a null value
This example shows how you may select a row to delete from a table where a column has a null value.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
Table(id="syn1234").delete_rows(query="SELECT ROW_ID, ROW_VERSION FROM syn1234 WHERE foo is null")
snapshot
snapshot(comment: str = None, label: str = None, include_activity: bool = True, associate_activity_to_new_version: bool = True, *, synapse_client: Optional[Synapse] = None) -> Dict[str, Any]
Request to create a new snapshot of a table. The provided comment, label, and activity will be applied to the current version thereby creating a snapshot and locking the current version. After the snapshot is created a new version will be started with an 'in-progress' label.
PARAMETER | DESCRIPTION |
---|---|
comment | Comment to apply to this snapshot of the table. TYPE: str |
label | Label to apply to this snapshot of the table. The label must be unique; if a label is not provided a unique label will be generated. TYPE: str |
include_activity | If True the activity will be included in the snapshot if it exists. In order to include the activity, the activity must have already been stored in Synapse by setting the activity attribute on the Table and calling the store() method. TYPE: bool |
associate_activity_to_new_version | If True the activity will be associated with the new version of the table. If False the activity will not be associated with the new version of the table. TYPE: bool |
synapse_client | If not passed in and caching was not disabled by Synapse.allow_client_caching(False) this will use the last created instance from the Synapse class constructor. |
Creating a snapshot of a table
Comment and label are optional, but filled in for this example.
from synapseclient.models import Table
from synapseclient import Synapse
syn = Synapse()
syn.login()
my_table = Table(id="syn1234")
my_table.snapshot(
    comment="This is a new snapshot comment",
    label="This is a unique label"
)
Including the activity (Provenance) in the snapshot without pulling it forward to the new in-progress version of the table
By default this method is set up to include the activity in the snapshot and then pull the activity forward to the new version. If you do not want to include the activity in the snapshot you can set include_activity to False. If you do not want to pull the activity forward to the new version you can set associate_activity_to_new_version to False.
See the activity attribute on the Table class for more information on how to interact with the activity.
from synapseclient.models import Table
from synapseclient import Synapse
syn = Synapse()
syn.login()
my_table = Table(id="syn1234")
my_table.snapshot(
    comment="This is a new snapshot comment",
    label="This is a unique label",
    include_activity=True,
    associate_activity_to_new_version=False
)
RETURNS | DESCRIPTION |
---|---|
Dict[str, Any] | A dictionary that matches: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/SnapshotResponse.html |
delete_column
delete_column(name: str) -> None
Mark a column for deletion. Note that this does not delete the column from Synapse. You must call the .store() function on this table class instance to delete the column from Synapse. This is a convenience function to eliminate the need to manually delete the column from the dictionary and add it to the ._columns_to_delete attribute.
PARAMETER | DESCRIPTION |
---|---|
name | The name of the column to delete. TYPE: str |
RETURNS | DESCRIPTION |
---|---|
None | None |
Deleting a column
This example shows how you may delete a column from a table and then store the change back in Synapse.
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
table = Table(id="syn1234").get(include_columns=True)
table.delete_column(name="my_column")
table.store()
Deleting a column (async)
This example shows how you may delete a column from a table and then store the change back in Synapse.
import asyncio
from synapseclient import Synapse
from synapseclient.models import Table
syn = Synapse()
syn.login()
async def main():
    table = await Table(id="syn1234").get_async(include_columns=True)
    table.delete_column(name="my_column")
    await table.store_async()

asyncio.run(main())
add_column
Add column(s) to the table. Note that this does not store the column(s) in Synapse. You must call the .store() function on this table class instance to store the column(s) in Synapse. This is a convenience function to eliminate the need to manually add the column(s) to the dictionary.
This function will add an item to the .columns attribute of this class instance. .columns is a dictionary where the key is the name of the column and the value is the Column object.
PARAMETER | DESCRIPTION |
---|---|
column | The column(s) to add; may be a single Column object or a list of Column objects. |
index | The index to insert the column at. If not passed in, the column will be added to the end of the list. TYPE: int |
RETURNS | DESCRIPTION |
---|---|
None | None |
Adding a single column
This example shows how you may add a single column to a table and then store the change back in Synapse.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
table = Table(id="syn1234").get(include_columns=True)
table.add_column(
    Column(name="my_column", column_type=ColumnType.STRING)
)
table.store()
Adding multiple columns
This example shows how you may add multiple columns to a table and then store the change back in Synapse.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
table = Table(id="syn1234").get(include_columns=True)
table.add_column([
    Column(name="my_column", column_type=ColumnType.STRING),
    Column(name="my_column2", column_type=ColumnType.INTEGER),
])
table.store()
Adding a column at a specific index
This example shows how you may add a column at a specific index to a table and then store the change back in Synapse. If the index is out of bounds the column will be added to the end of the list.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
table = Table(id="syn1234").get(include_columns=True)
table.add_column(
    Column(name="my_column", column_type=ColumnType.STRING),
    # Add the column at the beginning of the list
    index=0
)
table.store()
Adding a single column (async)
This example shows how you may add a single column to a table and then store the change back in Synapse.
import asyncio
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
async def main():
    table = await Table(id="syn1234").get_async(include_columns=True)
    table.add_column(
        Column(name="my_column", column_type=ColumnType.STRING)
    )
    await table.store_async()

asyncio.run(main())
Adding multiple columns (async)
This example shows how you may add multiple columns to a table and then store the change back in Synapse.
import asyncio
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
async def main():
    table = await Table(id="syn1234").get_async(include_columns=True)
    table.add_column([
        Column(name="my_column", column_type=ColumnType.STRING),
        Column(name="my_column2", column_type=ColumnType.INTEGER),
    ])
    await table.store_async()

asyncio.run(main())
Adding a column at a specific index (async)
This example shows how you may add a column at a specific index to a table and then store the change back in Synapse. If the index is out of bounds the column will be added to the end of the list.
import asyncio
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
async def main():
    table = await Table(id="syn1234").get_async(include_columns=True)
    table.add_column(
        Column(name="my_column", column_type=ColumnType.STRING),
        # Add the column at the beginning of the list
        index=0
    )
    await table.store_async()

asyncio.run(main())
reorder_column
¶
Reorder a column in the table. Note that this does not store the change in Synapse; you must call the .store() function on this table class instance to store the new column order in Synapse. This is a convenience function to eliminate the need to manually reorder the .columns attribute dictionary.
The index should be within the bounds of the number of columns in the table; if you pass in an index that is out of bounds, the column will be moved to the end of the list.
| PARAMETER | DESCRIPTION |
|---|---|
| `name` | The name of the column to reorder. TYPE: `str` |
| `index` | The index to move the column to, starting with 0. TYPE: `int` |

| RETURNS | DESCRIPTION |
|---|---|
| `None` | None |
Reordering a column
This example shows how you may reorder a column in a table and then store the change back in Synapse.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
table = Table(
id="syn1234"
).get(include_columns=True)
# Move the column to the beginning of the list
table.reorder_column(name="my_column", index=0)
table.store()
Reordering a column (async)
This example shows how you may reorder a column in a table and then store the change back in Synapse.
import asyncio
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, Table
syn = Synapse()
syn.login()
async def main():
    table = await Table(id="syn1234").get_async(include_columns=True)
    # Move the column to the beginning of the list
    table.reorder_column(name="my_column", index=0)
    # store_async must be awaited to persist the change
    await table.store_async()

asyncio.run(main())
get_permissions
¶
get_permissions(*, synapse_client: Optional[Synapse] = None) -> Permissions
Get the permissions that the caller has on an Entity.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `Permissions` | A Permissions object |
Using this function:
Getting permissions for a Synapse Entity
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
permissions = File(id="syn123").get_permissions()
Getting access types list from the Permissions object
permissions.access_types
get_acl
¶
get_acl(principal_id: int = None, check_benefactor: bool = True, *, synapse_client: Optional[Synapse] = None) -> List[str]
Get the ACL that a user or group has on an Entity.
Note: If the entity does not have local sharing settings, or ACL set directly on it, this will look up the ACL on the benefactor of the entity. The benefactor is the entity that the current entity inherits its permissions from. The benefactor is usually the parent entity, but it can be any ancestor in the hierarchy. For example, a newly created Project will be its own benefactor, while a new FileEntity's benefactor will start off as its containing Project or Folder. If the entity already has local sharing settings, the benefactor would be itself.
| PARAMETER | DESCRIPTION |
|---|---|
| `principal_id` | Identifier of a user or group (defaults to PUBLIC users). TYPE: `int` |
| `check_benefactor` | If True (default), check the benefactor for the entity to get the ACL. If False, only check the entity itself. This is useful when an entity has local sharing settings but you want to check the ACL of the entity itself rather than the benefactor it may inherit from. TYPE: `bool` |
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `List[str]` | An array containing some combination of ['READ', 'UPDATE', 'CREATE', 'DELETE', 'DOWNLOAD', 'MODERATE', 'CHANGE_PERMISSIONS', 'CHANGE_SETTINGS'], or an empty array |
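Getting the ACL for a Synapse Entity
A minimal sketch using the parameters documented above; the entity ID and principal ID are placeholders.
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
# ACL for PUBLIC users, falling back to the benefactor if the
# entity has no local sharing settings
acl = File(id="syn123").get_acl()
print(acl)
# ACL for a specific principal, checking only the entity itself
acl_for_user = File(id="syn123").get_acl(
    principal_id=273948,
    check_benefactor=False,
)
print(acl_for_user)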
list_acl
¶
list_acl(recursive: bool = False, include_container_content: bool = False, target_entity_types: Optional[List[str]] = None, log_tree: bool = False, *, synapse_client: Optional[Synapse] = None, _progress_bar: Optional[tqdm] = None) -> AclListResult
List the Access Control Lists (ACLs) for this entity and optionally its children.
This function returns the local sharing settings for the entity and optionally its children. It provides a mapping of all ACLs for the given container/entity.
Important Note: This function returns the LOCAL sharing settings only, not the effective permissions that each Synapse User ID/Team has on the entities. More permissive permissions could be granted via a Team that the user has access to that has permissions on the entity, or through inheritance from parent entities.
| PARAMETER | DESCRIPTION |
|---|---|
| `recursive` | If True and the entity is a container (e.g., Project or Folder), recursively process child containers. Note that this must be used with include_container_content=True to have any effect. Setting recursive=True with include_container_content=False will raise a ValueError. Only works on container classes. TYPE: `bool` |
| `include_container_content` | If True, include ACLs from contents directly within containers (files and folders inside self). This must be set to True for recursive to have any effect. Defaults to False. TYPE: `bool` |
| `target_entity_types` | Specify which entity types to process when listing ACLs. Allowed values are "folder" and "file" (case-insensitive). If None, defaults to ["folder", "file"]. TYPE: `Optional[List[str]]` |
| `log_tree` | If True, logs the ACL results to the console in an ASCII tree format showing entity hierarchies and their ACL permissions. Defaults to False. TYPE: `bool` |
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |
| `_progress_bar` | Internal parameter. Progress bar instance to use for updates when called recursively. Should not be used by external callers. TYPE: `Optional[tqdm]` |

| RETURNS | DESCRIPTION |
|---|---|
| `AclListResult` | An AclListResult object containing a structured representation of the ACLs for the entity and, when requested, its children. |

| RAISES | DESCRIPTION |
|---|---|
| `ValueError` | If the entity does not have an ID or if an invalid entity type is provided. |
| `SynapseHTTPError` | If there are permission issues accessing ACLs. |
| `Exception` | For any other errors that may occur during the process. |
List ACLs for a single entity
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
acl_result = File(id="syn123").list_acl()
print(acl_result)
# Access entity ACLs (entity_acls is a list, not a dict)
for entity_acl in acl_result.all_entity_acls:
if entity_acl.entity_id == "syn123":
# Access individual ACL entries
for acl_entry in entity_acl.acl_entries:
if acl_entry.principal_id == "273948":
print(f"Principal 273948 has permissions: {acl_entry.permissions}")
# I can also access the ACL for the file itself
print(acl_result.entity_acl)
print(acl_result)
List ACLs recursively for a folder and all its children
from synapseclient import Synapse
from synapseclient.models import Folder
syn = Synapse()
syn.login()
acl_result = Folder(id="syn123").list_acl(
recursive=True,
include_container_content=True
)
# Access each entity's ACL (entity_acls is a list)
for entity_acl in acl_result.all_entity_acls:
print(f"Entity {entity_acl.entity_id} has ACL with {len(entity_acl.acl_entries)} principals")
# I can also access the ACL for the folder itself
print(acl_result.entity_acl)
# List ACLs for only folder entities
folder_acl_result = Folder(id="syn123").list_acl(
recursive=True,
include_container_content=True,
target_entity_types=["folder"]
)
List ACLs with ASCII tree visualization
When log_tree=True, the ACLs will be logged in a tree format. Additionally, the ascii_tree attribute of the AclListResult will contain the ASCII tree representation of the ACLs.
from synapseclient import Synapse
from synapseclient.models import Folder
syn = Synapse()
syn.login()
acl_result = Folder(id="syn123").list_acl(
recursive=True,
include_container_content=True,
log_tree=True, # Enable ASCII tree logging
)
# The ASCII tree representation of the ACLs will also be available
# in acl_result.ascii_tree
print(acl_result.ascii_tree)
set_permissions
¶
set_permissions(principal_id: int = None, access_type: List[str] = None, modify_benefactor: bool = False, warn_if_inherits: bool = True, overwrite: bool = True, *, synapse_client: Optional[Synapse] = None) -> Dict[str, Union[str, list]]
Sets permission that a user or group has on an Entity. An Entity may have its own ACL or inherit its ACL from a benefactor.
| PARAMETER | DESCRIPTION |
|---|---|
| `principal_id` | Identifier of a user or group. TYPE: `int` |
| `access_type` | Type of permission to be granted. One or more of CREATE, READ, DOWNLOAD, UPDATE, DELETE, CHANGE_PERMISSIONS. Defaults to ['READ', 'DOWNLOAD']. TYPE: `List[str]` |
| `modify_benefactor` | Set as True when modifying a benefactor's ACL. The term 'benefactor' indicates which Entity an Entity inherits its ACL from. For example, a newly created Project will be its own benefactor, while a new FileEntity's benefactor will start off as its containing Project. If the entity already has local sharing settings, the benefactor is itself. It may also be the immediate parent, somewhere in the parent tree, or the project itself. TYPE: `bool` |
| `warn_if_inherits` | When True (default), produce a warning when modifying the ACL of an Entity that inherits its ACL from a benefactor. TYPE: `bool` |
| `overwrite` | By default this function overwrites existing permissions for the specified user. Set this flag to False to add new permissions non-destructively. TYPE: `bool` |
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `Dict[str, Union[str, list]]` | An Access Control List object |
Setting permissions
Grant all registered users download access
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
File(id="syn123").set_permissions(principal_id=273948, access_type=['READ','DOWNLOAD'])
Grant the public view access
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
File(id="syn123").set_permissions(principal_id=273949, access_type=['READ'])
bind_schema
¶
bind_schema(json_schema_uri: str, *, enable_derived_annotations: Optional[bool] = False, synapse_client: Optional[Synapse] = None) -> JSONSchemaBinding
Bind a JSON schema to the entity.
| PARAMETER | DESCRIPTION |
|---|---|
| `json_schema_uri` | The URI of the JSON schema to bind to the entity. TYPE: `str` |
| `enable_derived_annotations` | If true, enable derived annotations. Defaults to False. TYPE: `Optional[bool]` |
| `synapse_client` | The Synapse client instance. If not provided, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `JSONSchemaBinding` | An object containing details about the JSON schema binding. |
Using this function
Binding a JSON schema to a folder or a file. This example expects that you have a Synapse project to use and a file to upload. Set the PROJECT_NAME and FILE_PATH variables to your project name and file path, respectively.
from synapseclient import Synapse
from synapseclient.models import File, Folder
syn = Synapse()
syn.login()
# Define Project and JSON schema info
PROJECT_NAME = "test_json_schema_project" # replace with your project name
FILE_PATH = "~/Sample.txt" # replace with your test file path
PROJECT_ID = syn.findEntityId(name=PROJECT_NAME)
ORG_NAME = "UniqueOrg" # replace with your organization name
SCHEMA_NAME = "myTestSchema" # replace with your schema name
FOLDER_NAME = "test_script_folder"
VERSION = "0.0.1"
SCHEMA_URI = f"{ORG_NAME}-{SCHEMA_NAME}-{VERSION}"
# Create organization (if not already created)
js = syn.service("json_schema")
all_orgs = js.list_organizations()
for org in all_orgs:
if org["name"] == ORG_NAME:
print(f"Organization {ORG_NAME} already exists: {org}")
break
else:
print(f"Creating organization {ORG_NAME}.")
created_organization = js.create_organization(ORG_NAME)
print(f"Created organization: {created_organization}")
my_test_org = js.JsonSchemaOrganization(ORG_NAME)
test_schema = my_test_org.get_json_schema(SCHEMA_NAME)
if not test_schema:
# Create the schema (if not already created)
schema_definition = {
"$id": "mySchema",
"type": "object",
"properties": {
"foo": {"type": "string"},
"bar": {"type": "integer"},
},
"required": ["foo"]
}
test_schema = my_test_org.create_json_schema(schema_definition, SCHEMA_NAME, VERSION)
# Create a test folder
test_folder = Folder(name=FOLDER_NAME, parent_id=PROJECT_ID)
test_folder.store()
# Bind JSON schema to the folder
bound_schema = test_folder.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Result from binding schema to folder: {bound_schema}")
# Bind the same schema to a file
example_file = File(
path=FILE_PATH, # Replace with your test file path
parent_id=test_folder.id,
).store()
bound_schema_file = example_file.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Result from binding schema to file: {bound_schema_file}")
get_schema
¶
get_schema(*, synapse_client: Optional[Synapse] = None) -> JSONSchemaBinding
Get the JSON schema bound to the entity.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | The Synapse client instance. If not provided, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `JSONSchemaBinding` | An object containing details about the bound JSON schema. |
Using this function
Retrieving the bound JSON schema from a folder or file. This example demonstrates how to get existing schema bindings from entities that already have schemas bound. Set the PROJECT_NAME variable to your project name.
from synapseclient import Synapse
from synapseclient.models import File, Folder
syn = Synapse()
syn.login()
# Define Project and JSON schema info
PROJECT_NAME = "test_json_schema_project" # replace with your project name
FILE_PATH = "~/Sample.txt" # replace with your test file path
PROJECT_ID = syn.findEntityId(name=PROJECT_NAME)
ORG_NAME = "UniqueOrg" # replace with your organization name
SCHEMA_NAME = "myTestSchema" # replace with your schema name
FOLDER_NAME = "test_script_folder"
VERSION = "0.0.1"
SCHEMA_URI = f"{ORG_NAME}-{SCHEMA_NAME}-{VERSION}"
# Create organization (if not already created)
js = syn.service("json_schema")
all_orgs = js.list_organizations()
for org in all_orgs:
if org["name"] == ORG_NAME:
print(f"Organization {ORG_NAME} already exists: {org}")
break
else:
print(f"Creating organization {ORG_NAME}.")
created_organization = js.create_organization(ORG_NAME)
print(f"Created organization: {created_organization}")
my_test_org = js.JsonSchemaOrganization(ORG_NAME)
test_schema = my_test_org.get_json_schema(SCHEMA_NAME)
if not test_schema:
# Create the schema (if not already created)
schema_definition = {
"$id": "mySchema",
"type": "object",
"properties": {
"foo": {"type": "string"},
"bar": {"type": "integer"},
},
"required": ["foo"]
}
test_schema = my_test_org.create_json_schema(schema_definition, SCHEMA_NAME, VERSION)
print(f"Created new schema: {SCHEMA_NAME}")
# Create a test folder
test_folder = Folder(name=FOLDER_NAME, parent_id=PROJECT_ID)
test_folder.store()
print(f"Created test folder: {FOLDER_NAME}")
# Bind JSON schema to the folder first
bound_schema = test_folder.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to folder: {bound_schema}")
# Create and bind schema to a file
example_file = File(
path=FILE_PATH, # Replace with your test file path
parent_id=test_folder.id,
).store()
bound_schema_file = example_file.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to file: {bound_schema_file}")
# Retrieve the bound schema from the folder
retrieved_folder_schema = test_folder.get_schema()
print(f"Retrieved schema from folder: {retrieved_folder_schema}")
# Retrieve the bound schema from the file
retrieved_file_schema = example_file.get_schema()
print(f"Retrieved schema from file: {retrieved_file_schema}")
unbind_schema
¶
Unbind the JSON schema from the entity.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | The Synapse client instance. If not provided, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |
Using this function
Unbinding a JSON schema from a folder or file. This example demonstrates how to remove schema bindings from entities and assumes the entities already have schemas bound. Set the PROJECT_NAME variable to your project name.
from synapseclient import Synapse
from synapseclient.models import File, Folder
syn = Synapse()
syn.login()
# Define Project and JSON schema info
PROJECT_NAME = "test_json_schema_project" # replace with your project name
FILE_PATH = "~/Sample.txt" # replace with your test file path
PROJECT_ID = syn.findEntityId(name=PROJECT_NAME)
ORG_NAME = "UniqueOrg" # replace with your organization name
SCHEMA_NAME = "myTestSchema" # replace with your schema name
FOLDER_NAME = "test_script_folder"
VERSION = "0.0.1"
SCHEMA_URI = f"{ORG_NAME}-{SCHEMA_NAME}-{VERSION}"
# Create organization (if not already created)
js = syn.service("json_schema")
all_orgs = js.list_organizations()
for org in all_orgs:
if org["name"] == ORG_NAME:
print(f"Organization {ORG_NAME} already exists: {org}")
break
else:
print(f"Creating organization {ORG_NAME}.")
created_organization = js.create_organization(ORG_NAME)
print(f"Created organization: {created_organization}")
my_test_org = js.JsonSchemaOrganization(ORG_NAME)
test_schema = my_test_org.get_json_schema(SCHEMA_NAME)
if not test_schema:
# Create the schema (if not already created)
schema_definition = {
"$id": "mySchema",
"type": "object",
"properties": {
"foo": {"type": "string"},
"bar": {"type": "integer"},
},
"required": ["foo"]
}
test_schema = my_test_org.create_json_schema(schema_definition, SCHEMA_NAME, VERSION)
print(f"Created new schema: {SCHEMA_NAME}")
# Create a test folder
test_folder = Folder(name=FOLDER_NAME, parent_id=PROJECT_ID)
test_folder.store()
print(f"Created test folder: {FOLDER_NAME}")
# Bind JSON schema to the folder first
bound_schema = test_folder.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to folder: {bound_schema}")
# Create and bind schema to a file
example_file = File(
path=FILE_PATH, # Replace with your test file path
parent_id=test_folder.id,
).store()
bound_schema_file = example_file.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to file: {bound_schema_file}")
# Unbind the schema from the folder
test_folder.unbind_schema()
print("Successfully unbound schema from folder")
# Unbind the schema from the file
example_file.unbind_schema()
print("Successfully unbound schema from file")
validate_schema
¶
validate_schema(*, synapse_client: Optional[Synapse] = None) -> Union[JSONSchemaValidation, InvalidJSONSchemaValidation]
Validate the entity against the bound JSON schema.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | The Synapse client instance. If not provided, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `Union[JSONSchemaValidation, InvalidJSONSchemaValidation]` | The validation results. |
Using this function
Validating a folder or file against the bound JSON schema. This example demonstrates how to validate entities with annotations against their bound schemas and requires entities to have schemas already bound. Set the PROJECT_NAME variable to your project name.
from synapseclient import Synapse
from synapseclient.models import File, Folder
import time
syn = Synapse()
syn.login()
# Define Project and JSON schema info
PROJECT_NAME = "test_json_schema_project" # replace with your project name
FILE_PATH = "~/Sample.txt" # replace with your test file path
PROJECT_ID = syn.findEntityId(name=PROJECT_NAME)
ORG_NAME = "UniqueOrg" # replace with your organization name
SCHEMA_NAME = "myTestSchema" # replace with your schema name
FOLDER_NAME = "test_script_folder"
VERSION = "0.0.1"
SCHEMA_URI = f"{ORG_NAME}-{SCHEMA_NAME}-{VERSION}"
# Create organization (if not already created)
js = syn.service("json_schema")
all_orgs = js.list_organizations()
for org in all_orgs:
if org["name"] == ORG_NAME:
print(f"Organization {ORG_NAME} already exists: {org}")
break
else:
print(f"Creating organization {ORG_NAME}.")
created_organization = js.create_organization(ORG_NAME)
print(f"Created organization: {created_organization}")
my_test_org = js.JsonSchemaOrganization(ORG_NAME)
test_schema = my_test_org.get_json_schema(SCHEMA_NAME)
if not test_schema:
# Create the schema (if not already created)
schema_definition = {
"$id": "mySchema",
"type": "object",
"properties": {
"foo": {"type": "string"},
"bar": {"type": "integer"},
},
"required": ["foo"]
}
test_schema = my_test_org.create_json_schema(schema_definition, SCHEMA_NAME, VERSION)
print(f"Created new schema: {SCHEMA_NAME}")
# Create a test folder
test_folder = Folder(name=FOLDER_NAME, parent_id=PROJECT_ID)
test_folder.store()
print(f"Created test folder: {FOLDER_NAME}")
# Bind JSON schema to the folder
bound_schema = test_folder.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to folder: {bound_schema}")
# Create and bind schema to a file
example_file = File(
path=FILE_PATH, # Replace with your test file path
parent_id=test_folder.id,
).store()
bound_schema_file = example_file.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to file: {bound_schema_file}")
# Validate the folder entity against the bound schema
test_folder.annotations = {"foo": "test_value", "bar": 42} # Example annotations
test_folder.store()
print("Added annotations to folder and stored")
time.sleep(2) # Allow time for processing
validation_response = test_folder.validate_schema()
print(f"Folder validation response: {validation_response}")
# Validate the file entity against the bound schema
example_file.annotations = {"foo": "test_value", "bar": 43} # Example annotations
example_file.store()
print("Added annotations to file and stored")
time.sleep(2) # Allow time for processing
validation_response_file = example_file.validate_schema()
print(f"File validation response: {validation_response_file}")
get_schema_derived_keys
¶
get_schema_derived_keys(*, synapse_client: Optional[Synapse] = None) -> JSONSchemaDerivedKeys
Retrieve derived JSON schema keys for the entity.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | The Synapse client instance. If not provided, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `JSONSchemaDerivedKeys` | An object containing the derived keys for the entity. |
Using this function
Retrieving derived keys from a folder or file. This example demonstrates how to get derived annotation keys from schemas with constant values. Set the PROJECT_NAME variable to your project name.
from synapseclient import Synapse
from synapseclient.models import File, Folder
syn = Synapse()
syn.login()
# Define Project and JSON schema info
PROJECT_NAME = "test_json_schema_project" # replace with your project name
FILE_PATH = "~/Sample.txt" # replace with your test file path
PROJECT_ID = syn.findEntityId(name=PROJECT_NAME)
ORG_NAME = "UniqueOrg" # replace with your organization name
DERIVED_TEST_SCHEMA_NAME = "myTestDerivedSchema" # replace with your derived schema name
FOLDER_NAME = "test_script_folder"
VERSION = "0.0.1"
SCHEMA_URI = f"{ORG_NAME}-{DERIVED_TEST_SCHEMA_NAME}-{VERSION}"
# Create organization (if not already created)
js = syn.service("json_schema")
all_orgs = js.list_organizations()
for org in all_orgs:
if org["name"] == ORG_NAME:
print(f"Organization {ORG_NAME} already exists: {org}")
break
else:
print(f"Creating organization {ORG_NAME}.")
created_organization = js.create_organization(ORG_NAME)
print(f"Created organization: {created_organization}")
my_test_org = js.JsonSchemaOrganization(ORG_NAME)
test_schema = my_test_org.get_json_schema(DERIVED_TEST_SCHEMA_NAME)
if not test_schema:
# Create the schema (if not already created)
schema_definition = {
"$id": "mySchema",
"type": "object",
"properties": {
"foo": {"type": "string"},
"baz": {"type": "string", "const": "example_value"}, # Example constant for derived annotation
"bar": {"type": "integer"},
},
"required": ["foo"]
}
test_schema = my_test_org.create_json_schema(schema_definition, DERIVED_TEST_SCHEMA_NAME, VERSION)
print(f"Created new derived schema: {DERIVED_TEST_SCHEMA_NAME}")
# Create a test folder
test_folder = Folder(name=FOLDER_NAME, parent_id=PROJECT_ID)
test_folder.store()
print(f"Created test folder: {FOLDER_NAME}")
# Bind JSON schema to the folder
bound_schema = test_folder.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to folder with derived annotations: {bound_schema}")
# Create and bind schema to a file
example_file = File(
path=FILE_PATH, # Replace with your test file path
parent_id=test_folder.id,
).store()
bound_schema_file = example_file.bind_schema(
json_schema_uri=SCHEMA_URI,
enable_derived_annotations=True
)
print(f"Bound schema to file with derived annotations: {bound_schema_file}")
# Get the derived keys from the bound schema of the folder
test_folder.annotations = {"foo": "test_value_new", "bar": 42} # Example annotations
test_folder.store()
print("Added annotations to folder and stored")
derived_keys = test_folder.get_schema_derived_keys()
print(f"Derived keys from folder: {derived_keys}")
# Get the derived keys from the bound schema of the file
example_file.annotations = {"foo": "test_value_new", "bar": 43} # Example annotations
example_file.store()
print("Added annotations to file and stored")
derived_keys_file = example_file.get_schema_derived_keys()
print(f"Derived keys from file: {derived_keys_file}")
delete_permissions
¶
delete_permissions(include_self: bool = True, include_container_content: bool = False, recursive: bool = False, target_entity_types: Optional[List[str]] = None, dry_run: bool = False, show_acl_details: bool = True, show_files_in_containers: bool = True, *, benefactor_tracker: Optional[BenefactorTracker] = None, synapse_client: Optional[Synapse] = None) -> None
Delete the entire Access Control List (ACL) for a given Entity. This is not scoped to a specific user or group, but rather removes all permissions associated with the Entity. After this operation, the Entity will inherit permissions from its benefactor, which is typically its parent entity or the Project it belongs to.
In order to remove permissions for a specific user or group, use the set_permissions method with the access_type set to an empty list, as shown in the sketch below.
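Removing permissions for a specific user
A minimal sketch of the approach described above; the entity ID and principal ID are placeholders.
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
# Revoke this user's permissions on the entity by overwriting
# them with an empty access_type list
File(id="syn123").set_permissions(principal_id=273948, access_type=[])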
By default, Entities such as FileEntity and Folder inherit their permissions from their containing Project. For such Entities the Project is the Entity's 'benefactor'. This permission inheritance can be overridden by creating an ACL for the Entity. When this occurs the Entity becomes its own benefactor and all permissions are determined by its own ACL.
If the ACL of an Entity is deleted, then its benefactor will automatically be set to its parent's benefactor.
Special notice for Projects: The ACL for a Project cannot be deleted, you must individually update or revoke the permissions for each user or group.
| PARAMETER | DESCRIPTION |
|---|---|
| `include_self` | If True (default), delete the ACL of the current entity. If False, skip deleting the ACL of the current entity. TYPE: `bool` |
| `include_container_content` | If True, delete ACLs from contents directly within containers (files and folders inside self). This must be set to True for recursive to have any effect. Defaults to False. TYPE: `bool` |
| `recursive` | If True and the entity is a container (e.g., Project or Folder), recursively process child containers. Note that this must be used with include_container_content=True to have any effect. Setting recursive=True with include_container_content=False will raise a ValueError. Only works on container classes. TYPE: `bool` |
| `target_entity_types` | Specify which entity types to process when deleting ACLs. Allowed values are "folder" and "file" (case-insensitive). If None, defaults to ["folder", "file"]. This does not affect the entity type of the current entity, which is always processed if include_self is True. TYPE: `Optional[List[str]]` |
| `dry_run` | If True, log the changes that would be made instead of actually performing the deletions. When enabled, all ACL deletion operations are simulated and logged at info level. Defaults to False. TYPE: `bool` |
| `show_acl_details` | When dry_run=True, controls whether current ACL details are displayed for entities that will have their permissions changed. If True (default), shows detailed ACL information. If False, hides ACL details for cleaner output. Has no effect when dry_run=False. TYPE: `bool` |
| `show_files_in_containers` | When dry_run=True, controls whether files within containers are displayed in the preview. If True (default), shows all files. If False, hides files when their only change is benefactor inheritance (but still shows files with local ACLs being deleted). Has no effect when dry_run=False. TYPE: `bool` |
| `benefactor_tracker` | Optional tracker for managing benefactor relationships. Used by the recursive functionality to track which entities will be affected. TYPE: `Optional[BenefactorTracker]` |
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `None` | None |

| RAISES | DESCRIPTION |
|---|---|
| `ValueError` | If the entity does not have an ID or if an invalid entity type is provided. |
| `SynapseHTTPError` | If there are permission issues or if the entity already inherits permissions. |
| `Exception` | For any other errors that may occur during the process. |
Note: The caller must be granted ACCESS_TYPE.CHANGE_PERMISSIONS on the Entity to call this method.
Delete permissions for a single entity
from synapseclient import Synapse
from synapseclient.models import File
syn = Synapse()
syn.login()
File(id="syn123").delete_permissions()
Delete permissions recursively for a folder and all its children
from synapseclient import Synapse
from synapseclient.models import Folder
syn = Synapse()
syn.login()
# Delete permissions for this folder only (does not affect children)
Folder(id="syn123").delete_permissions()
# Delete permissions for all files and folders directly within this folder,
# but not the folder itself
Folder(id="syn123").delete_permissions(
include_self=False,
include_container_content=True
)
# Delete permissions for all items in the entire hierarchy (folders and their files)
# Both recursive and include_container_content must be True
Folder(id="syn123").delete_permissions(
recursive=True,
include_container_content=True
)
# Delete permissions only for folder entities within this folder recursively
# and their contents
Folder(id="syn123").delete_permissions(
recursive=True,
include_container_content=True,
target_entity_types=["folder"]
)
# Delete permissions only for files within this folder and all subfolders
Folder(id="syn123").delete_permissions(
include_self=False,
recursive=True,
include_container_content=True,
target_entity_types=["file"]
)
# Dry run example: Log what would be deleted without making changes
Folder(id="syn123").delete_permissions(
recursive=True,
include_container_content=True,
dry_run=True
)
synapseclient.models.Column
dataclass
¶
Bases: ColumnSynchronousProtocol
A column model contains the metadata of a single column of a table or view.
Functions¶
get
¶
Get a column by its ID.
| PARAMETER | DESCRIPTION |
|---|---|
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| RETURNS | DESCRIPTION |
|---|---|
| `Self` | The Column instance. |
Getting a column by ID
from synapseclient import Synapse
from synapseclient.models import Column
syn = Synapse()
syn.login()
column = Column(id="123").get()
list
staticmethod
¶
list(prefix: Optional[str] = None, limit: int = 100, offset: int = 0, *, synapse_client: Optional[Synapse] = None) -> Generator[Self, None, None]
List columns with optional prefix filtering.
| PARAMETER | DESCRIPTION |
|---|---|
| `prefix` | Optional prefix to filter columns by name. TYPE: `Optional[str]` |
| `limit` | Number of columns to retrieve per request to Synapse (pagination parameter). The function will continue retrieving results until all matching columns are returned. TYPE: `int` |
| `offset` | The index of the first column to return (pagination parameter). TYPE: `int` |
| `synapse_client` | If not passed in, the last created instance from the Synapse class constructor will be used. TYPE: `Optional[Synapse]` |

| YIELDS | DESCRIPTION |
|---|---|
| `Self` | A generator that yields Column instances. |
Getting all columns
from synapseclient import Synapse
from synapseclient.models import Column
syn = Synapse()
syn.login()
for column in Column.list():
print(column.name)
Getting columns with a prefix
from synapseclient import Synapse
from synapseclient.models import Column
syn = Synapse()
syn.login()
for column in Column.list(prefix="my_prefix"):
print(column.name)
synapseclient.models.SchemaStorageStrategy
¶
Enum used to determine how to store the schema of a table in Synapse.
Attributes¶
INFER_FROM_DATA
class-attribute
instance-attribute
¶
INFER_FROM_DATA = 'INFER_FROM_DATA'
(Default) Allow the data to define which columns are created on the Synapse table automatically. The limitation with this behavior is that the columns created may only be of the following types:
- STRING
- LARGETEXT
- INTEGER
- DOUBLE
- BOOLEAN
- DATE
The determination of the column type is based on the data that is passed in, using the pandas function infer_dtype. If you need a more specific column type, or need to add options to the columns, follow the examples shown in the Table class.
The columns created as a result of this strategy will be appended to the end of the existing columns if the table already exists.
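Storing rows with an inferred schema
A minimal sketch of this strategy, assuming Table.store_rows accepts a schema_storage_strategy argument; the table name and project ID are placeholders.
import pandas as pd
from synapseclient import Synapse
from synapseclient.models import SchemaStorageStrategy, Table
syn = Synapse()
syn.login()
table = Table(name="my_inferred_table", parent_id="syn1234").store()
df = pd.DataFrame({"name": ["a", "b"], "count": [1, 2]})
# Column types are inferred from the DataFrame dtypes
# (STRING and INTEGER in this case)
table.store_rows(
    values=df,
    schema_storage_strategy=SchemaStorageStrategy.INFER_FROM_DATA,
)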
synapseclient.models.ColumnExpansionStrategy
¶
Determines how to automate the expansion of columns based on the data that is being stored. The options given allow cells with a limit on the length of content (such as strings) to be expanded to a larger size if the data being stored exceeds the limit. A limit on list length is also enforced in Synapse, but automatic expansion for lists is not yet supported through this interface.
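Expanding columns while storing rows
A minimal sketch, assuming the enum exposes an AUTO_EXPAND_CONTENT_LENGTH value and that Table.store_rows accepts a column_expansion_strategy argument; the table ID is a placeholder.
import pandas as pd
from synapseclient import Synapse
from synapseclient.models import ColumnExpansionStrategy, Table
syn = Synapse()
syn.login()
table = Table(id="syn1234").get(include_columns=True)
df = pd.DataFrame(
    {"my_string_column": ["a value longer than the column's declared maximum size"]}
)
# Allow size-limited columns (such as strings) to grow to fit the data
table.store_rows(
    values=df,
    column_expansion_strategy=ColumnExpansionStrategy.AUTO_EXPAND_CONTENT_LENGTH,
)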
synapseclient.models.FacetType
¶
Set to one of the enumerated values to indicate a column should be treated as a facet.
Attributes¶
ENUMERATION
class-attribute
instance-attribute
¶
ENUMERATION = 'enumeration'
Returns the most frequently seen values and their respective frequency counts; selecting these returned values will cause the table results to be filtered such that only rows with the selected values are returned.
RANGE
class-attribute
instance-attribute
¶
RANGE = 'range'
Allows the column to be filtered by a chosen lower and upper bound; these bounds are inclusive.
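Marking columns as facets
A minimal sketch of declaring faceted columns, assuming Column accepts a facet_type attribute as in the REST ColumnModel; the table name and project ID are placeholders.
from synapseclient import Synapse
from synapseclient.models import Column, ColumnType, FacetType, Table
syn = Synapse()
syn.login()
columns = [
    # Filter by the most frequently seen values
    Column(name="species", column_type=ColumnType.STRING, facet_type=FacetType.ENUMERATION),
    # Filter by an inclusive lower and upper bound
    Column(name="age", column_type=ColumnType.INTEGER, facet_type=FacetType.RANGE),
]
Table(name="my_faceted_table", columns=columns, parent_id="syn1234").store()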
synapseclient.models.ColumnType
¶
The column type determines the type of data that can be stored in a column. Switching between types (using a transaction with TableUpdateTransaction in the "changes" list) is generally allowed except for switching to "_LIST" suffixed types. In such cases, a new column must be created and the data must be copied over manually.
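Declaring columns of different types
A minimal sketch of declaring a mix of column types; the maximum_size attribute is assumed here as the snake_case form of the REST model's maximumSize.
from synapseclient.models import Column, ColumnType
columns = [
    # Small strings: declare the smallest workable maximum size
    Column(name="title", column_type=ColumnType.STRING, maximum_size=100),  # maximum_size is an assumption
    Column(name="score", column_type=ColumnType.DOUBLE),
    # _LIST types cannot be switched to later; create them up front
    Column(name="tags", column_type=ColumnType.STRING_LIST),
]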
Attributes¶
STRING
class-attribute
instance-attribute
¶
STRING = 'STRING'
The STRING data type holds small text strings with between 1 and 1,000 characters. Each STRING column will have a declared maximum size between 1 and 1,000 characters (with 50 characters as the default when maximumSize = null). The maximum STRING size is applied to the budget of the maximum table width, so it is best to use the smallest possible maximum size for the data. For strings larger than 250 characters, consider using the LARGETEXT column type for improved performance. Each STRING column counts as maxSize*4 (4 bytes per character) towards the total width of a table.
DOUBLE
class-attribute
instance-attribute
¶
DOUBLE = 'DOUBLE'
The DOUBLE data type is a double-precision 64-bit IEEE 754 floating point. Its range of values is approximately +/-1.79769313486231570E+308 (15 significant decimal digits). Each DOUBLE column counts as 23 bytes towards the total width of a table.
INTEGER
class-attribute
instance-attribute
¶
INTEGER = 'INTEGER'
The INTEGER data type is a 64-bit two's complement integer. The signed integer has a minimum value of -2^63 and a maximum value of 2^63-1. Each INTEGER column counts as 20 bytes towards the total width of a table.
BOOLEAN
class-attribute
instance-attribute
¶
BOOLEAN = 'BOOLEAN'
The BOOLEAN data type has only two possible values: 'true' and 'false'. Each BOOLEAN column counts as 5 bytes towards the total width of a table.
DATE
class-attribute
instance-attribute
¶
DATE = 'DATE'
The DATE data type represents the specified number of milliseconds since the standard base time known as 'the epoch', namely January 1, 1970, 00:00:00 GMT. Each DATE column counts as 20 bytes towards the total width of a table.
FILEHANDLEID
class-attribute
instance-attribute
¶
FILEHANDLEID = 'FILEHANDLEID'
The FILEHANDLEID data type represents a file stored within a table. To store a file in a table, first use the 'File Services' to upload a file to generate a new FileHandle, then apply the fileHandle.id as the value for this column. Note: This column type works best for files that are binary (non-text) or text files that are 1 MB or larger. For text files that are smaller than 1 MB consider using the LARGETEXT column type to improve download performance. Each FILEHANDLEID column counts as 20 bytes towards the total width of a table.
ENTITYID
class-attribute
instance-attribute
¶
ENTITYID = 'ENTITYID'
The ENTITYID type represents a reference to a Synapse Entity. Values will include the 'syn' prefix, such as 'syn123'. Each ENTITYID column counts as 44 bytes towards the total width of a table.
SUBMISSIONID
class-attribute
instance-attribute
¶
SUBMISSIONID = 'SUBMISSIONID'
The SUBMISSIONID type represents a reference to an evaluation submission. The value should be the ID of the referenced submission. Each SUBMISSIONID column counts as 20 bytes towards the total width of a table.
EVALUATIONID
class-attribute
instance-attribute
¶
EVALUATIONID = 'EVALUATIONID'
The EVALUATIONID type represents a reference to an evaluation. The value should be the ID of the referenced evaluation. Each EVALUATIONID column counts as 20 bytes towards the total width of a table.
LINK
class-attribute
instance-attribute
¶
LINK = 'LINK'
The LINK data type represents any URL with 1,000 characters or less. Each LINK column counts as maxSize*4 (4 bytes per character) towards the total width of a table.
MEDIUMTEXT
class-attribute
instance-attribute
¶
MEDIUMTEXT = 'MEDIUMTEXT'
The MEDIUMTEXT data type represents a string that is between 1 and 2,000 characters without the need to specify a maximum size. For smaller strings where the maximum size is known consider using the STRING column type. For larger strings, consider using the LARGETEXT or FILEHANDLEID column types. Each MEDIUMTEXT column counts as 421 bytes towards the total width of a table.
LARGETEXT
class-attribute
instance-attribute
¶
LARGETEXT = 'LARGETEXT'
The LARGETEXT data type represents a string that is greater than 250 characters but less than 524,288 characters (2 MB of UTF-8 4 byte chars). For smaller strings consider using the STRING or MEDIUMTEXT column types. For larger strings, consider using the FILEHANDLEID column type. Each LARGETEXT column counts as 2133 bytes towards the total width of a table.
USERID
class-attribute
instance-attribute
¶
USERID = 'USERID'
The USERID data type represents a reference to a Synapse User. The value should be the ID of the referenced User. Each USERID column counts as 20 bytes towards the total width of a table.
STRING_LIST
class-attribute
instance-attribute
¶
STRING_LIST = 'STRING_LIST'
Multiple values of STRING.
INTEGER_LIST
class-attribute
instance-attribute
¶
INTEGER_LIST = 'INTEGER_LIST'
Multiple values of INTEGER.
BOOLEAN_LIST
class-attribute
instance-attribute
¶
BOOLEAN_LIST = 'BOOLEAN_LIST'
Multiple values of BOOLEAN.
ENTITYID_LIST
class-attribute
instance-attribute
¶
ENTITYID_LIST = 'ENTITYID_LIST'
Multiple values of ENTITYID.
USERID_LIST
class-attribute
instance-attribute
¶
USERID_LIST = 'USERID_LIST'
Multiple values of USERID.
JSON
class-attribute
instance-attribute
¶
JSON = 'JSON'
A flexible type that allows storing JSON data. Each JSON column counts as 2133 bytes towards the total width of a table. A JSON value string should be less than 524,288 characters (2 MB of UTF-8 4 byte chars).
synapseclient.models.JsonSubColumn
dataclass
¶
For a column of type JSON that represents the combination of multiple sub-columns, this class is used to define each sub-column.
Attributes¶
column_type
instance-attribute
¶
column_type: ColumnType
The column type determines the type of data that can be stored in a column. Switching between types (using a transaction with TableUpdateTransaction in the "changes" list) is generally allowed except for switching to "_LIST" suffixed types. In such cases, a new column must be created and the data must be copied over manually.
json_path
instance-attribute
¶
json_path: str
Defines the JSON path of the sub column. Use the '$' char to represent the root of JSON object. If the JSON key of a sub column is 'a', then the jsonPath for that column would be: '$.a'.
facet_type
class-attribute
instance-attribute
¶
Set to one of the enumerated values to indicate a column should be treated as a facet
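Defining a sub-column
A minimal sketch using the attributes documented above; the name field is assumed here from the REST JsonSubColumnModel.
from synapseclient.models import ColumnType, FacetType, JsonSubColumn
# Expose the 'a' key of each stored JSON object as a faceted STRING sub-column
sub_column = JsonSubColumn(
    name="a",  # name is an assumption from the REST model
    column_type=ColumnType.STRING,
    json_path="$.a",
    facet_type=FacetType.ENUMERATION,
)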
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(synapse_sub_column: Dict[str, Any]) -> JsonSubColumn
Converts a response from the synapseclient into this dataclass.
to_synapse_request
¶
Converts the JsonSubColumn object into a dictionary that can be passed into the REST API.
synapseclient.models.SumFileSizes
dataclass
¶
A model for the sum of file sizes in a query result bundle.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/SumFileSizes.html
synapseclient.models.Query
dataclass
¶
Represents a SQL query with optional parameters.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/Query.html
Attributes¶
additional_filters
class-attribute
instance-attribute
¶
Appends additional filters to the SQL query. These are applied before facets. Filters within the list have an AND relationship. If a WHERE clause already exists on the SQL query or facets are selected, it will also be ANDed with the query generated by these additional filters.
selected_facets
class-attribute
instance-attribute
¶
The selected facet filters
include_entity_etag
class-attribute
instance-attribute
¶
Optional, default false. When true, query results against views will include the Etag of each entity in the results. Note: The etag is necessary to update Entities in the view.
select_file_column
class-attribute
instance-attribute
¶
The id of the column used to select file entities (e.g. to fetch the action required for download). The column needs to be an ENTITYID type column and be part of the schema of the underlying table/view.
select_file_version_column
class-attribute
instance-attribute
¶
The id of the column used as the version for selecting file entities when required (e.g. to add a materialized view query to the download cart with version enabled). The column needs to be an INTEGER type column and be part of the schema of the underlying table/view.
offset
class-attribute
instance-attribute
¶
The optional offset into the results
limit
class-attribute
instance-attribute
¶
The optional limit to the results
sort
class-attribute
instance-attribute
¶
The sort order for the query results (an array of SortItem objects).
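Building a query
A minimal sketch of constructing a Query with the optional attributes documented above; the sql field is assumed here from the linked REST model.
from synapseclient.models import Query
query = Query(
    sql="SELECT * FROM syn1234",  # sql is an assumption from the REST model
    include_entity_etag=True,  # include each entity's etag in view results
    limit=25,
    offset=0,
)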
Functions¶
to_synapse_request
¶
Converts the Query object into a dictionary that can be passed into the REST API.
synapseclient.models.QueryBundleRequest
dataclass
¶
Bases: AsynchronousCommunicator
A query bundle request that can be submitted to Synapse to retrieve query results with metadata.
This class combines query request parameters with the ability to receive a QueryResultBundle through the AsynchronousCommunicator pattern.
The partMask determines which parts of the result bundle are included:
- Query Results (queryResults) = 0x1
- Query Count (queryCount) = 0x2
- Select Columns (selectColumns) = 0x4
- Max Rows Per Page (maxRowsPerPage) = 0x8
- The Table Columns (columnModels) = 0x10
- Facet statistics for each faceted column (facetStatistics) = 0x20
- The sum of the file sizes (sumFileSizesBytes) = 0x40
- The last updated on date (lastUpdatedOn) = 0x80
- The combined SQL query including additional filters (combinedSql) = 0x100
- The list of actions required for any file in the query (actionsRequired) = 0x200
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryBundleRequest.html
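Because the partMask is a bitmask, parts are combined with bitwise OR. For example, to request the query results, the row count, and the table's column models (values taken from the list above):
# queryResults (0x1) + queryCount (0x2) + columnModels (0x10)
part_mask = 0x1 | 0x2 | 0x10
Pass the resulting value as the part_mask attribute of the request.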
Attributes¶
concrete_type
class-attribute
instance-attribute
¶
concrete_type: str = QUERY_BUNDLE_REQUEST
The concrete type of this request
part_mask
class-attribute
instance-attribute
¶
Optional integer mask to request specific parts. Default includes all parts if not specified.
query_result
class-attribute
instance-attribute
¶
query_result: Optional[QueryResult] = None
A page of query result
query_count
class-attribute
instance-attribute
¶
The total number of rows that match the query
select_columns
class-attribute
instance-attribute
¶
select_columns: Optional[List[SelectColumn]] = None
The list of SelectColumns from the select clause
max_rows_per_page
class-attribute
instance-attribute
¶
The maximum number of rows that can be retrieved in a single call
column_models
class-attribute
instance-attribute
¶
The list of ColumnModels for the table
facets
class-attribute
instance-attribute
¶
The list of facets for the search results
sum_file_sizes
class-attribute
instance-attribute
¶
sum_file_sizes: Optional[SumFileSizes] = None
The sum of the file sizes for all files in the given view query
last_updated_on
class-attribute
instance-attribute
¶
The date-time when this table/view was last updated
combined_sql
class-attribute
instance-attribute
¶
The SQL that is a combination of the input SQL, FacetRequests, AdditionalFilters, Sorting, and Pagination
actions_required
class-attribute
instance-attribute
¶
The first 50 actions required to download the files that are part of the query
Functions¶
to_synapse_request
¶
Convert to QueryBundleRequest format for async job submission.
fill_from_dict
¶
Fill the request results from Synapse response (QueryResultBundle).
synapseclient.models.QueryJob
dataclass
¶
Bases: AsynchronousCommunicator
A query job that can be submitted to Synapse and return a DownloadFromTableResult.
This class combines query request parameters with the ability to receive query results through the AsynchronousCommunicator pattern.
Request modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/DownloadFromTableRequest.html
Response modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/DownloadFromTableResult.html
Attributes¶
concrete_type
class-attribute
instance-attribute
¶
concrete_type: str = QUERY_TABLE_CSV_REQUEST
The concrete type of the request (usually DownloadFromTableRequest)
write_header
class-attribute
instance-attribute
¶
Should the first line contain the column names as a header in the resulting file? Set to 'true' to include the headers, otherwise 'false'. The default value is 'true'.
include_row_id_and_row_version
class-attribute
instance-attribute
¶
Should the first two columns contain the row ID and row version? The default value is 'true'.
csv_table_descriptor
class-attribute
instance-attribute
¶
csv_table_descriptor: Optional[CsvTableDescriptor] = None
The description of a csv for upload or download.
file_name
class-attribute
instance-attribute
¶
The optional name for the downloaded table.
additional_filters
class-attribute
instance-attribute
¶
Appends additional filters to the SQL query. These are applied before facets. Filters within the list have an AND relationship. If a WHERE clause already exists on the SQL query or facets are selected, it will also be ANDed with the query generated by these additional filters.
selected_facets
class-attribute
instance-attribute
¶
The selected facet filters.
include_entity_etag
class-attribute
instance-attribute
¶
Optional, default false. When true, query results against views will include the etag of each entity in the results. Note: the etag is necessary to update Entities in the view.
select_file_column
class-attribute
instance-attribute
¶
The id of the column used to select file entities (e.g. to fetch the action required for download). The column needs to be an ENTITYID type column and be part of the schema of the underlying table/view.
select_file_version_column
class-attribute
instance-attribute
¶
The id of the column used as the version for selecting file entities when required (e.g. to add a materialized view query to the download cart with version enabled). The column needs to be an INTEGER type column and be part of the schema of the underlying table/view.
offset
class-attribute
instance-attribute
¶
The optional offset into the results
limit
class-attribute
instance-attribute
¶
The optional limit to the results
sort
class-attribute
instance-attribute
¶
The sort order for the query results (an array of SortItem objects)
job_id
class-attribute
instance-attribute
¶
The job ID returned from the async job
results_file_handle_id
class-attribute
instance-attribute
¶
The file handle ID of the results CSV file
table_id
class-attribute
instance-attribute
¶
The ID of the table that was queried
headers
class-attribute
instance-attribute
¶
headers: Optional[List[SelectColumn]] = None
The column headers from the query result
response_concrete_type
class-attribute
instance-attribute
¶
The concrete type of the response (usually DownloadFromTableResult)
Functions¶
to_synapse_request
¶
Convert to DownloadFromTableRequest format for async job submission.
Source code in synapseclient/models/table_components.py
fill_from_dict
¶
Fill the job results from Synapse response.
Source code in synapseclient/models/table_components.py
synapseclient.models.QueryNextPageToken
dataclass
¶
Token for retrieving the next page of query results.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryNextPageToken.html
Source code in synapseclient/models/table_components.py
Attributes¶
concrete_type
class-attribute
instance-attribute
¶
The concrete type of this object
entity_id
class-attribute
instance-attribute
¶
The ID of the entity (table/view) being queried
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(data: Dict[str, Any]) -> QueryNextPageToken
Create a QueryNextPageToken from a dictionary response.
Source code in synapseclient/models/table_components.py
synapseclient.models.QueryResult
dataclass
¶
A page of query result.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryResult.html
Source code in synapseclient/models/table_components.py
Attributes¶
query_results
instance-attribute
¶
query_results: RowSet
Represents a set of rows of a TableEntity (RowSet)
concrete_type
class-attribute
instance-attribute
¶
concrete_type: str = QUERY_RESULT
The concrete type of this object
next_page_token
class-attribute
instance-attribute
¶
next_page_token: Optional[QueryNextPageToken] = None
Token for retrieving the next page of results, if available
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(data: Dict[str, Any]) -> QueryResult
Create a QueryResult from a dictionary response.
Source code in synapseclient/models/table_components.py
synapseclient.models.QueryResultBundle
dataclass
¶
A bundle of information about a query result.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryResultBundle.html
Source code in synapseclient/models/table_components.py
Attributes¶
concrete_type
class-attribute
instance-attribute
¶
concrete_type: str = QUERY_TABLE_CSV_REQUEST
The concrete type of this object
query_result
class-attribute
instance-attribute
¶
query_result: QueryResult = None
A page of query result
query_count
class-attribute
instance-attribute
¶
The total number of rows that match the query. Use mask = 0x2 to include in the bundle.
select_columns
class-attribute
instance-attribute
¶
select_columns: Optional[List[SelectColumn]] = None
The list of SelectColumns from the select clause. Use mask = 0x4 to include in the bundle.
max_rows_per_page
class-attribute
instance-attribute
¶
The maximum number of rows that can be retrieved in a single call. This is a function of the columns that are selected in the query. Use mask = 0x8 to include in the bundle.
column_models
class-attribute
instance-attribute
¶
The list of ColumnModels for the table. Use mask = 0x10 to include in the bundle.
facets
class-attribute
instance-attribute
¶
The list of facets for the search results. Use mask = 0x20 to include in the bundle. (The source code notes a TODO to model this with a dedicated facets dataclass.)
sum_file_sizes
class-attribute
instance-attribute
¶
sum_file_sizes: Optional[SumFileSizes] = None
The sum of the file sizes for all files in the given view query. Use mask = 0x40 to include in the bundle.
last_updated_on
class-attribute
instance-attribute
¶
The date-time when this table/view was last updated. Note: since views are eventually consistent, a view might still be out-of-date even if it was recently updated. Use mask = 0x80 to include in the bundle. This is returned in ISO 8601 format, e.g. 2000-01-01T00:00:00.000Z.
combined_sql
class-attribute
instance-attribute
¶
The SQL that is a combination of the input SQL, FacetRequests, AdditionalFilters, Sorting, and Pagination. Use mask = 0x100 to include in the bundle.
actions_required
class-attribute
instance-attribute
¶
The first 50 actions required to download the files that are part of the query. Use mask = 0x200 to include them in the bundle.
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(data: Dict[str, Any]) -> QueryResultBundle
Create a QueryResultBundle from a dictionary response.
Source code in synapseclient/models/table_components.py
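The mask values listed above combine with bitwise OR. A minimal sketch (the constant names are illustrative; the query call that accepts the resulting mask is assumed to be the table query interface of your client version):

# Illustrative constants matching the documented mask bits.
QUERY_COUNT = 0x2        # include query_count in the bundle
SELECT_COLUMNS = 0x4     # include select_columns in the bundle
SUM_FILE_SIZES = 0x40    # include sum_file_sizes in the bundle
LAST_UPDATED_ON = 0x80   # include last_updated_on in the bundle

# Request the count, sum of file sizes, and last-updated timestamp together.
part_mask = QUERY_COUNT | SUM_FILE_SIZES | LAST_UPDATED_ON
print(hex(part_mask))  # 0xc2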
synapseclient.models.QueryResultOutput
dataclass
¶
The result of querying Synapse with an included part_mask. This class contains a subset of the available items that may be returned by specifying a part_mask.
Source code in synapseclient/models/table_components.py
Attributes¶
count
class-attribute
instance-attribute
¶
The total number of rows that match the query. Use mask = 0x2 to include in the bundle.
sum_file_sizes
class-attribute
instance-attribute
¶
sum_file_sizes: Optional[SumFileSizes] = None
The sum of the file sizes for all files in the given view query. Use mask = 0x40 to include in the bundle.
last_updated_on
class-attribute
instance-attribute
¶
The date-time when this table/view was last updated. Note: since views are eventually consistent, a view might still be out-of-date even if it was recently updated. Use mask = 0x80 to include in the bundle. This is returned in ISO 8601 format, e.g. 2000-01-01T00:00:00.000Z.
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(result: DATA_FRAME_TYPE, data: Dict[str, Any]) -> QueryResultOutput
Create a QueryResultOutput from a result DataFrame and dictionary response.
| PARAMETER | DESCRIPTION |
|---|---|
| result | The pandas DataFrame result from the query. TYPE: DATA_FRAME_TYPE |
| data | The dictionary response from the REST API containing metadata. TYPE: Dict[str, Any] |

| RETURNS | DESCRIPTION |
|---|---|
| QueryResultOutput | A QueryResultOutput instance. |
Source code in synapseclient/models/table_components.py
synapseclient.models.Row
dataclass
¶
Represents a single row of a TableEntity.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/Row.html
Source code in synapseclient/models/table_components.py
Attributes¶
row_id
class-attribute
instance-attribute
¶
The immutable ID issued to a new row.
version_number
class-attribute
instance-attribute
¶
The version number of this row. Each row version is immutable, so when a row is updated a new version is created.
etag
class-attribute
instance-attribute
¶
For queries against EntityViews with query.includeEntityEtag=true, this field will contain the etag of the entity. Will be null for all other cases.
values
class-attribute
instance-attribute
¶
The values for each column of this row. To delete a row, set this to an empty list: []
Functions¶
to_boolean
¶
to_boolean(value)
Convert a string to boolean, case insensitively, where true values are: true, t, and 1 and false values are: false, f, 0. Raise a ValueError for all other values.
Source code in synapseclient/models/table_components.py
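A minimal sketch of the conversion rule described above, not the library's exact implementation:

def to_boolean(value) -> bool:
    # Case-insensitive: 'true'/'t'/'1' are True; 'false'/'f'/'0' are False.
    lowered = str(value).lower()
    if lowered in ("true", "t", "1"):
        return True
    if lowered in ("false", "f", "0"):
        return False
    raise ValueError(f"Can't convert {value!r} to a boolean")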
cast_values
staticmethod
¶
cast_values(values, headers)
Convert a row of table query results from strings to the correct column type.
See: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/ColumnType.html
Source code in synapseclient/models/table_components.py
fill_from_dict
classmethod
¶
Create a Row from a dictionary response.
Source code in synapseclient/models/table_components.py
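A hedged sketch of the deletion convention documented for the values attribute (the constructor keywords are assumed to match the attribute names above, and the IDs are hypothetical):

from synapseclient.models import Row

# Mark an existing row for deletion by sending an empty 'values' list.
row_to_delete = Row(row_id=42, version_number=1, values=[])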
synapseclient.models.RowSet
dataclass
¶
Represents a set of rows of a TableEntity.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/RowSet.html
Source code in synapseclient/models/table_components.py
Attributes¶
concrete_type
class-attribute
instance-attribute
¶
The concrete type of this object
table_id
class-attribute
instance-attribute
¶
The ID of the TableEntity that owns these rows
etag
class-attribute
instance-attribute
¶
Any RowSet returned from Synapse will contain the current etag of the change set. To update any rows from a RowSet the etag must be provided with the POST.
headers
class-attribute
instance-attribute
¶
headers: Optional[List[SelectColumn]] = None
The list of SelectColumns that describes the rows of this set.
rows
class-attribute
instance-attribute
¶
The Rows of this set. The index of each row value aligns with the index of each header.
Functions¶
cast_row
classmethod
¶
Cast the values in a single row to their appropriate column types.
This method takes a row dictionary containing string values from a table query response and converts them to the correct Python types based on the column headers. For example, converts string "123" to integer 123 for INTEGER columns, or string "true" to boolean True for BOOLEAN columns.
| PARAMETER | DESCRIPTION |
|---|---|
| row | A dictionary representing a single table row whose 'values' field contains strings that need to be cast to proper types. |
| headers | A list of header dictionaries, each containing column metadata including 'columnType', which determines how to cast the corresponding value in the row. |

| RETURNS | DESCRIPTION |
|---|---|
| Dict[str, Any] | The same row dictionary with the 'values' field updated to contain properly typed values instead of strings. |
Source code in synapseclient/models/table_components.py
cast_row_set
classmethod
¶
Cast the values in multiple rows to their appropriate column types.
This method takes a list of row dictionaries containing string values from a table query response and converts them to the correct Python types based on the column headers. It applies the same type casting logic as cast_row to each row in the collection.
| PARAMETER | DESCRIPTION |
|---|---|
| rows | A list of row dictionaries, each representing a single table row whose 'values' field contains a list of strings that need to be cast to proper types. |
| headers | A list of header dictionaries, each containing column metadata including 'columnType', which determines how to cast the corresponding values in each row. |

| RETURNS | DESCRIPTION |
|---|---|
| List[Row] | A list of row dictionaries with the 'values' field in each row updated to contain properly typed values instead of strings. |
Source code in synapseclient/models/table_components.py
fill_from_dict
classmethod
¶
Create a RowSet from a dictionary response.
Source code in synapseclient/models/table_components.py
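A hedged sketch of calling cast_row with header dictionaries shaped like the REST API's SelectColumn objects (the exact dictionary keys are assumptions based on the descriptions above):

from synapseclient.models import RowSet

headers = [
    {"name": "age", "columnType": "INTEGER"},
    {"name": "active", "columnType": "BOOLEAN"},
]
row = {"rowId": 1, "versionNumber": 1, "values": ["42", "true"]}

# Strings from the query response become typed Python values.
typed_row = RowSet.cast_row(row=row, headers=headers)
print(typed_row["values"])  # expected: [42, True]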
synapseclient.models.SelectColumn
dataclass
¶
A column model contains the metadata of a single column of a TableEntity.
This result is modeled from: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/SelectColumn.html
Source code in synapseclient/models/table_components.py
Attributes¶
name
class-attribute
instance-attribute
¶
The required display name of the column
column_type
class-attribute
instance-attribute
¶
column_type: Optional[ColumnType] = None
The column type determines the type of data that can be stored in a column. Switching between types (using a transaction with TableUpdateTransactionRequest in the "changes" list) is generally allowed except for switching to "_LIST" suffixed types. In such cases, a new column must be created and data must be copied over manually.
id
class-attribute
instance-attribute
¶
The optional ID of the select column, if this is a direct column selected
Functions¶
fill_from_dict
classmethod
¶
fill_from_dict(data: Dict[str, Any]) -> SelectColumn
Create a SelectColumn from a dictionary response.
Source code in synapseclient/models/table_components.py
synapseclient.models.ColumnChange
dataclass
¶
A change to a column in a table. This is used in the TableSchemaChangeRequest to indicate what changes should be made to the columns in the table.
Source code in synapseclient/models/table_components.py
Attributes¶
old_column_id
class-attribute
instance-attribute
¶
The ID of the old ColumnModel to be replaced with the new. Set to null to indicate a new column should be added without replacing an old column.
new_column_id
class-attribute
instance-attribute
¶
The ID of the new ColumnModel to replace the old. Set to null to indicate the old column should be removed without being replaced.
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
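A minimal sketch of the three kinds of change this dataclass can express, per the attribute descriptions above (the column IDs are hypothetical):

from synapseclient.models import ColumnChange

add = ColumnChange(old_column_id=None, new_column_id="1234")        # add a new column
replace = ColumnChange(old_column_id="1234", new_column_id="5678")  # replace a column
remove = ColumnChange(old_column_id="5678", new_column_id=None)     # remove a column
print(add.to_synapse_request())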
synapseclient.models.PartialRow
dataclass
¶
A partial row to be added to a table. This is used in the PartialRowSet to indicate what rows should be updated in a table during the upsert operation.
Source code in synapseclient/models/table_components.py
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
size
¶
size() -> int
Returns the size of the PartialRow in bytes. This is not an exact size, but follows the calculation used in the REST API.
Source code in synapseclient/models/table_components.py
synapseclient.models.PartialRowSet
dataclass
¶
A set of partial rows to be added to a table. This is used in the AppendableRowSetRequest to indicate what rows should be updated in a table during the upsert operation.
Source code in synapseclient/models/table_components.py
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
synapseclient.models.TableSchemaChangeRequest
dataclass
¶
A request to change the schema of a table. This is used to change the columns in a table. This request is used in the TableUpdateTransaction to indicate what changes should be made to the columns in the table.
Source code in synapseclient/models/table_components.py
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
synapseclient.models.AppendableRowSetRequest
dataclass
¶
A request to append rows to a table. This request is used in the TableUpdateTransaction to indicate what rows should be upserted in the table.
Source code in synapseclient/models/table_components.py
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
synapseclient.models.UploadToTableRequest
dataclass
¶
A request to upload a file to a table. This is used to insert rows into a table via a CSV file. This request is used in the TableUpdateTransaction.
Source code in synapseclient/models/table_components.py
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
synapseclient.models.TableUpdateTransaction
dataclass
¶
Bases: AsynchronousCommunicator
A request to update a table. This is used to update a table with a set of changes. After calling the send_job_and_wait_async method, the results attribute will be filled in based on https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/TableUpdateTransactionResponse.html.
Source code in synapseclient/models/table_components.py
Attributes¶
entities_with_changes_applied
class-attribute
instance-attribute
¶
This will be an array of https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/TableUpdateResponse.html.
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
fill_from_dict
¶
Converts a response from the REST API into this dataclass.
| PARAMETER | DESCRIPTION |
|---|---|
| synapse_response | The response from the REST API that matches https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/TableUpdateTransactionResponse.html |

| RETURNS | DESCRIPTION |
|---|---|
| Self | An instance of this class. |
Source code in synapseclient/models/table_components.py
synapseclient.models.CsvTableDescriptor
dataclass
¶
Derived from https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/CsvTableDescriptor.html
Source code in synapseclient/models/table_components.py
Attributes¶
separator
class-attribute
instance-attribute
¶
separator: str = ','
The delimiter to be used for separating entries in the resulting file. The default character ',' will be used if this is not provided by the caller. For tab-separated values use '\t'
quote_character
class-attribute
instance-attribute
¶
quote_character: str = '"'
The character to be used for quoted elements in the resulting file. The default character '"' will be used if this is not provided by the caller.
escape_character
class-attribute
instance-attribute
¶
escape_character: str = '\\'
The escape character to be used for escaping a separator or quote in the resulting file. The default character '\' will be used if this is not provided by the caller.
line_end
class-attribute
instance-attribute
¶
The line feed terminator to be used for the resulting file. The default value of '\n' will be used if this is not provided by the caller.
is_first_line_header
class-attribute
instance-attribute
¶
is_first_line_header: bool = True
Is the first line a header? The default value of 'true' will be used if this is not provided by the caller.
Functions¶
to_synapse_request
¶
to_synapse_request()
Converts the request to a request expected of the Synapse REST API.
Source code in synapseclient/models/table_components.py
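A hedged sketch of a descriptor for a tab-separated file, relying on the documented defaults for everything else:

from synapseclient.models import CsvTableDescriptor

descriptor = CsvTableDescriptor(
    separator="\t",             # tab-separated values
    is_first_line_header=True,  # first line holds the column names
)
print(descriptor.to_synapse_request())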
synapseclient.models.mixins.table_components.csv_to_pandas_df
¶
csv_to_pandas_df(filepath: Union[str, BytesIO], separator: str = DEFAULT_SEPARATOR, quote_char: str = DEFAULT_QUOTE_CHARACTER, escape_char: str = DEFAULT_ESCAPSE_CHAR, contain_headers: bool = True, lines_to_skip: int = 0, date_columns: Optional[List[str]] = None, list_columns: Optional[List[str]] = None, row_id_and_version_in_index: bool = True, dtype: Optional[Dict[str, Any]] = None, **kwargs) -> DATA_FRAME_TYPE
Convert a csv file to a pandas dataframe
| PARAMETER | DESCRIPTION |
|---|---|
| filepath | The path to the file. TYPE: Union[str, BytesIO] |
| separator | The separator for the file. Defaults to DEFAULT_SEPARATOR. TYPE: str |
| quote_char | The quote character for the file. Defaults to DEFAULT_QUOTE_CHARACTER. TYPE: str |
| escape_char | The escape character for the file. Defaults to DEFAULT_ESCAPSE_CHAR. TYPE: str |
| contain_headers | Whether the file contains headers. Defaults to True. TYPE: bool |
| lines_to_skip | The number of lines to skip at the beginning of the file. Defaults to 0. TYPE: int |
| date_columns | The names of the date columns in the file. TYPE: Optional[List[str]] |
| list_columns | The names of the list columns in the file. TYPE: Optional[List[str]] |
| row_id_and_version_in_index | Whether the file contains rowId and version in the index. Defaults to True. TYPE: bool |
| dtype | The data type for the file. Defaults to None. TYPE: Optional[Dict[str, Any]] |
| **kwargs | Additional keyword arguments to pass to pandas.read_csv. See https://pandas.pydata.org/docs/reference/api/pandas.read_csv.html for the complete list of supported arguments. |

| RETURNS | DESCRIPTION |
|---|---|
| DATA_FRAME_TYPE | A pandas dataframe. |
Source code in synapseclient/models/mixins/table_components.py
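A hedged sketch of reading a downloaded table-query CSV into a DataFrame with the parameters documented above (the file path is hypothetical):

from synapseclient.models.mixins.table_components import csv_to_pandas_df

df = csv_to_pandas_df(
    filepath="query_results.csv",      # hypothetical local CSV from a query
    contain_headers=True,              # first line holds the column names
    row_id_and_version_in_index=True,  # keep ROW_ID/ROW_VERSION in the index
)
print(df.head())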