Merged
111 changes: 110 additions & 1 deletion dtable_events/automations/actions.py
@@ -4321,7 +4321,110 @@ def do_action(self):
else:
auto_rule_logger.warning('google calendar action %s not supported', self.action_type)
return



class ArchiveAction(BaseAction):
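    """Archive rows matched by the rule's filters into big data storage via dtable-db's import API."""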

def __init__(self, auto_rule, action_type, data):
super().__init__(auto_rule, action_type, data)

def can_do_action(self):
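        # archiving requires both the base's enable_archive setting and the big data feature permission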
dtable_settings = self.auto_rule.dtable_metadata.get('settings') or {}
if not dtable_settings.get('enable_archive'):
auto_rule_logger.info(f"dtable {self.auto_rule.dtable_uuid} not enable_archive in settings")
return False
permissions = self.auto_rule.dtable_web_api.internal_dtable_permission(self.auto_rule.dtable_uuid, 'enable_big_data_feature')
if not permissions.get('enable_big_data_feature'):
auto_rule_logger.info(f"dtable {self.auto_rule.dtable_uuid} no permission to arhive rows")
return False
return True

def cron_archive(self):
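        # plain periodic run: archive every row matching the view's filters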
view_filters = self.auto_rule.view_info.get('filters', [])
if view_filters:
view_filter_conjunction = self.auto_rule.view_info.get('filter_conjunction', 'And')
view_filter_conditions = {
'filters': view_filters,
'filter_conjunction': view_filter_conjunction
}
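            # strip the leading "WHERE" keyword from the generated clause, keeping only the condition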
where_clause = BaseSQLGenerator(self.auto_rule.table_info['name'], self.auto_rule.table_info['columns'], filter_conditions=view_filter_conditions)._filter2sql()
if where_clause:
where = where_clause[where_clause.find('WHERE')+len('WHERE'):]
else:
where = ''
else:
where = ''
auto_rule_logger.info(f"rule {self.auto_rule.rule_id} archive WHERE={where}")
try:
resp_json = self.auto_rule.dtable_db_api.import_archive(self.auto_rule.table_info['name'], where)
task_id = resp_json.get('task_id')
success = resp_json.get('success')
if not success:
self.auto_rule.append_warning({
'type': 'archive_failed',
'action_type': self.action_type
})
auto_rule_logger.error(f"rule {self.auto_rule.rule_id} archive where {where} error resp {resp_json}")
else:
auto_rule_logger.info(f"rule {self.auto_rule.rule_id} archive task_id {task_id}")
except Exception as e:
self.auto_rule.append_warning({
'type': 'archive_failed',
'action_type': self.action_type
})
auto_rule_logger.exception(f"rule {self.auto_rule.rule_id} archive where {where} error {e}")

def condition_cron_archive(self):
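        # combine the view's filters with the trigger's own filters into two filter groups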
view_filters = self.auto_rule.view_info.get('filters', [])
view_filter_conjunction = self.auto_rule.view_info.get('filter_conjunction', 'And')
view_filter_conditions = {
'filters': view_filters,
'filter_conjunction': view_filter_conjunction
}
filters = self.auto_rule.trigger.get('filters', [])
filter_conjunction = self.auto_rule.trigger.get('filter_conjunction', 'And')
rule_filter_conditions = {
'filters': filters,
'filter_conjunction': filter_conjunction
}
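        # a row is archived only if it matches both groups (group_conjunction 'And')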
filter_conditions = {
'filter_groups': [view_filter_conditions, rule_filter_conditions],
'group_conjunction': 'And',
'start': 0,
'limit': 500,
}
where_clause = BaseSQLGenerator(self.auto_rule.table_info['name'], self.auto_rule.table_info['columns'], filter_condition_groups=filter_conditions)._groupfilter2sql()
if where_clause:
where = where_clause[where_clause.find('WHERE')+len('WHERE'):]
else:
where = ''
auto_rule_logger.info(f"rule {self.auto_rule.rule_id} archive WHERE={where}")
try:
resp_json = self.auto_rule.dtable_db_api.import_archive(self.auto_rule.table_info['name'], where)
task_id = resp_json.get('task_id')
success = resp_json.get('success')
if not success:
self.auto_rule.append_warning({
'type': 'archive_failed',
'action_type': self.action_type
})
auto_rule_logger.error(f"rule {self.auto_rule.rule_id} archive where {where} error resp {resp_json}")
else:
auto_rule_logger.info(f"rule {self.auto_rule.rule_id} archive task_id {task_id}")
except Exception as e:
self.auto_rule.append_warning({
'type': 'archive_failed',
'action_type': self.action_type
})
auto_rule_logger.exception(f"rule {self.auto_rule.rule_id} archive where {where} error {e}")

def do_action(self):
if not self.can_do_action():
return
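        # periodically-by-condition rules also apply the trigger's filters; plain periodic rules use view filters only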
if self.auto_rule.trigger.get('condition') == CONDITION_PERIODICALLY_BY_CONDITION:
self.condition_cron_archive()
else:
self.cron_archive()


class RuleInvalidException(Exception):
@@ -4724,6 +4827,9 @@ def can_condition_trigger_action(self, action):
if self.run_condition == PER_UPDATE:
return True
return False
elif action_type == 'archive':
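            # archive actions only run on scheduled (cron) conditions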
if self.run_condition in CRON_CONDITIONS:
return True
return False

def do_actions(self, db_session, with_test=False):
@@ -4971,6 +5077,9 @@ def do_actions(self, db_session, with_test=False):
}
GoogleCalendar(self, action_info.get('type'), self.data, config, function_type).do_action()

elif action_info.get('type') == 'archive':
ArchiveAction(self, action_info.get('type'), self.data).do_action()

except RuleInvalidException as e:
auto_rule_logger.warning('auto rule %s with data %s, invalid error: %s', self.rule_id, self.data, e)
self.task_run_success = False
15 changes: 15 additions & 0 deletions dtable_events/utils/dtable_db_api.py
@@ -295,3 +295,18 @@ def query_backup_task_status(self):
url = '%s/api/v1/backup/%s/task/?from=dtable_events' % (self.dtable_db_url, self.dtable_uuid)
response = requests.get(url, headers=self.admin_headers)
return parse_response(response)

def import_archive(self, table_name, where):
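        # start an archive import task in dtable-db; the response carries a task_id to poll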
url = '%s/api/v1/import/%s/?from=dtable_events' % (self.dtable_db_url, self.dtable_uuid)
data = {
'table_name': table_name,
'where': where
}
response = requests.post(url, headers=self.admin_headers, json=data)
return parse_response(response)

def query_archive_task(self, task_id):
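        # query the status of an archive import task started by import_archive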
url = '%s/api/v1/import/%s/task/?from=dtable_events' % (self.dtable_db_url, self.dtable_uuid)
params = {'task_id': task_id}
response = requests.get(url, headers=self.admin_headers, params=params)
return parse_response(response)
11 changes: 11 additions & 0 deletions dtable_events/utils/dtable_web_api.py
@@ -251,3 +251,14 @@ def internal_roles(self):
headers = {'Authorization': 'Token ' + token}
resp = requests.get(url, headers=headers)
return parse_response(resp).get('roles', [])

def internal_dtable_permission(self, dtable_uuid, permission):
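        # ask dtable-web's internal API whether the base has the given permission, authenticated with an internal JWT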
logger.debug(f"internal_dtable_permission: {dtable_uuid} permission: {permission}")
url = '%(server_url)s/api/v2.1/internal-dtable-permission/?from=dtable_events' % {
'server_url': self.dtable_web_service_url
}
token = jwt.encode({'is_internal': True}, DTABLE_PRIVATE_KEY, algorithm='HS256')
headers = {'Authorization': 'Token ' + token}
params = {'dtable_uuid': dtable_uuid, 'permission': permission}
resp = requests.get(url, headers=headers, params=params)
return parse_response(resp)