desc | decl | bodies
---|---|---
'It should raise AssertionError if a table is not selected'
| def test_remote_update_asserts_current_table(self):
| args = ([1], 'args')
kwargs = {'kwargs': True}
with self.assertRaises(AssertionError):
(res, adapter) = self._test_adapter_method('remote_update', create=True, args=args, kwargs=kwargs)
|
'It should call generic executor with proper args'
| def test_execute_postgresql(self):
| expect = ('query', 'execute', 'metadata')
with mock.patch.object(self.dbsource, '_execute_generic', autospec=True) as execute:
execute.return_value = ('rows', 'cols')
self.dbsource.execute(*expect)
execute.assert_called_once_with(*expect)
|
'It should call the adapter correctly if old kwargs are provided'
| def test_execute_calls_adapter_old_api(self):
| expect = [None, None, 'metadata']
with mock.patch.object(self.dbsource, 'execute_postgresql', autospec=True) as psql:
psql.return_value = ('rows', 'cols')
self.dbsource.execute(sqlparams='params', sqlquery='query', *expect)
(expect[0], expect[1]) = ('query', 'params')
psql.assert_called_once_with(*expect)
|
'It should return open connection for use'
| def test_conn_open(self):
| with mock.patch.object(self.dbsource, 'connection_open', autospec=True) as connection:
res = self.dbsource.conn_open()
self.assertEqual(res, connection().__enter__())
|
'Extract all images in order from an HTML field in a generator.
:param str html_content:
HTML contents from where to extract the images.
:param int limit:
Only get up to this number of images.
:param bool fail:
If ``True``, exceptions will be raised.'
| @api.model
def imgs_from_html(self, html_content, limit=None, fail=False):
| try:
doc = html.fromstring(html_content)
except (TypeError, etree.XMLSyntaxError, etree.ParserError):
if fail:
raise
else:
_logger.exception('Failure parsing this HTML:\n%s', html_content)
return
query = """
    //img[@src] |
    //*[contains(translate(@style, "BACKGROUND", "background"),
        'background')]
        [contains(translate(@style, "URL", "url"), 'url(')]
"""
rgx = re.compile(r"""
    url\(\s*         # Start function
    (?P<url>[^)]*)   # URL string
    \s*\)            # End function
""", re.IGNORECASE | re.VERBOSE)
for (lap, element) in enumerate(doc.xpath(query)):
if (limit and (lap >= limit)):
break
if (element.tag == 'img'):
(yield element.attrib['src'])
else:
for rule in element.attrib['style'].split(';'):
parts = rule.split(':', 1)
try:
if (parts[0].strip().lower() in {'background', 'background-image'}):
(yield rgx.search(parts[1]).group('url').strip('"\''))
except (IndexError, AttributeError):
pass
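For reference, a minimal standalone sketch of the same extraction idea, using lxml and re directly with no Odoo environment; all names here are illustrative and not part of the module:

    import re
    from lxml import html

    URL_RGX = re.compile(r'url\(\s*(?P<url>[^)]*)\s*\)', re.IGNORECASE)

    def iter_image_urls(html_content, limit=None):
        # Walk every element; yield <img src> values and url(...) backgrounds.
        doc = html.fromstring(html_content)
        found = 0
        for element in doc.iter():
            if limit is not None and found >= limit:
                return
            style = element.get('style') or ''
            if element.tag == 'img' and element.get('src'):
                found += 1
                yield element.get('src')
            elif 'url(' in style.lower():
                match = URL_RGX.search(style)
                if match:
                    found += 1
                    yield match.group('url').strip('"\'')

    print(list(iter_image_urls('<img src="/a"/><div style="background:url(/b)"/>')))
    # ['/a', '/b']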
|
'Images correctly found in <img> elements and backgrounds.'
| def test_mixed_images_found(self):
| content = u'\n <div>\n <!-- src-less img -->\n <img/>\n <p/>\n <img src="/path/0"/>\n <img src="/path/1"/>\n <img src="/path/2"/>\n <img src="/path/3"/>\n <section style="background : URL(\'/path/4\');;background;\xf6;">\n <div style=\'BACKGROUND-IMAGE:url(/path/5)\'>\n <p style="background:uRl("/path/6")">\n <img src="/path/7"/>\n </p>\n </div>\n </section>\n </div>\n '
for (n, url) in enumerate(self.imgs_from_html(content)):
self.assertEqual(('/path/%d' % n), url)
self.assertEqual(n, 7)
for (n, url) in enumerate(self.imgs_from_html(content, 1)):
self.assertEqual(('/path/%d' % n), url)
self.assertEqual(n, 0)
|
'Empty HTML handled correctly.'
| @mute_logger(ir_fields_converter.__name__)
def test_empty_html(self):
| for (laps, text) in self.imgs_from_html(''):
self.assertTrue(False)
with self.assertRaises(etree.XMLSyntaxError):
list(self.imgs_from_html('', fail=True))
|
'``False`` HTML handled correctly.'
| @mute_logger(ir_fields_converter.__name__)
def test_false_html(self):
| for (laps, text) in self.imgs_from_html(False):
self.assertTrue(False)
with self.assertRaises(TypeError):
list(self.imgs_from_html(False, fail=True))
|
'Bad HTML handled correctly.'
| @mute_logger(ir_fields_converter.__name__)
def test_bad_html(self):
| for (laps, text) in self.imgs_from_html('<<bad>'):
self.assertTrue(False)
with self.assertRaises(etree.ParserError):
list(self.imgs_from_html('<<bad>', fail=True))
|
'This method allows the system to iterate over a RecordSet with each
of the records being browsed in the language specified by the model\'s
_language_path attribute. Of course, this is a cache killer. It was
conceived to make translations in RML reports work again, as using
setLang() in a report no longer works as expected in 8.0 due to the
way caching works in the new API.'
| def with_language_path(self, path=None):
| path = (path or self._language_path)
for record in self:
if (not path):
(yield record)
continue
lang = record
for part in path.split('.'):
lang = lang[part]
if (not lang):
(yield record)
break
else:
(yield record.with_context(lang=lang))
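The path walk itself is plain attribute chasing; a standalone sketch of the idea ('partner_id.lang' is an illustrative path, not a value taken from this module):

    def follow_path(record, path):
        # Follow a dotted path, stopping early on a falsy intermediate value.
        value = record
        for part in path.split('.'):
            value = value[part]
            if not value:
                return None
        return value

    record = {'partner_id': {'lang': 'fr_FR'}}
    print(follow_path(record, 'partner_id.lang'))  # fr_FR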
|
'Get system parameter from config'
| def test_get_param(self):
| res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertFalse(res)
value = self.ICP.get_param('ircp_from_config')
self.assertEqual(value, 'config_value')
res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertEqual(len(res), 1)
self.assertEqual(res.value, 'config_value')
|
'We can\'t set parameters that are in the config file'
| def test_set_param_1(self):
| self.ICP.set_param('ircp_from_config', 'new_value')
value = self.ICP.get_param('ircp_from_config')
self.assertEqual(value, 'config_value')
res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertEqual(len(res), 1)
res.write({'value': 'new_value'})
value = self.ICP.get_param('ircp_from_config')
self.assertEqual(value, 'config_value')
res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertEqual(len(res), 1)
res.unlink()
res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertEqual(len(res), 0)
value = self.ICP.get_param('ircp_from_config')
self.assertEqual(value, 'config_value')
res = self.ICP.search([('key', '=', 'ircp_from_config')])
self.assertEqual(len(res), 1)
|
'We can set parameters that are not in the config file'
| def test_set_param_2(self):
| self.ICP.set_param('some.param', 'new_value')
self.assertEqual(self.ICP.get_param('some.param'), 'new_value')
res = self.ICP.search([('key', '=', 'some.param')])
res.unlink()
res = self.ICP.search([('key', '=', 'some.param')])
self.assertFalse(res)
|
'Empty config values cause an error'
| def test_empty(self):
| with self.assertRaises(UserError):
self.ICP.get_param('ircp_empty')
self.assertEqual(self.ICP.get_param('ircp_nonexistant'), False)
|
'It should close the connection'
| def test_connection_close_sqlite(self):
| connection = mock.MagicMock()
res = self.dbsource.connection_close_sqlite(connection)
self.assertEqual(res, connection.close())
|
'It should call SQLAlchemy open'
| def test_connection_open_sqlite(self):
| with mock.patch.object(self.dbsource, '_connection_open_sqlalchemy') as parent_method:
self.dbsource.connection_open_sqlite()
parent_method.assert_called_once_with()
|
'It should pass args to SQLAlchemy execute'
| def test_execute_sqlite(self):
| expect = ('sqlquery', 'sqlparams', 'metadata')
with mock.patch.object(self.dbsource, '_execute_sqlalchemy') as parent_method:
self.dbsource.execute_sqlite(*expect)
parent_method.assert_called_once_with(*expect)
|
'Always match. Duplicates will be fished out by message_id'
| def search_matches(self, cr, uid, conf, mail_message, mail_message_org):
| return [True]
|
'Returns ids found for model with mail_message'
| def search_matches(self, cr, uid, conf, mail_message, mail_message_org):
| return []
|
'Do whatever it takes to handle a match'
| def handle_match(self, cr, uid, connection, object_id, folder, mail_message, mail_message_org, msgid, context=None):
| return folder.server_id.attach_mail(connection, object_id, folder, mail_message, msgid)
|
'Return ids of objects matched'
| @api.multi
def handle_folder(self, connection, folder):
| matched_object_ids = []
for this in self:
_logger.info('start checking for emails in %s server %s', folder.path, this.name)
match_algorithm = folder.get_algorithm()
if (connection.select(folder.path)[0] != 'OK'):
_logger.error('Could not open mailbox %s on %s', folder.path, this.server)
connection.select()
continue
(result, msgids) = this.get_msgids(connection)
if (result != 'OK'):
_logger.error('Could not search mailbox %s on %s', folder.path, this.server)
continue
for msgid in msgids[0].split():
matched_object_ids += this.apply_matching(connection, folder, msgid, match_algorithm)
_logger.info('finished checking for emails in %s server %s', folder.path, this.name)
return matched_object_ids
|
'Return imap ids of messages to process'
| @api.multi
def get_msgids(self, connection):
| return connection.search(None, 'UNDELETED')
|
'Return ids of objects matched'
| @api.multi
def apply_matching(self, connection, folder, msgid, match_algorithm):
| matched_object_ids = []
for this in self:
(result, msgdata) = connection.fetch(msgid, '(RFC822)')
if (result != 'OK'):
_logger.error('Could not fetch %s in %s on %s', msgid, folder.path, this.server)
continue
mail_message = self.env['mail.thread'].message_parse(msgdata[0][1], save_original=this.original)
if self.env['mail.message'].search([('message_id', '=', mail_message['message_id'])]):
continue
found_ids = match_algorithm.search_matches(self.env.cr, self.env.uid, folder, mail_message, msgdata[0][1])
if (found_ids and ((len(found_ids) == 1) or folder.match_first)):
try:
self.env.cr.execute('savepoint apply_matching')
match_algorithm.handle_match(self.env.cr, self.env.uid, connection, found_ids[0], folder, mail_message, msgdata[0][1], msgid, self.env.context)
self.env.cr.execute('release savepoint apply_matching')
matched_object_ids += found_ids[:1]
except Exception:
self.env.cr.execute('rollback to savepoint apply_matching')
_logger.exception('Failed to fetch mail %s from %s', msgid, this.name)
elif folder.flag_nonmatching:
connection.store(msgid, '+FLAGS', '\\FLAGGED')
return matched_object_ids
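The savepoint dance above is a generic per-item isolation pattern; a minimal sketch, assuming a psycopg2-style cursor `cr` (names illustrative):

    def process_guarded(cr, items, handle):
        # One failing item rolls back only its own work, not the whole batch.
        for item in items:
            cr.execute('SAVEPOINT process_item')
            try:
                handle(item)
                cr.execute('RELEASE SAVEPOINT process_item')
            except Exception:
                cr.execute('ROLLBACK TO SAVEPOINT process_item')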
|
'Return ids of messages created'
| @api.multi
def attach_mail(self, connection, object_id, folder, mail_message, msgid):
| mail_message_ids = []
for this in self:
partner_id = None
if (folder.model_id.model == 'res.partner'):
partner_id = object_id
if ('partner_id' in self.env[folder.model_id.model]._columns):
partner_id = self.env[folder.model_id.model].browse(object_id).partner_id.id
attachments = []
if (this.attach and mail_message.get('attachments')):
for attachment in mail_message['attachments']:
(fname, fcontent) = attachment
if isinstance(fcontent, unicode):
fcontent = fcontent.encode('utf-8')
data_attach = {
    'name': fname,
    'datas': base64.b64encode(str(fcontent)),
    'datas_fname': fname,
    'description': _('Mail attachment'),
    'res_model': folder.model_id.model,
    'res_id': object_id,
}
attachments.append(self.env['ir.attachment'].create(data_attach))
mail_message_ids.append(self.env['mail.message'].create({
    'author_id': partner_id,
    'model': folder.model_id.model,
    'res_id': object_id,
    'type': 'email',
    'body': mail_message.get('body'),
    'subject': mail_message.get('subject'),
    'email_from': mail_message.get('from'),
    'date': mail_message.get('date'),
    'message_id': mail_message.get('message_id'),
    'attachment_ids': [(6, 0, [a.id for a in attachments])],
}))
if folder.delete_matching:
connection.store(msgid, '+FLAGS', '\\DELETED')
return mail_message_ids
|
'Override write to verify that access to the `Immutable` group is
not given or removed by users without access'
| @api.multi
def write(self, vals):
| if ((not vals.get('users')) or self.env.user.has_group(IMMUTABLE)):
return super(ResGroups, self).write(vals)
immutable = self.env.ref(IMMUTABLE, raise_if_not_found=False)
if (immutable and (immutable in self)):
raise AccessError(_('You must be a member of the `Immutable` group to grant access to it'))
return super(ResGroups, self).write(vals)
|
'Check to see if the user being edited is Immutable and if so,
make sure that the user performing the action has access'
| def _check_immutable(self):
| if self.has_group(IMMUTABLE):
if (not self.env.user.has_group(IMMUTABLE)):
raise AccessError(_('You do not have permission to alter an Immutable User'))
|
'Override write to verify that there are no alterations to users
who are members of the `Immutable` group'
| @api.multi
def write(self, vals):
| for rec in self:
rec._check_immutable()
immutable = self.env.ref(IMMUTABLE)
has_group = self.env.user.has_group(IMMUTABLE)
if (vals.get(('in_group_%s' % immutable.id)) and (not has_group)):
raise AccessError(_('You must be a member of the `Immutable` group to grant access to it'))
return super(ResUsers, self).write(vals)
|
'Override unlink to verify that there are no deletions of users
who are members of the `Immutable` group'
| @api.multi
def unlink(self):
| for rec in self:
rec._check_immutable()
return super(ResUsers, self).unlink()
|
'It should verify that `Administrator` can add users to the
immutable group by default'
| def test_can_add_immutable(self):
| self.user.write({('in_group_%s' % self.immutable.id): True})
self.assertTrue(self.user.has_group('user_immutable.group_immutable'))
|
'It should verify that other users cannot add to the immutable
group'
| def test_non_immutable_cannot_add_immutable(self):
| with self.assertRaises(AccessError):
self.user.sudo(self.user.id).write({('in_group_%s' % self.immutable.id): True})
|
'It should verify that immutable users can alter users in the
immutable group'
| def test_immutable_can_alter_immutable(self):
| self.user.write({('in_group_%s' % self.immutable.id): True})
exp = 'Princess Peach'
self.user.write({'name': exp})
self.assertEqual(self.user.name, exp)
|
'It should make sure non `Immutable` members cannot unlink other
`Immutable` Members'
| def test_immutable_cannot_be_unlinked(self):
| with self.assertRaises(AccessError):
self.env.ref('base.user_root').sudo(self.user.id).unlink()
|
'It should make sure `Immutable` members can unlink other
`Immutable` Members'
| def test_immutable_can_be_unlinked_by_immutable(self):
| user = self.user.copy()
user.write({('in_group_%s' % self.immutable.id): True})
self.assertTrue(user.unlink())
|
'It should raise `AccessError` when called by a user
outside the `Immutable` group on an `Immutable` user'
| def test_check_immutable(self):
| with self.assertRaises(AccessError):
self.env.ref('base.user_root').sudo(self.user.id)._check_immutable()
|
'It should make sure that `Administrator` can add users to the
immutable group by default'
| def test_can_add_immutable(self):
| self.immutable.write({'users': [(4, self.user.id)]})
self.assertTrue(self.user.has_group('user_immutable.group_immutable'))
|
'It should make sure that other users cannot add to the immutable
group'
| def test_non_immutable_cannot_add_immutable(self):
| immutable = self.env.ref('user_immutable.group_immutable')
with self.assertRaises(AccessError):
immutable.sudo(self.user.id).write({'users': [(4, self.user.id)]})
|
'Remove old super_calendar records'
| def _clear_super_calendar_records(self):
| super_calendar_pool = self.env['super.calendar']
super_calendar_list = super_calendar_pool.search([])
super_calendar_list.unlink()
|
'At every cron execution, all \'super calendar\' records are deleted
and regenerated.'
| @api.multi
def generate_calendar_records(self):
| self._clear_super_calendar_records()
configurator_list = self.search([])
for configurator in configurator_list:
for line in configurator.line_ids:
configurator._generate_record_from_line(line)
_logger.info('Calendar generated')
return True
|
'Create super_calendar records from super_calendar_configurator_line
objects.'
| @api.multi
def _generate_record_from_line(self, line):
| super_calendar_pool = self.env['super.calendar']
values = self._get_record_values_from_line(line)
for record in values:
super_calendar_pool.create(values[record])
|
'Get super_calendar field values from super_calendar_configurator_line
objects.
Check if the User value is a res.users.'
| @api.multi
def _get_record_values_from_line(self, line):
| res = {}
current_pool = self.env[line.name.model]
domain = ((line.domain and safe_eval(line.domain)) or [])
current_record_list = current_pool.search(domain)
for cur_rec in current_record_list:
f_user = line.user_field_id.name
f_descr = line.description_field_id.name
f_date_start = line.date_start_field_id.name
f_date_stop = line.date_stop_field_id.name
f_duration = line.duration_field_id.name
if (f_user and cur_rec[f_user] and (cur_rec[f_user]._model._name != 'res.users')):
raise exceptions.ValidationError((_("The 'User' field of record %s (%s) does not refer to res.users") % (cur_rec[f_descr], line.name.model)))
if ((cur_rec[f_descr] or line.description_code) and cur_rec[f_date_start]):
duration = False
if (line.date_start_field_id.ttype == 'date'):
date_format = tools.DEFAULT_SERVER_DATE_FORMAT
else:
date_format = tools.DEFAULT_SERVER_DATETIME_FORMAT
date_start = datetime.strptime(cur_rec[f_date_start], date_format)
if ((not line.duration_field_id) and line.date_stop_field_id and cur_rec[f_date_start] and cur_rec[f_date_stop]):
if (line.date_stop_field_id.ttype == 'date'):
date_format = tools.DEFAULT_SERVER_DATE_FORMAT
else:
date_format = tools.DEFAULT_SERVER_DATETIME_FORMAT
date_stop = datetime.strptime(cur_rec[f_date_stop], date_format)
date_diff = (date_stop - date_start)
duration = (date_diff.total_seconds() / 3600)
elif line.duration_field_id:
duration = cur_rec[f_duration]
if (line.description_type != 'code'):
name = cur_rec[f_descr]
else:
parse_dict = {'o': cur_rec}
mytemplate = Template(line.description_code)
name = mytemplate.render(**parse_dict)
if (line.date_start_field_id.ttype == 'date'):
tz = timezone((self._context.get('tz') or self.env.user.tz or 'UTC'))
local_date_start = tz.localize(date_start)
utc_date_start = local_date_start.astimezone(utc)
date_start = utc_date_start
date_start = datetime.strftime(date_start, tools.DEFAULT_SERVER_DATETIME_FORMAT)
super_calendar_values = {
    'name': name,
    'date_start': date_start,
    'duration': duration,
    'user_id': (f_user and cur_rec[f_user].id),
    'configurator_id': self.id,
    'res_id': ((line.name.model + ',') + str(cur_rec['id'])),
    'model_id': line.name.id,
}
res[cur_rec] = super_calendar_values
return res
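For intuition, a worked example of the duration computation above (pure Python):

    from datetime import datetime

    start = datetime(2024, 1, 1, 9, 0)
    stop = datetime(2024, 1, 1, 17, 30)
    # Same formula as the configurator: seconds between stop and start, in hours.
    print((stop - start).total_seconds() / 3600)  # 8.5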
|
'Test if record values are correctly computed'
| def test_get_record_values_from_line(self):
| values_partner_a = {'configurator_id': self.super_calendar_configurator.id, 'date_start': self.partner_A.write_date, 'duration': False, 'model_id': self.partner_model.id, 'name': self.partner_A.name, 'res_id': ((self.partner_model.model + ',') + str(self.partner_A.id)), 'user_id': False}
self.assertEqual(self.super_calendar_configurator._get_record_values_from_line(self.super_calendar_configurator.line_ids[0])[self.partner_A], values_partner_a)
self.date_stop_field = self.ModelFieldsObj.search([('name', '=', 'date'), ('model', '=', 'res.partner')])
start_date = datetime.strptime(self.partner_A.write_date, DEFAULT_SERVER_DATETIME_FORMAT)
stop_date = datetime.strptime(self.partner_A.date, DEFAULT_SERVER_DATE_FORMAT)
date_diff = (stop_date - start_date)
self.super_calendar_configurator_line.write({'date_stop_field_id': self.date_stop_field.id})
values_partner_a['duration'] = (date_diff.total_seconds() / 3600)
self.assertEqual(self.super_calendar_configurator._get_record_values_from_line(self.super_calendar_configurator.line_ids[0])[self.partner_A], values_partner_a)
self.super_calendar_configurator2 = self.SuperCalendarConfiguratorObj.create({'name': 'Partners 2'})
self.super_calendar_configurator_line2 = self.SuperCalendarConfiguratorLineObj.create({'name': self.partner_model.id, 'date_start_field_id': self.date_start_field.id, 'description_type': 'code', 'description_code': '${o.email}', 'configurator_id': self.super_calendar_configurator2.id, 'domain': [('name', '=', self.partner_A.name)]})
values_partner_a['name'] = self.partner_A.email
values_partner_a['duration'] = False
values_partner_a['configurator_id'] = self.super_calendar_configurator2.id
self.assertEqual(self.super_calendar_configurator2._get_record_values_from_line(self.super_calendar_configurator2.line_ids[0])[self.partner_A], values_partner_a)
|
'Test if calendar records are effectively created'
| def test_generate_calendar_records(self):
| self.super_calendar_configurator.generate_calendar_records()
super_calendar_record = self.SuperCalendarObj.search([('name', '=', self.partner_A.name)])
self.assertEqual(super_calendar_record.date_start, self.partner_A.write_date)
|
'Be sure we browse ints; ids are already normalized'
| @classmethod
def _browse(cls, ids, env, prefetch=None):
| return super(ResUsers, cls)._browse([(i if (not isinstance(i, BaseSuspendSecurityUid)) else super(BaseSuspendSecurityUid, i).__int__()) for i in ids], env, prefetch=prefetch)
|
'Check that the sso_key is not copied on copy'
| @unittest.skipIf(os.environ.get('TRAVIS'), "When run by Travis, tests run on a database with all required addons from server-tools and their dependencies installed. Even if `auth_from_http_remote_user` does not require the `mail` module, the previous installation of the mail module has created the column `notification_email_send` as REQUIRED in the table res_partner. Because of that, it's no longer possible to copy a res_user without an integrity error")
def test_copy(self):
| res_users_obj = self.registry('res.users')
vals = {'sso_key': '123'}
res_users_obj.write(self.cr, self.uid, self.uid, vals)
read_vals = res_users_obj.read(self.cr, self.uid, self.uid, ['sso_key'])
self.assertDictContainsSubset(vals, read_vals)
copy = res_users_obj.copy(self.cr, self.uid, self.uid)
read_vals = res_users_obj.read(self.cr, self.uid, copy, ['sso_key'])
self.assertFalse(read_vals.get('sso_key'))
|
'It should return a list of stages'
| def test_get_states(self):
| test_stage = self.env['base.kanban.stage'].with_context({})
self.assertEqual(test_stage._get_states(), [('draft', 'New'), ('open', 'In Progress'), ('pending', 'Pending'), ('done', 'Done'), ('cancelled', 'Cancelled'), ('exception', 'Exception')])
|
'Import data and return an error message or an empty string'
| def _import_data(self, cr, uid, flds, data, model_obj, table_obj, log):
| def find_m2o(field_list):
'"Find index of first column with a one2many field'
for (i, x) in enumerate(field_list):
if ((len(x) > 3) and ((x[(-3):] == ':id') or (x[(-3):] == '/id'))):
return i
return (-1)
def append_to_log(log, level, obj_id='', msg='', rel_id=''):
if ('_id_' in obj_id):
obj_id = (('.'.join(obj_id.split('_')[:(-2)]) + ': ') + obj_id.split('_')[(-1)])
if ((': .' in msg) and (not rel_id)):
rel_id = msg[(msg.find(': .') + 3):]
if ('_id_' in rel_id):
rel_id = (('.'.join(rel_id.split('_')[:(-2)]) + ': ') + rel_id.split('_')[(-1)])
msg = msg[:msg.find(': .')]
log['last_log'].append(('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg)))
_logger.debug(data)
cols = list(flds)
errmsg = str()
if table_obj.raise_import_errors:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
else:
try:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
except:
errmsg = str(sys.exc_info()[1])
if (errmsg and (not table_obj.ignore_rel_errors)):
append_to_log(log, 'ERROR', data, errmsg)
log['last_error_count'] += 1
return False
if (errmsg and table_obj.ignore_rel_errors):
append_to_log(log, 'WARN', data, errmsg)
log['last_warn_count'] += 1
i = find_m2o(cols)
if (i >= 0):
del cols[i]
del data[i]
self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
else:
append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
log['last_error_count'] += 1
return False
return True
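The fallback above is "retry after dropping the first relational column"; a standalone sketch of that idea, with do_import standing in for model_obj.import_data (illustrative, no ORM needed):

    def import_with_retries(cols, row, do_import):
        try:
            do_import(cols, row)
            return True
        except Exception:
            # Drop the first external-id (m2o) column and try again.
            for i, col in enumerate(cols):
                if col.endswith((':id', '/id')):
                    return import_with_retries(
                        cols[:i] + cols[i + 1:], row[:i] + row[i + 1:], do_import)
            return False  # no more relational columns left to drop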
|
'Export a zip file containing the module based on the information
provided in the prototype, using the templates chosen in the wizard.'
| @api.model
def action_export(self, ids):
| if isinstance(ids, (int, long)):
ids = [ids]
wizard = self.browse(ids)
active_model = self._context.get('active_model')
msg = '%s has to be called from a "module_prototyper", not a "%s"'
assert (active_model == 'module_prototyper'), (msg % (self, active_model))
prototypes = self.env[active_model].browse([self._context.get('active_id')])
zip_details = self.zip_files(wizard, prototypes)
if (len(prototypes) == 1):
zip_name = prototypes[0].name
else:
zip_name = 'prototyper_export'
wizard.write({'name': ('%s.zip' % (zip_name,)), 'state': 'get', 'data': base64.encodestring(zip_details.stringIO.getvalue())})
return {'type': 'ir.actions.act_window', 'res_model': 'module_prototyper.module.export', 'view_mode': 'form', 'view_type': 'form', 'res_id': wizard.id, 'views': [(False, 'form')], 'target': 'new'}
|
'Takes a set of files and zips them.
:param file_details: tuple (filename, file_content)
:return: tuple (zip_file, stringIO)'
| @staticmethod
def zip_files(wizard, prototypes):
| zip_details = namedtuple('Zip_details', ['zip_file', 'stringIO'])
out = StringIO.StringIO()
with zipfile.ZipFile(out, 'w') as target:
for prototype in prototypes:
prototype.setup_env(wizard.api_version)
file_details = prototype.generate_files()
for (filename, file_content) in file_details:
if isinstance(file_content, unicode):
file_content = file_content.encode('utf-8')
filename = os.path.join(prototype.name, filename)
info = zipfile.ZipInfo(filename)
info.compress_type = zipfile.ZIP_DEFLATED
info.external_attr = 2175008768  # 0100644 << 16: regular file, rw-r--r--
target.writestr(info, file_content)
return zip_details(zip_file=target, stringIO=out)
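For reference, the in-memory zip pattern in isolation (Python 2 style StringIO, matching this codebase; the module and file names are illustrative):

    import os
    import StringIO
    import zipfile

    out = StringIO.StringIO()
    with zipfile.ZipFile(out, 'w') as target:
        # ZipInfo lets us set per-entry metadata before writing the content.
        info = zipfile.ZipInfo(os.path.join('my_module', '__init__.py'))
        info.compress_type = zipfile.ZIP_DEFLATED
        target.writestr(info, '# generated file\n')
    print(len(out.getvalue()))  # byte size of the finished archive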
|
'Extract the content of default description'
| def get_default_description(self):
| filepath = ('%s/../data/README.rst' % (os.path.dirname(__file__),))
with open(filepath, 'r') as content_file:
content = content_file.read()
return content
|
'Set the Jinja2 environment.
The environment helps the system find the templates to render.
:param api_version: module_prototyper.api_version, odoo api
:return: jinja2.Environment instance.'
| @api.model
def setup_env(self, api_version):
| if (self._env is None):
self._env = Environment(lstrip_blocks=True, trim_blocks=True, loader=FileSystemLoader(os.path.join(self.template_path, api_version.name)))
self._api_version = api_version
return self._env
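A minimal standalone sketch of this Jinja2 setup (the template name and contents are illustrative; the real loader reads from template_path):

    from jinja2 import DictLoader, Environment

    env = Environment(
        lstrip_blocks=True, trim_blocks=True,
        loader=DictLoader({'greeting.template': 'Hello {{ name }}!'}))
    print(env.get_template('greeting.template').render(name='prototyper'))
    # Hello prototyper!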
|
'Mock the list of fields into a dictionary.
It allows us to add or change attributes of the fields.
:return: None'
| def set_field_descriptions(self):
| for field in self.field_ids:
field_description = {}
field_description.update({attr_name: getattr(field, attr_name) for attr_name in dir(field) if (not (attr_name[0] == '_'))})
field_description['name'] = self.unprefix(field.name)
self._field_descriptions[field] = field_description
|
'Generates the files from the details of the prototype.
:return: tuple'
| @api.model
def generate_files(self):
| assert (self._env is not None), 'Run setup_env(api_version) before generating files.'
self._data_files = []
self._demo_files = []
self._field_descriptions = {}
self.set_field_descriptions()
file_details = []
file_details.extend(self.generate_models_details())
file_details.extend(self.generate_views_details())
file_details.extend(self.generate_menus_details())
file_details.append(self.generate_module_init_file_details())
file_details.extend(self.generate_data_files())
file_details.append(self.generate_module_openerp_file_details())
if self.icon_image:
file_details.append(self.save_icon())
return file_details
|
'Save the icon of the prototype as an image.
The image is used afterwards as the icon of the exported module.
:return: FileDetails instance'
| @api.model
def save_icon(self):
| return self.File_details(os.path.join('static', 'description', 'icon.jpg'), base64.b64decode(self.icon_image))
|
'Wrapper to generate the __openerp__.py file of the module.'
| @api.model
def generate_module_openerp_file_details(self):
| fn_inc_ext = ('%s.py' % (self._api_version.manifest_file_name,))
return self.generate_file_details(fn_inc_ext, ('%s.template' % (fn_inc_ext,)), prototype=self, data_files=self._data_files, demo_files=self._demo_files)
|
'Wrapper to generate the __init__.py file of the module.'
| @api.model
def generate_module_init_file_details(self):
| return self.generate_file_details('__init__.py', '__init__.py.template', models=bool(self.field_ids))
|
'Finds the models from the list of fields and generates
the __init__ file and each model\'s file (one per class).'
| @api.model
def generate_models_details(self):
| files = []
relations = {}
field_descriptions = (self._field_descriptions or {})
for field in field_descriptions.itervalues():
model = field.get('model_id')
relations.setdefault(model, []).append(field)
files.append(self.generate_models_init_details(relations.keys()))
for (model, custom_fields) in relations.iteritems():
files.append(self.generate_model_details(model, custom_fields))
return files
|
'Wrapper to generate the __init__.py file in models folder.'
| @api.model
def generate_models_init_details(self, ir_models):
| return self.generate_file_details('models/__init__.py', 'models/__init__.py.template', models=[self.friendly_name(ir_model.model) for ir_model in ir_models])
|
'Wrapper to generate the views files.'
| @api.model
def generate_views_details(self):
| relations = {}
for view in self.view_ids:
relations.setdefault(view.model, []).append(view)
views_details = []
for (model, views) in relations.iteritems():
filepath = ('views/%s_view.xml' % (self.friendly_name(self.unprefix(model)),))
views_details.append(self.generate_file_details(filepath, 'views/model_views.xml.template', views=views))
self._data_files.append(filepath)
return views_details
|
'Wrapper to generate the menus files.'
| @api.model
def generate_menus_details(self):
| relations = {}
for menu in self.menu_ids:
if (menu.action and menu.action.res_model):
model = self.unprefix(menu.action.res_model)
else:
model = 'ir_ui'
relations.setdefault(model, []).append(menu)
menus_details = []
for (model_name, menus) in relations.iteritems():
model_name = self.unprefix(model_name)
filepath = ('views/%s_menus.xml' % (self.friendly_name(model_name),))
menus_details.append(self.generate_file_details(filepath, 'views/model_menus.xml.template', menus=menus))
self._data_files.append(filepath)
return menus_details
|
'Wrapper to generate the python file for the model.
:param model: ir.model record.
:param field_descriptions: list of ir.model.fields records.
:return: FileDetails instance.'
| @api.model
def generate_model_details(self, model, field_descriptions):
| python_friendly_name = self.friendly_name(self.unprefix(model.model))
return self.generate_file_details(('models/%s.py' % (python_friendly_name,)), 'models/model_name.py.template', name=python_friendly_name, model=model, fields=field_descriptions)
|
'Generate data and demo files'
| @api.model
def generate_data_files(self):
| (data, demo) = ({}, {})
filters = ([(data, ir_filter) for ir_filter in self.data_ids] + [(demo, ir_filter) for ir_filter in self.demo_ids])
for (target, ir_filter) in filters:
model = ir_filter.model_id
model_obj = self.env[model]
target.setdefault(model, model_obj.browse([]))
target[model] |= model_obj.search(safe_eval(ir_filter.domain))
res = []
for (prefix, model_data, file_list) in [('data', data, self._data_files), ('demo', demo, self._demo_files)]:
for (model_name, records) in model_data.iteritems():
fname = self.friendly_name(self.unprefix(model_name))
filename = ('%s/%s.xml' % (prefix, fname))
self._data_files.append(filename)
res.append(self.generate_file_details(filename, 'data/model_name.xml.template', model=model_name, records=records))
return res
|
'Fix a domain according to unprefixing of fields'
| @classmethod
def fixup_domain(cls, domain):
| res = []
for elem in domain:
if (len(elem) == 3):
elem = list(elem)
elem[0] = cls.unprefix(elem[0])
res.append(elem)
return res
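An illustrative standalone version, assuming unprefix() strips an 'x_' custom-field prefix (an assumption; the real rule lives on the model):

    def unprefix(name):
        # Hypothetical rule: drop a leading 'x_' custom-field prefix.
        return name[2:] if name.startswith('x_') else name

    def fixup_domain(domain):
        res = []
        for elem in domain:
            if len(elem) == 3:
                elem = [unprefix(elem[0])] + list(elem[1:])
            res.append(elem)
        return res

    print(fixup_domain([('x_partner_name', '=', 'Foo'), '|']))
    # [['partner_name', '=', 'Foo'], '|']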
|
'Generate file details from a Jinja2 template.
:param filename: name of the file the content is related to
:param template: path to the file to render the content
:param kwargs: arguments of the template
:return: File_details instance'
| @api.model
def generate_file_details(self, filename, template, **kwargs):
| template = self._env.get_template(template)
kwargs.update({'export_year': date.today().year, 'author': self.author, 'website': self.website, 'license_text': licenses.get_license_text(self.license), 'cr': self._cr, 'fixup_arch': self.fixup_arch, 'is_prefixed': self.is_prefixed, 'unprefix': self.unprefix, 'wrap': wrap})
return self.File_details(filename, template.render(kwargs))
|
'Test generate_files returns a tuple.'
| def test_generate_files(self):
| self.prototype.setup_env(self.api_version)
details = self.prototype.generate_files()
self.assertIsInstance(details, list)
for file_details in details:
self.assertIsInstance(file_details, tuple)
self.assertIsInstance(file_details.filename, basestring)
self.assertIsInstance(file_details.filecontent, basestring)
(name, contents) = file_details
if name.endswith('.py'):
contents = contents.encode('utf-8')
ast.parse(contents)
if pep8:
checker = pep8.Checker(name, contents.splitlines(True))
res = checker.check_all()
self.assertFalse(res, ('Python file %s has pep8 errors:\n%s\n%s' % (name, checker.report.messages, repr(contents))))
elif name.endswith('.xml'):
lxml.etree.fromstring(contents)
|
'Test that the Jinja2 environment is set.'
| def test_set_env(self):
| self.assertIsNone(self.prototype._env)
self.prototype.setup_env(self.api_version)
self.assertIsInstance(self.prototype._env, Environment)
self.assertEqual(self.api_version, self.prototype._api_version)
|
'Test that the return value matches the pattern.'
| def test_friendly_name_return(self):
| name = 'res.partner'
self.assertEqual(self.prototype.friendly_name(name), name.replace('.', '_'))
|
'Test that the assertion is raised.'
| def test_action_export_assert_for_wrong_active_model(self):
| exporter = self.main_model.with_context(active_model='t_active_model').create({})
self.assertRaises(AssertionError, exporter.action_export, [exporter.id])
|
'Test if the wizard is updated during the process.'
| def test_action_export_update_wizard(self):
| exporter = self.main_model.with_context(active_model=self.prototype_model._name, active_id=self.prototype.id).create({})
exporter.action_export(exporter.id)
self.assertEqual(exporter.state, 'get')
self.assertEqual(exporter.name, ('%s.zip' % (self.prototype.name,)))
|
'Test the return value of the method that generates the zip file.'
| def test_zip_files_returns_tuple(self):
| ret = self.main_model.zip_files(self.exporter, [self.prototype])
self.assertIsInstance(ret, tuple)
self.assertIsInstance(ret.zip_file, zipfile.ZipFile)
self.assertIsInstance(ret.stringIO, StringIO.StringIO)
|
'Update all the users concerned by the roles identified by `ids`.'
| @api.multi
def update_users(self):
| users = self.mapped('user_ids')
users.set_groups_from_roles()
return True
|
'Set (replace) the groups following the roles defined on users.
If no role is defined on the user, its groups are left untouched.'
| @api.multi
def set_groups_from_roles(self):
| for user in self:
if (not user.role_line_ids):
continue
group_ids = []
role_lines = user.role_line_ids.filtered((lambda rec: rec.is_enabled))
for role_line in role_lines:
role = role_line.role_id
if role:
group_ids.append(role.group_id.id)
group_ids.extend(role.implied_ids.ids)
group_ids = list(set(group_ids))
vals = {'groups_id': [(6, 0, group_ids)]}
super(ResUsers, user).write(vals)
return True
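For readers unfamiliar with the tuples in vals: these are the standard Odoo x2many write() commands used throughout this section:

    # (3, id)      unlink `id` from the set (the record itself is kept)
    # (4, id)      link `id` into the set
    # (6, 0, ids)  replace the whole set with `ids`
    vals = {'groups_id': [(6, 0, [1, 2, 3])]}  # replace all of the user's groups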
|
'Set debug = True, so that group_no_one is not filtered out of the
user\'s groups'
| @api.model
def _visible_menu_ids(self, debug=False):
| if (not debug):
debug = self.env.user.has_group('base_technical_features.group_technical_features')
return super(IrUiMenu, self)._visible_menu_ids(debug=debug)
|
'Return True for users in the technical features group when
membership of the original group is checked, even if debug mode
is not enabled.'
| def user_has_groups(self, groups):
| if (('base.group_no_one' in groups.split(',')) and self.env.user.has_group('base_technical_features.group_technical_features')):
return True
return super(Base, self).user_has_groups(groups)
|
'Only display the technical features checkbox in the user
preferences if the user has access to them'
| @api.multi
@api.depends('groups_id')
def get_show_technical_features(self):
| users = self.env.ref('base.group_no_one').users
for user in self:
user.show_technical_features = (user in users)
|
'Map user membership to boolean field value'
| @api.multi
@api.depends('groups_id')
def get_technical_features(self):
| users = self.env.ref('base_technical_features.group_technical_features').users
for user in self:
user.technical_features = (user in users)
|
'Map boolean field value to group membership, checking access first'
| @api.multi
def set_technical_features(self):
| group = self.env.ref('base_technical_features.group_technical_features')
for user in self:
if (self.env.ref('base.group_no_one') not in user.groups_id):
raise AccessError(_('The user does not have access to technical features.'))
if user.technical_features:
self.sudo().write({'groups_id': [(4, group.id)]})
else:
self.sudo().write({'groups_id': [(3, group.id)]})
|
'A technical feature is visible to the user with the technical features group'
| def test_01_visible_menus(self):
| menu_obj = self.env['ir.ui.menu'].with_context({'ir.ui.menu.full_list': True})
menu_id = menu_obj.search([('groups_id', '=', self.env.ref('base.group_no_one').id)], limit=1).id
self.env.user.write({'technical_features': False})
self.assertNotIn(menu_id, menu_obj._visible_menu_ids())
self.env.user.write({'technical_features': True})
self.assertIn(menu_id, menu_obj._visible_menu_ids())
|
'A technical field is visible when its form is loaded by a user with the technical features group'
| def test02_visible_fields(self):
| def get_partner_field_invisible():
xml = etree.fromstring(self.env['res.users'].fields_view_get(view_id=self.env.ref('base.view_users_form').id)['arch'].encode('utf-8'))
return xml.xpath('//div/group/field[@name="partner_id"]')[0].get('invisible')
self.env.user.write({'technical_features': False})
self.assertEqual(get_partner_field_invisible(), '1')
self.env.user.write({'technical_features': True})
self.assertEqual(get_partner_field_invisible(), None)
|
'Setting the user pref raises an access error if the user is not in group_no_one'
| def test03_user_access(self):
| user = self.env['res.users'].create({'name': 'Test user technical features', 'login': 'testusertechnicalfeatures', 'groups_id': [(6, 0, [])]})
with api.Environment.manage():
env = api.Environment(self.env.cr, user.id, self.env.context)
with self.assertRaises(AccessError):
env['res.users'].browse(user.id).write({'technical_features': True})
with self.assertRaises(AccessError):
user.write({'technical_features': True})
user.write({'groups_id': [(4, self.env.ref('base.group_no_one').id)]})
with api.Environment.manage():
env = api.Environment(self.env.cr, user.id, self.env.context)
env['res.users'].browse(user.id).write({'technical_features': True})
|
'The inherited implementation crashes for undefined models; only pass
models that still exist.'
| @api.multi
def _inherited_models(self, field_name, arg):
| result = dict(((i, []) for i in self.ids))
existing_model_ids = [this.id for this in self if (this.model in self.env)]
super_result = super(IrModel, self.browse(existing_model_ids))._inherited_models(field_name, arg)
result.update(super_result)
return result
|
'Unlink models upon manual confirmation.'
| @api.multi
def purge(self):
| context_flags = {MODULE_UNINSTALL_FLAG: True, 'no_drop_table': True}
for line in self:
self.env.cr.execute('SELECT id, model from ir_model WHERE model = %s', (line.name,))
row = self.env.cr.fetchone()
if (not row):
continue
self.logger.info('Purging model %s', row[1])
attachments = self.env['ir.attachment'].search([('res_model', '=', line.name)])
if attachments:
self.env.cr.execute('UPDATE ir_attachment SET res_model = NULL WHERE id in %s', (tuple(attachments.ids),))
self.env['ir.model.constraint'].search([('model', '=', line.name)]).unlink()
relations = self.env['ir.model.fields'].search([('relation', '=', row[1])]).with_context(**context_flags)
for relation in relations:
try:
relation.unlink()
except KeyError:
pass
except AttributeError:
pass
self.env['ir.model.relation'].search([('model', '=', line.name)]).with_context(**context_flags).unlink()
self.env['ir.model'].browse([row[0]]).with_context(**context_flags).unlink()
line.write({'purged': True})
return True
|
'Search for models that cannot be instantiated.'
| @api.model
def find(self):
| res = []
self.env.cr.execute('SELECT model from ir_model')
for (model,) in self.env.cr.fetchall():
if (model not in self.env):
res.append((0, 0, {'name': model}))
if (not res):
raise UserError(_('No orphaned models found'))
return res
|
'The inherited implementation crashes for xmlids on undefined models, or
for fields referring to undefined models, so unlink those entries first.'
| @api.model
def _module_data_uninstall(self, modules_to_remove):
| for this in self.search([('module', 'in', modules_to_remove)]):
if (this.model == 'ir.model.fields'):
field = self.env[this.model].with_context(**{MODULE_UNINSTALL_FLAG: True}).browse(this.res_id)
if ((not field.exists()) or (field.model not in self.env)):
this.unlink()
continue
if (this.model not in self.env):
this.unlink()
return super(IrModelData, self)._module_data_uninstall(modules_to_remove)
|
'Uninstall modules upon manual confirmation, then reload
the database.'
| @api.multi
def purge(self):
| module_names = self.filtered((lambda x: (not x.purged))).mapped('name')
modules = self.env['ir.module.module'].search([('name', 'in', module_names)])
if (not modules):
return True
self.logger.info('Purging modules %s', ', '.join(module_names))
modules.button_uninstall()
self.env.cr.commit()
RegistryManager.new(self.env.cr.dbname, update_module=True)
modules.unlink()
return self.write({'purged': True})
|
'Unlink columns upon manual confirmation.'
| @api.multi
def purge(self):
| for line in self:
if line.purged:
continue
model_pool = self.env[line.model_id.model]
self.env.cr.execute('SELECT count(attname) FROM pg_attribute WHERE attrelid = ( SELECT oid FROM pg_class WHERE relname = %s ) AND attname = %s', (model_pool._table, line.name))
if (not self.env.cr.fetchone()[0]):
continue
self.logger.info('Dropping column %s from table %s', line.name, model_pool._table)
self.env.cr.execute('ALTER TABLE %s DROP COLUMN %s', (IdentifierAdapter(model_pool._table), IdentifierAdapter(line.name)))
line.write({'purged': True})
self.env.cr.commit()
return True
|
'From openobject-server/openerp/osv/orm.py:
iterate over the database columns to identify columns
of fields which have been removed.'
| @api.model
def get_orphaned_columns(self, model_pools):
| columns = list(set(
    column
    for model_pool in model_pools
    for column in model_pool._columns
    if not (isinstance(model_pool._columns[column], fields.fields.function)
            and not model_pool._columns[column].store)))
columns += models.MAGIC_COLUMNS
columns += self.blacklist.get(model_pools[0]._table, [])
self.env.cr.execute("SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname=%s AND c.oid=a.attrelid AND a.attisdropped=False AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid') AND a.attname NOT IN %s", (model_pools[0]._table, tuple(columns)))
return [column for (column,) in self.env.cr.fetchall()]
|
'Search for columns that are not in the corresponding model.
Group models by table to prevent false positives for columns
that are only in some of the models sharing the same table.
Example of this is \'sale_id\' not being a field of stock.picking.in'
| @api.model
def find(self):
| res = []
table2model = {}
for model in self.env['ir.model'].search([]):
if (model.model not in self.env):
continue
model_pool = self.env[model.model]
if (not model_pool._auto):
continue
table2model.setdefault(model_pool._table, (model.id, []))[1].append(model_pool)
for (table, model_spec) in table2model.iteritems():
for column in self.get_orphaned_columns(model_spec[1]):
res.append((0, 0, {'name': column, 'model_id': model_spec[0]}))
if (not res):
raise UserError(_('No orphaned columns found'))
return res
|
'Unlink data entries upon manual confirmation.'
| @api.multi
def purge(self):
| to_unlink = self.filtered((lambda x: ((not x.purged) and x.data_id)))
self.logger.info('Purging data entries: %s', to_unlink.mapped('name'))
to_unlink.mapped('data_id').unlink()
return self.write({'purged': True})
|
'Collect all rows from ir_model_data that refer
to a nonexistent model, or to a nonexistent
row in the model\'s table.'
| @api.model
def find(self):
| res = []
data_ids = []
unknown_models = []
self.env.cr.execute('SELECT DISTINCT(model) FROM ir_model_data')
for (model,) in self.env.cr.fetchall():
if (not model):
continue
if (model not in self.env):
unknown_models.append(model)
continue
self.env.cr.execute(
    """
    SELECT id FROM ir_model_data
    WHERE model = %s
    AND res_id IS NOT NULL
    AND NOT EXISTS (
        SELECT id FROM %s WHERE id=ir_model_data.res_id)
    """, (model, IdentifierAdapter(self.env[model]._table)))
data_ids.extend((data_row for (data_row,) in self.env.cr.fetchall()))
data_ids += self.env['ir.model.data'].search([('model', 'in', unknown_models)]).ids
for data in self.env['ir.model.data'].browse(data_ids):
res.append((0, 0, {'data_id': data.id, 'name': ('%s.%s, object of type %s' % (data.module, data.name, data.model))}))
if (not res):
raise UserError(_('No orphaned data entries found'))
return res
|
'Unlink tables upon manual confirmation.'
| @api.multi
def purge(self):
| tables = self.mapped('name')
for line in self:
if line.purged:
continue
self.env.cr.execute("\n SELECT conname, confrelid::regclass, af.attname AS fcol,\n conrelid::regclass, a.attname AS col\n FROM pg_attribute af, pg_attribute a,\n (SELECT conname, conrelid, confrelid,conkey[i] AS conkey,\n confkey[i] AS confkey\n FROM (select conname, conrelid, confrelid, conkey,\n confkey, generate_series(1,array_upper(conkey,1)) AS i\n FROM pg_constraint WHERE contype = 'f') ss) ss2\n WHERE af.attnum = confkey AND af.attrelid = confrelid AND\n a.attnum = conkey AND a.attrelid = conrelid\n AND confrelid::regclass = '%s'::regclass;\n ", (IdentifierAdapter(line.name, quote=False),))
for constraint in self.env.cr.fetchall():
if (constraint[3] in tables):
self.logger.info('Dropping constraint %s on table %s (to be dropped)', constraint[0], constraint[3])
self.env.cr.execute('ALTER TABLE %s DROP CONSTRAINT %s', (IdentifierAdapter(constraint[3]), IdentifierAdapter(constraint[0])))
self.logger.info('Dropping table %s', line.name)
self.env.cr.execute('DROP TABLE %s', (IdentifierAdapter(line.name),))
line.write({'purged': True})
return True
|
'Search for database tables that do not belong to any known model.
Ignore views for now.'
| @api.model
def find(self):
| known_tables = ['wkf_witm_trans']
for model in self.env['ir.model'].search([]):
if (model.model not in self.env):
continue
model_pool = self.env[model.model]
known_tables.append(model_pool._table)
known_tables += [column._sql_names(model_pool)[0] for column in model_pool._columns.values() if ((column._type == 'many2many') and hasattr(column, '_rel'))]
self.env.cr.execute("\n SELECT table_name FROM information_schema.tables\n WHERE table_schema = 'public' AND table_type = 'BASE TABLE'\n AND table_name NOT IN %s", (tuple(known_tables),))
res = [(0, 0, {'name': row[0]}) for row in self.env.cr.fetchall()]
if (not res):
raise UserError(_('No orphaned tables found'))
return res
|
'Search for menu entries whose actions refer to undefined models.'
| @api.model
def find(self):
| res = []
for menu in self.env['ir.ui.menu'].with_context(active_test=False).search([('action', '!=', False)]):
if (menu.action.type != 'ir.actions.act_window'):
continue
if ((menu.action.res_model and (menu.action.res_model not in self.env)) or (menu.action.src_model and (menu.action.src_model not in self.env))):
res.append((0, 0, {'name': menu.complete_name, 'menu_id': menu.id}))
if (not res):
raise UserError(_('No dangling menu entries found'))
return res
|
'Test that Gravatar is pinged for the image'
| @mock.patch(('%s.urllib2' % MODULE_LOCATION))
def test_get_gravatar_base64_opens_correct_uri(self, mk):
| self.model_obj._get_gravatar_base64(self.partner_vals['email'])
expect = hashlib.md5(self.partner_vals['email']).hexdigest()
mk.urlopen.assert_called_once_with(self.url.format(expect))
|
'Test that image result is read'
| @mock.patch(('%s.base64' % MODULE_LOCATION))
@mock.patch(('%s.urllib2' % MODULE_LOCATION))
def test_get_gravatar_base64_returns_encoded_image(self, mk, b64_mk):
| expect = 'Expect'
b64_mk.encodestring.return_value = expect
result = self.model_obj._get_gravatar_base64(self.partner_vals['email'])
self.assertEqual(expect, result)
|
'Test that the resulting gravatar is written to user'
| def test_get_gravatar_image_writes_image(self):
| with mock.patch.object(ResUsers, 'write') as write_mk:
user_id = self._test_record()
with mock.patch.object(user_id, '_get_gravatar_base64') as mk:
expect = 'Expect'
mk.side_effect = ['Fail', expect]
user_id.get_gravatar_image()
write_mk.assert_called_once_with({'image': expect})
|
'Get a related model from its name, for better UI.'
| @api.multi
@api.depends('model')
def _compute_model_id(self):
| for s in self:
s.model_id = self.env['ir.model'].search([('model', '=', s.model)])
|
'Password in clear text.'
| def get_password(self):
| try:
return self._decode_password(self.password)
except Warning as warn:
raise Warning(_(('%s \nAccount: %s %s %s ' % (warn, self.login, self.name, self.technical_name))))
|
'Data in dict form.'
| def get_data(self):
| return self._parse_data(self.data)
|
'Ensure valid input in data field.'
| @api.constrains('data')
def _check_data(self):
| for account in self:
if account.data:
parsed = account._parse_data(account.data)
if (not account._validate_data(parsed)):
raise ValidationError(_('Data not valid'))
|
'Encode password from clear text.'
| def _inverse_set_password(self):
| for rec in self:
rec.password = rec._encode_password(rec.clear_password, rec.environment)
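A standalone sketch of a reversible encode/decode pair of the kind _encode_password/_decode_password imply; base64 here is purely illustrative, not the module's actual scheme:

    import base64

    def encode_password(clear):
        # Illustrative only: a real implementation would use the account's
        # environment key, not plain base64.
        return base64.b64encode(clear.encode('utf-8'))

    def decode_password(encoded):
        return base64.b64decode(encoded).decode('utf-8')

    assert decode_password(encode_password('s3cret')) == 's3cret'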
|