repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---|
naototty/pyflag | src/plugins_old/MemoryForensics/Volatility-1.3_Linux_rc.1/forensics/win32/domcachedump.py | 7 | 4338 | # Volatility
# Copyright (c) 2008 Volatile Systems
# Copyright (c) 2008 Brendan Dolan-Gavitt <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""
@author: Brendan Dolan-Gavitt
@license: GNU General Public License 2.0 or later
@contact: [email protected]
"""
from forensics.win32.rawreg import *
from forensics.win32.hive2 import HiveAddressSpace,HiveFileAddressSpace
from forensics.win32.hashdump import get_bootkey
from forensics.win32.lsasecrets import get_secret_by_name,get_lsa_key
from Crypto.Hash import HMAC
from Crypto.Cipher import ARC4
from struct import unpack
def get_nlkm(secaddr, lsakey, profile):
return get_secret_by_name(secaddr, 'NL$KM', lsakey, profile)
def decrypt_hash(edata, nlkm, ch):
hmac_md5 = HMAC.new(nlkm,ch)
rc4key = hmac_md5.digest()
rc4 = ARC4.new(rc4key)
data = rc4.encrypt(edata)
return data
def parse_cache_entry(cache_data):
(uname_len, domain_len) = unpack("<HH", cache_data[:4])
(domain_name_len,) = unpack("<H", cache_data[60:62])
ch = cache_data[64:80]
enc_data = cache_data[96:]
return (uname_len, domain_len, domain_name_len, enc_data, ch)
def parse_decrypted_cache(dec_data, uname_len,
domain_len, domain_name_len):
uname_off = 72
pad = 2 * ( ( uname_len / 2 ) % 2 )
domain_off = uname_off + uname_len + pad
pad = 2 * ( ( domain_len / 2 ) % 2 )
domain_name_off = domain_off + domain_len + pad
hash = dec_data[:0x10]
username = dec_data[uname_off:uname_off+uname_len]
username = username.decode('utf-16-le')
domain = dec_data[domain_off:domain_off+domain_len]
domain = domain.decode('utf-16-le')
domain_name = dec_data[domain_name_off:domain_name_off+domain_name_len]
domain_name = domain_name.decode('utf-16-le')
return (username, domain, domain_name, hash)
def dump_hashes(sysaddr, secaddr, profile):
bootkey = get_bootkey(sysaddr, profile)
if not bootkey:
return None
lsakey = get_lsa_key(secaddr, bootkey, profile)
if not lsakey:
return None
nlkm = get_nlkm(secaddr, lsakey, profile)
if not nlkm:
return None
root = get_root(secaddr, profile)
if not root:
return None
cache = open_key(root, ["Cache"])
if not cache:
return None
hashes = []
for v in values(cache):
if v.Name == "NL$Control": continue
data = v.vm.read(v.Data, v.DataLength)
(uname_len, domain_len, domain_name_len,
enc_data, ch) = parse_cache_entry(data)
# Skip if nothing in this cache entry
if uname_len == 0:
continue
dec_data = decrypt_hash(enc_data, nlkm, ch)
(username, domain, domain_name,
hash) = parse_decrypted_cache(dec_data, uname_len,
domain_len, domain_name_len)
hashes.append((username, domain, domain_name, hash))
return hashes
def dump_memory_hashes(addr_space, types, syshive, sechive, profile):
sysaddr = HiveAddressSpace(addr_space, types, syshive)
secaddr = HiveAddressSpace(addr_space, types, sechive)
for (u, d, dn, hash) in dump_hashes(sysaddr, secaddr, profile):
print "%s:%s:%s:%s" % (u.lower(), hash.encode('hex'),
d.lower(), dn.lower())
def dump_file_hashes(syshive_fname, sechive_fname, profile):
sysaddr = HiveFileAddressSpace(syshive_fname)
secaddr = HiveFileAddressSpace(sechive_fname)
for (u, d, dn, hash) in dump_hashes(sysaddr, secaddr, profile):
print "%s:%s:%s:%s" % (u.lower(), hash.encode('hex'),
d.lower(), dn.lower())
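# --- Editor's illustration (not part of the original Volatility module) ---
# Given SYSTEM and SECURITY hives exported from a memory image to the
# hypothetical file names below, plus a Volatility 1.3 profile object, the
# entry point above prints one "username:mscache_hash:domain:dns_domain"
# line per cached credential:
#
#     dump_file_hashes("SYSTEM.hive", "SECURITY.hive", profile)
#
# The file names and the 'profile' variable are placeholders.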
| gpl-2.0 |
litchfield/django | django/test/testcases.py | 3 | 55668 | from __future__ import unicode_literals
import difflib
import errno
import json
import os
import posixpath
import re
import socket
import sys
import threading
import unittest
import warnings
from collections import Counter
from copy import copy
from functools import wraps
from unittest.util import safe_repr
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer
from django.core.urlresolvers import clear_url_caches, set_urlconf
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils import six
from django.utils.decorators import classproperty
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import (
unquote, urlparse, urlsplit, urlunsplit,
)
from django.utils.six.moves.urllib.request import url2pathname
from django.views.static import serve
__all__ = ('TestCase', 'TransactionTestCase',
'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
"""
Puts value into a list if it's not already one.
Returns an empty list if value is None.
"""
if value is None:
value = []
elif not isinstance(value, list):
value = [value]
return value
def assert_and_parse_html(self, html, user_msg, msg):
try:
dom = parse_html(html)
except HTMLParseError as e:
standardMsg = '%s\n%s' % (msg, e.msg)
self.fail(self._formatMessage(user_msg, standardMsg))
return dom
class _AssertNumQueriesContext(CaptureQueriesContext):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
super(_AssertNumQueriesContext, self).__init__(connection)
def __exit__(self, exc_type, exc_value, traceback):
super(_AssertNumQueriesContext, self).__exit__(exc_type, exc_value, traceback)
if exc_type is not None:
return
executed = len(self)
self.test_case.assertEqual(
executed, self.num,
"%d queries executed, %d expected\nCaptured queries were:\n%s" % (
executed, self.num,
'\n'.join(
query['sql'] for query in self.captured_queries
)
)
)
class _AssertTemplateUsedContext(object):
def __init__(self, test_case, template_name):
self.test_case = test_case
self.template_name = template_name
self.rendered_templates = []
self.rendered_template_names = []
self.context = ContextList()
def on_template_render(self, sender, signal, template, context, **kwargs):
self.rendered_templates.append(template)
self.rendered_template_names.append(template.name)
self.context.append(copy(context))
def test(self):
return self.template_name in self.rendered_template_names
def message(self):
return '%s was not rendered.' % self.template_name
def __enter__(self):
template_rendered.connect(self.on_template_render)
return self
def __exit__(self, exc_type, exc_value, traceback):
template_rendered.disconnect(self.on_template_render)
if exc_type is not None:
return
if not self.test():
message = self.message()
if len(self.rendered_templates) == 0:
message += ' No template was rendered.'
else:
message += ' Following templates were rendered: %s' % (
', '.join(self.rendered_template_names))
self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
def test(self):
return self.template_name not in self.rendered_template_names
def message(self):
return '%s was rendered.' % self.template_name
class _CursorFailure(object):
def __init__(self, cls_name, wrapped):
self.cls_name = cls_name
self.wrapped = wrapped
def __call__(self):
raise AssertionError(
"Database queries aren't allowed in SimpleTestCase. "
"Either use TestCase or TransactionTestCase to ensure proper test isolation or "
"set %s.allow_database_queries to True to silence this failure." % self.cls_name
)
class SimpleTestCase(unittest.TestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
_overridden_settings = None
_modified_settings = None
# Tests shouldn't be allowed to query the database since
# this base class doesn't enforce any isolation.
allow_database_queries = False
@classmethod
def setUpClass(cls):
super(SimpleTestCase, cls).setUpClass()
if cls._overridden_settings:
cls._cls_overridden_context = override_settings(**cls._overridden_settings)
cls._cls_overridden_context.enable()
if cls._modified_settings:
cls._cls_modified_context = modify_settings(cls._modified_settings)
cls._cls_modified_context.enable()
if not cls.allow_database_queries:
for alias in connections:
connection = connections[alias]
connection.cursor = _CursorFailure(cls.__name__, connection.cursor)
@classmethod
def tearDownClass(cls):
if not cls.allow_database_queries:
for alias in connections:
connection = connections[alias]
connection.cursor = connection.cursor.wrapped
if hasattr(cls, '_cls_modified_context'):
cls._cls_modified_context.disable()
delattr(cls, '_cls_modified_context')
if hasattr(cls, '_cls_overridden_context'):
cls._cls_overridden_context.disable()
delattr(cls, '_cls_overridden_context')
super(SimpleTestCase, cls).tearDownClass()
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
testMethod = getattr(self, self._testMethodName)
skipped = (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False))
if not skipped:
try:
self._pre_setup()
except Exception:
result.addError(self, sys.exc_info())
return
super(SimpleTestCase, self).__call__(result)
if not skipped:
try:
self._post_teardown()
except Exception:
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
"""Performs any pre-test setup. This includes:
* Creating a test client.
* If the class has a 'urls' attribute, replace ROOT_URLCONF with it.
* Clearing the mail test outbox.
"""
self.client = self.client_class()
self._urlconf_setup()
mail.outbox = []
def _urlconf_setup(self):
if hasattr(self, 'urls'):
warnings.warn(
"SimpleTestCase.urls is deprecated and will be removed in "
"Django 1.10. Use @override_settings(ROOT_URLCONF=...) "
"in %s instead." % self.__class__.__name__,
RemovedInDjango110Warning, stacklevel=2)
set_urlconf(None)
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = self.urls
clear_url_caches()
def _post_teardown(self):
"""Performs any post-test things. This includes:
* Putting back the original ROOT_URLCONF if it was changed.
"""
self._urlconf_teardown()
def _urlconf_teardown(self):
if hasattr(self, '_old_root_urlconf'):
set_urlconf(None)
settings.ROOT_URLCONF = self._old_root_urlconf
clear_url_caches()
def settings(self, **kwargs):
"""
A context manager that temporarily sets a setting and reverts to the original value when exiting the context.
"""
return override_settings(**kwargs)
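# Editor's illustration (assumed typical usage, not part of Django source):
# the returned override_settings object is used as a context manager inside
# a test method, e.g.
#
#     def test_with_overrides(self):
#         with self.settings(DEBUG=True, ALLOWED_HOSTS=['testserver']):
#             response = self.client.get('/')
#
# The URL and setting values here are arbitrary examples.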
def modify_settings(self, **kwargs):
"""
A context manager that temporarily applies changes to a list setting and
reverts to the original value when exiting the context.
"""
return modify_settings(**kwargs)
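# Editor's illustration (assumed typical usage, not part of Django source):
# modify_settings() takes per-setting action dictionaries, e.g.
#
#     with self.modify_settings(INSTALLED_APPS={
#             'append': 'myapp',
#             'remove': 'django.contrib.sites'}):
#         ...
#
# 'myapp' is a hypothetical application name.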
def assertRedirects(self, response, expected_url, status_code=302,
target_status_code=200, host=None, msg_prefix='',
fetch_redirect_response=True):
"""Asserts that a response redirected to a specific URL, and that the
redirect URL can be loaded.
Note that assertRedirects won't work for external links since it uses
TestClient to do a request (use fetch_redirect_response=False to check
such links without fetching them).
"""
if host is not None:
warnings.warn(
"The host argument is deprecated and no longer used by assertRedirects",
RemovedInDjango20Warning, stacklevel=2
)
if msg_prefix:
msg_prefix += ": "
if hasattr(response, 'redirect_chain'):
# The request was a followed redirect
self.assertTrue(len(response.redirect_chain) > 0,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
self.assertEqual(response.redirect_chain[0][1], status_code,
msg_prefix + "Initial response didn't redirect as expected:"
" Response code was %d (expected %d)" %
(response.redirect_chain[0][1], status_code))
url, status_code = response.redirect_chain[-1]
scheme, netloc, path, query, fragment = urlsplit(url)
self.assertEqual(response.status_code, target_status_code,
msg_prefix + "Response didn't redirect as expected: Final"
" Response code was %d (expected %d)" %
(response.status_code, target_status_code))
else:
# Not a followed redirect
self.assertEqual(response.status_code, status_code,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
url = response.url
scheme, netloc, path, query, fragment = urlsplit(url)
if fetch_redirect_response:
redirect_response = response.client.get(path, QueryDict(query),
secure=(scheme == 'https'))
# Get the redirection page, using the same client that was used
# to obtain the original response.
self.assertEqual(redirect_response.status_code, target_status_code,
msg_prefix + "Couldn't retrieve redirection page '%s':"
" response code was %d (expected %d)" %
(path, redirect_response.status_code, target_status_code))
if url != expected_url:
# For temporary backwards compatibility, try to compare with a relative url
e_scheme, e_netloc, e_path, e_query, e_fragment = urlsplit(expected_url)
relative_url = urlunsplit(('', '', e_path, e_query, e_fragment))
if url == relative_url:
warnings.warn(
"assertRedirects had to strip the scheme and domain from the "
"expected URL, as it was always added automatically to URLs "
"before Django 1.9. Please update your expected URLs by "
"removing the scheme and domain.",
RemovedInDjango20Warning, stacklevel=2)
expected_url = relative_url
self.assertEqual(url, expected_url,
msg_prefix + "Response redirected to '%s', expected '%s'" %
(url, expected_url))
def _assert_contains(self, response, text, status_code, msg_prefix, html):
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if (hasattr(response, 'render') and callable(response.render)
and not response.is_rendered):
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code))
if response.streaming:
content = b''.join(response.streaming_content)
else:
content = response.content
if not isinstance(text, bytes) or html:
text = force_text(text, encoding=response.charset)
content = content.decode(response.charset)
text_repr = "'%s'" % text
else:
text_repr = repr(text)
if html:
content = assert_and_parse_html(self, content, None,
"Response's content is not valid HTML:")
text = assert_and_parse_html(self, text, None,
"Second argument is not valid HTML:")
real_count = content.count(text)
return (text_repr, real_count, msg_prefix)
def assertContains(self, response, text, count=None, status_code=200,
msg_prefix='', html=False):
"""
Asserts that a response indicates that some content was retrieved
successfully (i.e., the HTTP status code was as expected) and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
if count is not None:
self.assertEqual(real_count, count,
msg_prefix + "Found %d instances of %s in response"
" (expected %d)" % (real_count, text_repr, count))
else:
self.assertTrue(real_count != 0,
msg_prefix + "Couldn't find %s in response" % text_repr)
def assertNotContains(self, response, text, status_code=200,
msg_prefix='', html=False):
"""
Asserts that a response indicates that some content was retrieved
successfully (i.e., the HTTP status code was as expected) and that
``text`` doesn't occur in the content of the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
self.assertEqual(real_count, 0,
msg_prefix + "Response should not contain %s" % text_repr)
def assertFormError(self, response, form, field, errors, msg_prefix=''):
"""
Asserts that a form used to render the response has a specific field
error.
"""
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + "Response did not use any contexts to "
"render the response")
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_form = False
for i, context in enumerate(contexts):
if form not in context:
continue
found_form = True
for err in errors:
if field:
if field in context[form].errors:
field_errors = context[form].errors[field]
self.assertTrue(err in field_errors,
msg_prefix + "The field '%s' on form '%s' in"
" context %d does not contain the error '%s'"
" (actual errors: %s)" %
(field, form, i, err, repr(field_errors)))
elif field in context[form].fields:
self.fail(msg_prefix + "The field '%s' on form '%s'"
" in context %d contains no errors" %
(field, form, i))
else:
self.fail(msg_prefix + "The form '%s' in context %d"
" does not contain the field '%s'" %
(form, i, field))
else:
non_field_errors = context[form].non_field_errors()
self.assertTrue(err in non_field_errors,
msg_prefix + "The form '%s' in context %d does not"
" contain the non-field error '%s'"
" (actual errors: %s)" %
(form, i, err, non_field_errors))
if not found_form:
self.fail(msg_prefix + "The form '%s' was not used to render the"
" response" % form)
def assertFormsetError(self, response, formset, form_index, field, errors,
msg_prefix=''):
"""
Asserts that a formset used to render the response has a specific error.
For field errors, specify the ``form_index`` and the ``field``.
For non-field errors, specify the ``form_index`` and the ``field`` as
None.
For non-form errors, specify ``form_index`` as None and the ``field``
as None.
"""
# Add punctuation to msg_prefix
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + 'Response did not use any contexts to '
'render the response')
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_formset = False
for i, context in enumerate(contexts):
if formset not in context:
continue
found_formset = True
for err in errors:
if field is not None:
if field in context[formset].forms[form_index].errors:
field_errors = context[formset].forms[form_index].errors[field]
self.assertTrue(err in field_errors,
msg_prefix + "The field '%s' on formset '%s', "
"form %d in context %d does not contain the "
"error '%s' (actual errors: %s)" %
(field, formset, form_index, i, err,
repr(field_errors)))
elif field in context[formset].forms[form_index].fields:
self.fail(msg_prefix + "The field '%s' "
"on formset '%s', form %d in "
"context %d contains no errors" %
(field, formset, form_index, i))
else:
self.fail(msg_prefix + "The formset '%s', form %d in "
"context %d does not contain the field '%s'" %
(formset, form_index, i, field))
elif form_index is not None:
non_field_errors = context[formset].forms[form_index].non_field_errors()
self.assertFalse(len(non_field_errors) == 0,
msg_prefix + "The formset '%s', form %d in "
"context %d does not contain any non-field "
"errors." % (formset, form_index, i))
self.assertTrue(err in non_field_errors,
msg_prefix + "The formset '%s', form %d "
"in context %d does not contain the "
"non-field error '%s' "
"(actual errors: %s)" %
(formset, form_index, i, err,
repr(non_field_errors)))
else:
non_form_errors = context[formset].non_form_errors()
self.assertFalse(len(non_form_errors) == 0,
msg_prefix + "The formset '%s' in "
"context %d does not contain any "
"non-form errors." % (formset, i))
self.assertTrue(err in non_form_errors,
msg_prefix + "The formset '%s' in context "
"%d does not contain the "
"non-form error '%s' (actual errors: %s)" %
(formset, i, err, repr(non_form_errors)))
if not found_formset:
self.fail(msg_prefix + "The formset '%s' was not used to render "
"the response" % formset)
def _assert_template_used(self, response, template_name, msg_prefix):
if response is None and template_name is None:
raise TypeError('response and/or template_name argument must be provided')
if msg_prefix:
msg_prefix += ": "
if template_name is not None and response is not None and not hasattr(response, 'templates'):
raise ValueError(
"assertTemplateUsed() and assertTemplateNotUsed() are only "
"usable on responses fetched using the Django test Client."
)
if not hasattr(response, 'templates') or (response is None and template_name):
if response:
template_name = response
response = None
# use this template with context manager
return template_name, None, msg_prefix
template_names = [t.name for t in response.templates if t.name is not
None]
return None, template_names, msg_prefix
def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
"""
Asserts that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix)
if context_mgr_template:
# Use assertTemplateUsed as context manager.
return _AssertTemplateUsedContext(self, context_mgr_template)
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, ', '.join(template_names)))
if count is not None:
self.assertEqual(template_names.count(template_name), count,
msg_prefix + "Template '%s' was expected to be rendered %d "
"time(s) but was actually rendered %d time(s)." %
(template_name, count, template_names.count(template_name)))
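# Editor's illustration (assumed typical usage, not part of Django source):
# both calling styles described in the docstring above, with hypothetical
# template names:
#
#     response = self.client.get('/hypothetical-url/')
#     self.assertTemplateUsed(response, 'index.html')
#
#     with self.assertTemplateUsed('index.html'):
#         render_to_string('index.html')    # render_to_string imported from
#                                           # django.template.loader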
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Asserts that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix)
if context_mgr_template:
# Use assertTemplateNotUsed as context manager.
return _AssertTemplateNotUsedContext(self, context_mgr_template)
self.assertFalse(template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering"
" the response" % template_name)
def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
"""
Asserts that the message in a raised exception matches the passed
value.
Args:
expected_exception: Exception class expected to be raised.
expected_message: expected error message string value.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
# callable_obj was a documented kwarg in Django 1.8 and older.
callable_obj = kwargs.pop('callable_obj', None)
if callable_obj:
warnings.warn(
'The callable_obj kwarg is deprecated. Pass the callable '
'as a positional argument instead.', RemovedInDjango20Warning
)
args = (callable_obj,) + args
return six.assertRaisesRegex(self, expected_exception,
re.escape(expected_message), *args, **kwargs)
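# Editor's illustration (assumed typical usage, not part of Django source):
# because the expected message is passed through re.escape(), it is matched
# as a literal substring of the exception text:
#
#     with self.assertRaisesMessage(ValueError, 'invalid literal'):
#         int('not-a-number')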
def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
field_kwargs=None, empty_value=''):
"""
Asserts that a form field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in empty_values
"""
if field_args is None:
field_args = []
if field_kwargs is None:
field_kwargs = {}
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args,
**dict(field_kwargs, required=False))
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
required.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
with self.assertRaises(ValidationError) as context_manager:
optional.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
# test required inputs
error_required = [force_text(required.error_messages['required'])]
for e in required.empty_values:
with self.assertRaises(ValidationError) as context_manager:
required.clean(e)
self.assertEqual(context_manager.exception.messages,
error_required)
self.assertEqual(optional.clean(e), empty_value)
# test that max_length and min_length are always accepted
if issubclass(fieldclass, CharField):
field_kwargs.update({'min_length': 2, 'max_length': 20})
self.assertIsInstance(fieldclass(*field_args, **field_kwargs),
fieldclass)
def assertHTMLEqual(self, html1, html2, msg=None):
"""
Asserts that two HTML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The passed-in arguments must be valid HTML.
"""
dom1 = assert_and_parse_html(self, html1, msg,
'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg,
'Second argument is not valid HTML:')
if dom1 != dom2:
standardMsg = '%s != %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
six.text_type(dom1).splitlines(),
six.text_type(dom2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Asserts that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(self, html1, msg,
'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg,
'Second argument is not valid HTML:')
if dom1 == dom2:
standardMsg = '%s == %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
needle = assert_and_parse_html(self, needle, None,
'First argument is not valid HTML:')
haystack = assert_and_parse_html(self, haystack, None,
'Second argument is not valid HTML:')
real_count = haystack.count(needle)
if count is not None:
self.assertEqual(real_count, count,
msg_prefix + "Found %d instances of '%s' in response"
" (expected %d)" % (real_count, needle, count))
else:
self.assertTrue(real_count != 0,
msg_prefix + "Couldn't find '%s' in response" % needle)
def assertJSONEqual(self, raw, expected_data, msg=None):
"""
Asserts that the JSON fragments raw and expected_data are equal.
Usual JSON non-significant whitespace rules apply, since the heavy lifting
is delegated to the json library.
"""
try:
data = json.loads(raw)
except ValueError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, six.string_types):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertEqual(data, expected_data, msg=msg)
def assertJSONNotEqual(self, raw, expected_data, msg=None):
"""
Asserts that the JSON fragments raw and expected_data are not equal.
Usual JSON non-significant whitespace rules apply, since the heavy lifting
is delegated to the json library.
"""
try:
data = json.loads(raw)
except ValueError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, six.string_types):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertNotEqual(data, expected_data, msg=msg)
def assertXMLEqual(self, xml1, xml2, msg=None):
"""
Asserts that two XML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The passed-in arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if not result:
standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertXMLNotEqual(self, xml1, xml2, msg=None):
"""
Asserts that two XML snippets are not semantically equivalent.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The passed-in arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if result:
standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
self.fail(self._formatMessage(msg, standardMsg))
class TransactionTestCase(SimpleTestCase):
# Subclasses can ask for resetting of auto increment sequence before each
# test case
reset_sequences = False
# Subclasses can enable only a subset of apps for faster tests
available_apps = None
# Subclasses can define fixtures which will be automatically installed.
fixtures = None
# If transactions aren't available, Django will serialize the database
# contents into a fixture during setup and flush and reload them
# during teardown (as flush does not restore data from migrations).
# This can be slow; this flag allows enabling on a per-case basis.
serialized_rollback = False
# Since tests will be wrapped in a transaction, or serialized if
# transactions are not available, we allow queries to be run.
allow_database_queries = True
def _pre_setup(self):
"""Performs any pre-test setup. This includes:
* If the class has an 'available_apps' attribute, restricting the app
registry to these applications, then firing post_migrate -- it must
run with the correct set of applications for the test case.
* If the class has a 'fixtures' attribute, installing these fixtures.
"""
super(TransactionTestCase, self)._pre_setup()
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
setting_changed.send(sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=self.available_apps,
enter=True)
for db_name in self._databases_names(include_mirrors=False):
emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
try:
self._fixture_setup()
except Exception:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False)
raise
@classmethod
def _databases_names(cls, include_mirrors=True):
# If the test case has a multi_db=True flag, act on all databases,
# including mirrors or not. Otherwise, just on the default DB.
if getattr(cls, 'multi_db', False):
return [alias for alias in connections
if include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']]
else:
return [DEFAULT_DB_ALIAS]
def _reset_sequences(self, db_name):
conn = connections[db_name]
if conn.features.supports_sequence_reset:
sql_list = conn.ops.sequence_reset_by_name_sql(
no_style(), conn.introspection.sequence_list())
if sql_list:
with transaction.atomic(using=db_name):
cursor = conn.cursor()
for sql in sql_list:
cursor.execute(sql)
def _fixture_setup(self):
for db_name in self._databases_names(include_mirrors=False):
# Reset sequences
if self.reset_sequences:
self._reset_sequences(db_name)
# If we need to provide replica initial data from migrated apps,
# then do so.
if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
if self.available_apps is not None:
apps.unset_available_apps()
connections[db_name].creation.deserialize_db_from_string(
connections[db_name]._test_serialized_contents
)
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
if self.fixtures:
# We have to use this slightly awkward syntax due to the fact
# that we're using *args and **kwargs together.
call_command('loaddata', *self.fixtures,
**{'verbosity': 0, 'database': db_name})
def _should_reload_connections(self):
return True
def _post_teardown(self):
"""Performs any post-test things. This includes:
* Flushing the contents of the database, to leave a clean slate. If
the class has an 'available_apps' attribute, post_migrate isn't fired.
* Force-closing the connection, so the next test gets a clean cursor.
"""
try:
self._fixture_teardown()
super(TransactionTestCase, self)._post_teardown()
if self._should_reload_connections():
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does a rollback, the effect
# of these statements is lost, which can affect the operation of
# tests (e.g., losing a timezone setting causing objects to be
# created with the wrong time). To make sure this doesn't
# happen, get a clean connection at the start of every test.
for conn in connections.all():
conn.close()
finally:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False)
def _fixture_teardown(self):
# Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
# when flushing only a subset of the apps
for db_name in self._databases_names(include_mirrors=False):
# Flush the database
call_command('flush', verbosity=0, interactive=False,
database=db_name, reset_sequences=False,
allow_cascade=self.available_apps is not None,
inhibit_post_migrate=self.available_apps is not None)
def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
items = six.moves.map(transform, qs)
if not ordered:
return self.assertEqual(Counter(items), Counter(values), msg=msg)
values = list(values)
# For example qs.iterator() could be passed as qs, but it does not
# have 'ordered' attribute.
if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
raise ValueError("Trying to compare non-ordered queryset "
"against more than one ordered values")
return self.assertEqual(list(items), values, msg=msg)
def assertNumQueries(self, num, func=None, *args, **kwargs):
using = kwargs.pop("using", DEFAULT_DB_ALIAS)
conn = connections[using]
context = _AssertNumQueriesContext(self, num, conn)
if func is None:
return context
with context:
func(*args, **kwargs)
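# Editor's illustration (assumed typical usage, not part of Django source):
# either pass a callable or use the returned context manager:
#
#     self.assertNumQueries(1, MyModel.objects.count)
#
#     with self.assertNumQueries(2):
#         MyModel.objects.create(name='a')
#         MyModel.objects.create(name='b')
#
# 'MyModel' is a hypothetical model; exact counts depend on backend features.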
def connections_support_transactions():
"""
Returns True if all connections support transactions.
"""
return all(conn.features.supports_transactions
for conn in connections.all())
class TestCase(TransactionTestCase):
"""
Similar to TransactionTestCase, but uses `transaction.atomic()` to achieve
test isolation.
In most situations, TestCase should be preferred to TransactionTestCase, as
it allows faster execution. However, there are some situations where using
TransactionTestCase might be necessary (e.g. testing some transactional
behavior).
On database backends with no transaction support, TestCase behaves as
TransactionTestCase.
"""
@classmethod
def _enter_atomics(cls):
"""Helper method to open atomic blocks for multiple databases"""
atomics = {}
for db_name in cls._databases_names():
atomics[db_name] = transaction.atomic(using=db_name)
atomics[db_name].__enter__()
return atomics
@classmethod
def _rollback_atomics(cls, atomics):
"""Rollback atomic blocks opened through the previous method"""
for db_name in reversed(cls._databases_names()):
transaction.set_rollback(True, using=db_name)
atomics[db_name].__exit__(None, None, None)
@classmethod
def setUpClass(cls):
super(TestCase, cls).setUpClass()
if not connections_support_transactions():
return
cls.cls_atomics = cls._enter_atomics()
if cls.fixtures:
for db_name in cls._databases_names(include_mirrors=False):
try:
call_command('loaddata', *cls.fixtures, **{
'verbosity': 0,
'commit': False,
'database': db_name,
})
except Exception:
cls._rollback_atomics(cls.cls_atomics)
raise
cls.setUpTestData()
@classmethod
def tearDownClass(cls):
if connections_support_transactions():
cls._rollback_atomics(cls.cls_atomics)
for conn in connections.all():
conn.close()
super(TestCase, cls).tearDownClass()
@classmethod
def setUpTestData(cls):
"""Load initial data for the TestCase"""
pass
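# Editor's illustration (assumed typical usage, not part of Django source):
# subclasses create per-class shared data here; it is set up once and rolled
# back with the class-level atomic block:
#
#     class AuthorTests(TestCase):
#         @classmethod
#         def setUpTestData(cls):
#             cls.author = Author.objects.create(name='Alice')
#
# 'Author' and 'Alice' are hypothetical.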
def _should_reload_connections(self):
if connections_support_transactions():
return False
return super(TestCase, self)._should_reload_connections()
def _fixture_setup(self):
if not connections_support_transactions():
# If the backend does not support transactions, we should reload
# class data before each test
self.setUpTestData()
return super(TestCase, self)._fixture_setup()
assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
self.atomics = self._enter_atomics()
def _fixture_teardown(self):
if not connections_support_transactions():
return super(TestCase, self)._fixture_teardown()
self._rollback_atomics(self.atomics)
class CheckCondition(object):
"""Descriptor class for deferred condition checking"""
def __init__(self, cond_func):
self.cond_func = cond_func
def __get__(self, obj, objtype):
return self.cond_func()
def _deferredSkip(condition, reason):
def decorator(test_func):
if not (isinstance(test_func, type) and
issubclass(test_func, unittest.TestCase)):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if condition():
raise unittest.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
# Assume a class is decorated
test_item = test_func
test_item.__unittest_skip__ = CheckCondition(condition)
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIfDBFeature(*features):
"""
Skip a test if a database has at least one of the named features.
"""
return _deferredSkip(
lambda: any(getattr(connection.features, feature, False) for feature in features),
"Database has feature(s) %s" % ", ".join(features)
)
def skipUnlessDBFeature(*features):
"""
Skip a test unless a database has all the named features.
"""
return _deferredSkip(
lambda: not all(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support feature(s): %s" % ", ".join(features)
)
def skipUnlessAnyDBFeature(*features):
"""
Skip a test unless a database has any of the named features.
"""
return _deferredSkip(
lambda: not any(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support any of the feature(s): %s" % ", ".join(features)
)
class QuietWSGIRequestHandler(WSGIRequestHandler):
"""
Just a regular WSGIRequestHandler except it doesn't log to the standard
output any of the requests received, so as to not clutter the output for
the tests' results.
"""
def log_message(*args):
pass
class FSFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to a directory, as defined by one of
the *_ROOT settings, and serves those files, publishing them under *_URL.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super(FSFilesHandler, self).__init__()
def _should_handle(self, path):
"""
Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""
Returns the relative path to the file on disk for the given URL.
"""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404:
pass
return super(FSFilesHandler, self).get_response(request)
def serve(self, request):
os_rel_path = self.file_path(request.path)
os_rel_path = posixpath.normpath(unquote(os_rel_path))
# Emulate behavior of django.contrib.staticfiles.views.serve() when it
# invokes staticfiles' finders functionality.
# TODO: Modify if/when that internal API is refactored
final_rel_path = os_rel_path.replace('\\', '/').lstrip('/')
return serve(request, final_rel_path, document_root=self.get_base_dir())
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super(FSFilesHandler, self).__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
"""
Handler for serving static files. A private class that is meant to be used
solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
"""
Handler for serving the media files. A private class that is meant to be
used solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
"""
Thread for running a live http server while the tests are running.
"""
def __init__(self, host, possible_ports, static_handler, connections_override=None):
self.host = host
self.port = None
self.possible_ports = possible_ports
self.is_ready = threading.Event()
self.error = None
self.static_handler = static_handler
self.connections_override = connections_override
super(LiveServerThread, self).__init__()
def run(self):
"""
Sets up the live server and databases, and then loops over handling
http requests.
"""
if self.connections_override:
# Override this thread's database connections with the ones
# provided by the main thread.
for alias, conn in self.connections_override.items():
connections[alias] = conn
try:
# Create the handler for serving static and media files
handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
# Go through the list of possible ports, hoping that we can find
# one that is free to use for the WSGI server.
for index, port in enumerate(self.possible_ports):
try:
self.httpd = WSGIServer(
(self.host, port), QuietWSGIRequestHandler)
except socket.error as e:
if (index + 1 < len(self.possible_ports) and
e.errno == errno.EADDRINUSE):
# This port is already in use, so we go on and try with
# the next one in the list.
continue
else:
# Either none of the given ports are free or the error
# is something other than "Address already in use". So
# we let that error bubble up to the main thread.
raise
else:
# A free port was found.
self.port = port
break
self.httpd.set_app(handler)
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
def terminate(self):
if hasattr(self, 'httpd'):
# Stop the WSGI server
self.httpd.shutdown()
self.httpd.server_close()
class LiveServerTestCase(TransactionTestCase):
"""
Does basically the same as TransactionTestCase but also launches a live
http server in a separate thread so that the tests may use another testing
framework, such as Selenium, instead of the built-in dummy
client.
Note that it inherits from TransactionTestCase instead of TestCase because
the threads do not share the same transactions (unless using in-memory
sqlite) and each thread needs to commit all their transactions so that the
other thread can see the changes.
"""
static_handler = _StaticFilesHandler
@classproperty
def live_server_url(cls):
return 'http://%s:%s' % (
cls.server_thread.host, cls.server_thread.port)
@classmethod
def setUpClass(cls):
super(LiveServerTestCase, cls).setUpClass()
connections_override = {}
for conn in connections.all():
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if conn.vendor == 'sqlite' and conn.is_in_memory_db(conn.settings_dict['NAME']):
# Explicitly enable thread-shareability for this connection
conn.allow_thread_sharing = True
connections_override[conn.alias] = conn
# Launch the live server's thread
specified_address = os.environ.get(
'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081')
# The specified ports may be of the form '8000-8010,8080,9200-9300'
# i.e. a comma-separated list of ports or ranges of ports, so we break
# it down into a detailed list of all possible ports.
possible_ports = []
try:
host, port_ranges = specified_address.split(':')
for port_range in port_ranges.split(','):
# A port range can be of either form: '8000' or '8000-8010'.
extremes = list(map(int, port_range.split('-')))
assert len(extremes) in [1, 2]
if len(extremes) == 1:
# Port range of the form '8000'
possible_ports.append(extremes[0])
else:
# Port range of the form '8000-8010'
for port in range(extremes[0], extremes[1] + 1):
possible_ports.append(port)
except Exception:
msg = 'Invalid address ("%s") for live server.' % specified_address
six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg), sys.exc_info()[2])
cls.server_thread = LiveServerThread(host, possible_ports,
cls.static_handler,
connections_override=connections_override)
cls.server_thread.daemon = True
cls.server_thread.start()
# Wait for the live server to be ready
cls.server_thread.is_ready.wait()
if cls.server_thread.error:
# Clean up behind ourselves, since tearDownClass won't get called in
# case of errors.
cls._tearDownClassInternal()
raise cls.server_thread.error
@classmethod
def _tearDownClassInternal(cls):
# There may not be a 'server_thread' attribute if setUpClass() for some
# reasons has raised an exception.
if hasattr(cls, 'server_thread'):
# Terminate the live server's thread
cls.server_thread.terminate()
cls.server_thread.join()
# Restore sqlite in-memory database connections' non-shareability
for conn in connections.all():
if conn.vendor == 'sqlite' and conn.is_in_memory_db(conn.settings_dict['NAME']):
conn.allow_thread_sharing = False
@classmethod
def tearDownClass(cls):
cls._tearDownClassInternal()
super(LiveServerTestCase, cls).tearDownClass()
| bsd-3-clause |
tomlanyon/dnspython | tests/test_grange.py | 4 | 2916 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import sys
sys.path.insert(0, '../')
try:
import unittest2 as unittest
except ImportError:
import unittest
import dns
import dns.exception
import dns.grange
class GRangeTestCase(unittest.TestCase):
def testFromText1(self):
start, stop, step = dns.grange.from_text('1-1')
self.assertEqual(start, 1)
self.assertEqual(stop, 1)
self.assertEqual(step, 1)
def testFromText2(self):
start, stop, step = dns.grange.from_text('1-4')
self.assertEqual(start, 1)
self.assertEqual(stop, 4)
self.assertEqual(step, 1)
def testFromText3(self):
start, stop, step = dns.grange.from_text('4-255')
self.assertEqual(start, 4)
self.assertEqual(stop, 255)
self.assertEqual(step, 1)
def testFromText4(self):
start, stop, step = dns.grange.from_text('1-1/1')
self.assertEqual(start, 1)
self.assertEqual(stop, 1)
self.assertEqual(step, 1)
def testFromText5(self):
start, stop, step = dns.grange.from_text('1-4/2')
self.assertEqual(start, 1)
self.assertEqual(stop, 4)
self.assertEqual(step, 2)
def testFromText6(self):
start, stop, step = dns.grange.from_text('4-255/77')
self.assertEqual(start, 4)
self.assertEqual(stop, 255)
self.assertEqual(step, 77)
def testFailFromText1(self):
def bad():
start = 2
stop = 1
step = 1
dns.grange.from_text('%d-%d/%d' % (start, stop, step))
self.assertRaises(AssertionError, bad)
def testFailFromText2(self):
def bad():
start = '-1'
stop = 3
step = 1
dns.grange.from_text('%s-%d/%d' % (start, stop, step))
self.assertRaises(dns.exception.SyntaxError, bad)
def testFailFromText3(self):
def bad():
start = 1
stop = 4
step = '-2'
dns.grange.from_text('%d-%d/%s' % (start, stop, step))
self.assertRaises(dns.exception.SyntaxError, bad)
if __name__ == '__main__':
unittest.main()
| isc |
willingc/oh-mainline | vendor/packages/kombu/kombu/__init__.py | 15 | 3835 | """Messaging library for Python"""
from __future__ import absolute_import
from collections import namedtuple
version_info_t = namedtuple(
'version_info_t', ('major', 'minor', 'micro', 'releaselevel', 'serial'),
)
VERSION = version_info_t(3, 0, 22, '', '')
__version__ = '{0.major}.{0.minor}.{0.micro}{0.releaselevel}'.format(VERSION)
__author__ = 'Ask Solem'
__contact__ = '[email protected]'
__homepage__ = 'http://kombu.readthedocs.org'
__docformat__ = 'restructuredtext en'
# -eof meta-
import os
import sys
if sys.version_info < (2, 6): # pragma: no cover
raise Exception('Kombu 3.1 requires Python versions 2.6 or later.')
STATICA_HACK = True
globals()['kcah_acitats'[::-1].upper()] = False
if STATICA_HACK: # pragma: no cover
# This is never executed, but tricks static analyzers (PyDev, PyCharm,
# pylint, etc.) into knowing the types of these symbols, and what
# they contain.
from kombu.connection import Connection, BrokerConnection # noqa
from kombu.entity import Exchange, Queue, binding # noqa
from kombu.messaging import Consumer, Producer # noqa
from kombu.pools import connections, producers # noqa
from kombu.utils.url import parse_url # noqa
from kombu.common import eventloop, uuid # noqa
from kombu.serialization import ( # noqa
enable_insecure_serializers,
disable_insecure_serializers,
)
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType
all_by_module = {
'kombu.connection': ['Connection', 'BrokerConnection'],
'kombu.entity': ['Exchange', 'Queue', 'binding'],
'kombu.messaging': ['Consumer', 'Producer'],
'kombu.pools': ['connections', 'producers'],
'kombu.utils.url': ['parse_url'],
'kombu.common': ['eventloop', 'uuid'],
'kombu.serialization': ['enable_insecure_serializers',
'disable_insecure_serializers'],
}
object_origins = {}
for module, items in all_by_module.items():
for item in items:
object_origins[item] = module
class module(ModuleType):
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(('__file__', '__path__', '__doc__', '__all__',
'__docformat__', '__name__', '__path__', 'VERSION',
'__package__', '__version__', '__author__',
'__contact__', '__homepage__', '__docformat__'))
return result
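# Editor's note (illustrative sketch, not part of kombu): with the lazy
# module installed below, top-level attributes are imported on first access:
#
#     import kombu
#     conn = kombu.Connection('amqp://guest:guest@localhost//')
#     # attribute lookup triggers module.__getattr__, which imports
#     # kombu.connection on demand.
#
# The broker URL is a placeholder.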
# 2.5 does not define __package__
try:
package = __package__
except NameError: # pragma: no cover
package = 'kombu'
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
'__file__': __file__,
'__path__': __path__,
'__doc__': __doc__,
'__all__': tuple(object_origins),
'__version__': __version__,
'__author__': __author__,
'__contact__': __contact__,
'__homepage__': __homepage__,
'__docformat__': __docformat__,
'__package__': package,
'version_info_t': version_info_t,
'VERSION': VERSION})
if os.environ.get('KOMBU_LOG_DEBUG'): # pragma: no cover
os.environ.update(KOMBU_LOG_CHANNEL='1', KOMBU_LOG_CONNECTION='1')
from .utils import debug
debug.setup_logging()
| agpl-3.0 |
wkschwartz/django | django/db/models/fields/mixins.py | 40 | 1801 | from django.core import checks
NOT_PROVIDED = object()
class FieldCacheMixin:
"""Provide an API for working with the model's fields value cache."""
def get_cache_name(self):
raise NotImplementedError
def get_cached_value(self, instance, default=NOT_PROVIDED):
cache_name = self.get_cache_name()
try:
return instance._state.fields_cache[cache_name]
except KeyError:
if default is NOT_PROVIDED:
raise
return default
def is_cached(self, instance):
return self.get_cache_name() in instance._state.fields_cache
def set_cached_value(self, instance, value):
instance._state.fields_cache[self.get_cache_name()] = value
def delete_cached_value(self, instance):
del instance._state.fields_cache[self.get_cache_name()]
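# Editor's illustration (assumed usage, not part of Django source): a
# concrete field supplies the cache key and callers then use the helpers
# above, roughly:
#
#     class HypotheticalRelatedField(FieldCacheMixin, Field):
#         def get_cache_name(self):
#             return self.name
#
#     # e.g. inside a descriptor's __get__():
#     #     if field.is_cached(instance):
#     #         return field.get_cached_value(instance)
#
# Names containing "Hypothetical" are placeholders.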
class CheckFieldDefaultMixin:
_default_hint = ('<valid default>', '<invalid default>')
def _check_default(self):
if self.has_default() and self.default is not None and not callable(self.default):
return [
checks.Warning(
"%s default should be a callable instead of an instance "
"so that it's not shared between all field instances." % (
self.__class__.__name__,
),
hint=(
'Use a callable instead, e.g., use `%s` instead of '
'`%s`.' % self._default_hint
),
obj=self,
id='fields.E010',
)
]
else:
return []
def check(self, **kwargs):
errors = super().check(**kwargs)
errors.extend(self._check_default())
return errors
| bsd-3-clause |
dataxu/ansible | lib/ansible/modules/network/illumos/dladm_vnic.py | 56 | 6606 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Adam Števko <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: dladm_vnic
short_description: Manage VNICs on Solaris/illumos systems.
description:
- Create or delete VNICs on Solaris/illumos systems.
version_added: "2.2"
author: Adam Števko (@xen0l)
options:
name:
description:
- VNIC name.
required: true
link:
description:
- VNIC underlying link name.
required: true
temporary:
description:
- Specifies that the VNIC is temporary. Temporary VNICs
do not persist across reboots.
required: false
default: false
choices: [ "true", "false" ]
mac:
description:
- Sets the VNIC's MAC address. Must be a valid unicast MAC address.
required: false
default: false
aliases: [ "macaddr" ]
vlan:
description:
- Enable VLAN tagging for this VNIC. The VLAN tag will have id
I(vlan).
required: false
default: false
aliases: [ "vlan_id" ]
state:
description:
- Create or delete Solaris/illumos VNIC.
required: false
default: "present"
choices: [ "present", "absent" ]
'''
EXAMPLES = '''
# Create 'vnic0' VNIC over 'bnx0' link
- dladm_vnic:
name: vnic0
link: bnx0
state: present
# Create VNIC with specified MAC and VLAN tag over 'aggr0'
- dladm_vnic:
name: vnic1
link: aggr0
mac: '00:00:5E:00:53:23'
vlan: 4
# Remove 'vnic0' VNIC
- dladm_vnic:
name: vnic0
link: bnx0
state: absent
'''
RETURN = '''
name:
description: VNIC name
returned: always
type: string
sample: "vnic0"
link:
description: VNIC underlying link name
returned: always
type: string
sample: "igb0"
state:
description: state of the target
returned: always
type: string
sample: "present"
temporary:
description: VNIC's persistence
returned: always
type: boolean
sample: "True"
mac:
description: MAC address to use for VNIC
returned: if mac is specified
type: string
sample: "00:00:5E:00:53:42"
vlan:
description: VLAN to use for VNIC
returned: success
type: int
sample: 42
'''
import re
from ansible.module_utils.basic import AnsibleModule
class VNIC(object):
UNICAST_MAC_REGEX = r'^[a-f0-9][2-9a-f0]:([a-f0-9]{2}:){4}[a-f0-9]{2}$'
def __init__(self, module):
self.module = module
self.name = module.params['name']
self.link = module.params['link']
self.mac = module.params['mac']
self.vlan = module.params['vlan']
self.temporary = module.params['temporary']
self.state = module.params['state']
def vnic_exists(self):
cmd = [self.module.get_bin_path('dladm', True)]
cmd.append('show-vnic')
cmd.append(self.name)
(rc, _, _) = self.module.run_command(cmd)
if rc == 0:
return True
else:
return False
def create_vnic(self):
cmd = [self.module.get_bin_path('dladm', True)]
cmd.append('create-vnic')
if self.temporary:
cmd.append('-t')
if self.mac:
cmd.append('-m')
cmd.append(self.mac)
if self.vlan:
cmd.append('-v')
cmd.append(self.vlan)
cmd.append('-l')
cmd.append(self.link)
cmd.append(self.name)
return self.module.run_command(cmd)
def delete_vnic(self):
cmd = [self.module.get_bin_path('dladm', True)]
cmd.append('delete-vnic')
if self.temporary:
cmd.append('-t')
cmd.append(self.name)
return self.module.run_command(cmd)
def is_valid_unicast_mac(self):
mac_re = re.match(self.UNICAST_MAC_REGEX, self.mac)
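        # Note: despite its name, this returns True when the MAC does *not*
        # match the unicast pattern; main() treats a True result as an
        # invalid address and calls fail_json().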
return mac_re is None
def is_valid_vlan_id(self):
return 0 <= self.vlan <= 4095
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
link=dict(required=True),
mac=dict(default=None, aliases=['macaddr']),
vlan=dict(default=None, aliases=['vlan_id']),
temporary=dict(default=False, type='bool'),
state=dict(default='present', choices=['absent', 'present']),
),
supports_check_mode=True
)
vnic = VNIC(module)
rc = None
out = ''
err = ''
result = {}
result['name'] = vnic.name
result['link'] = vnic.link
result['state'] = vnic.state
result['temporary'] = vnic.temporary
if vnic.mac is not None:
if vnic.is_valid_unicast_mac():
module.fail_json(msg='Invalid unicast MAC address',
mac=vnic.mac,
name=vnic.name,
state=vnic.state,
link=vnic.link,
vlan=vnic.vlan)
result['mac'] = vnic.mac
if vnic.vlan is not None:
if vnic.is_valid_vlan_id():
module.fail_json(msg='Invalid VLAN tag',
mac=vnic.mac,
name=vnic.name,
state=vnic.state,
link=vnic.link,
vlan=vnic.vlan)
result['vlan'] = vnic.vlan
if vnic.state == 'absent':
if vnic.vnic_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = vnic.delete_vnic()
if rc != 0:
module.fail_json(name=vnic.name, msg=err, rc=rc)
elif vnic.state == 'present':
if not vnic.vnic_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = vnic.create_vnic()
if rc is not None and rc != 0:
module.fail_json(name=vnic.name, msg=err, rc=rc)
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
njwilson23/scipy | scipy/stats/mstats_extras.py | 67 | 14221 | """
Additional statistics functions with support for masked arrays.
"""
# Original author (2007): Pierre GF Gerard-Marchant
from __future__ import division, print_function, absolute_import
__all__ = ['compare_medians_ms',
'hdquantiles', 'hdmedian', 'hdquantiles_sd',
'idealfourths',
'median_cihs','mjci','mquantiles_cimj',
'rsh',
'trimmed_mean_ci',]
import numpy as np
from numpy import float_, int_, ndarray
import numpy.ma as ma
from numpy.ma import MaskedArray
from . import mstats_basic as mstats
from scipy.stats.distributions import norm, beta, t, binom
def hdquantiles(data, prob=list([.25,.5,.75]), axis=None, var=False,):
"""
Computes quantile estimates with the Harrell-Davis method.
The quantile estimates are calculated as a weighted linear combination
of order statistics.
Parameters
----------
data : array_like
Data array.
prob : sequence, optional
Sequence of quantiles to compute.
axis : int or None, optional
Axis along which to compute the quantiles. If None, use a flattened
array.
var : bool, optional
Whether to return the variance of the estimate.
Returns
-------
hdquantiles : MaskedArray
A (p,) array of quantiles (if `var` is False), or a (2,p) array of
quantiles and variances (if `var` is True), where ``p`` is the
number of quantiles.
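    Examples
    --------
    A minimal illustrative call (sample values are arbitrary; the public
    ``scipy.stats.mstats`` namespace is assumed to re-export this function):
    >>> import numpy as np
    >>> from scipy.stats.mstats import hdquantiles
    >>> sample = np.array([1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])
    >>> hd = hdquantiles(sample, prob=[0.25, 0.5, 0.75])
    >>> hd.shape
    (3,)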
"""
def _hd_1D(data,prob,var):
"Computes the HD quantiles for a 1D array. Returns nan for invalid data."
xsorted = np.squeeze(np.sort(data.compressed().view(ndarray)))
# Don't use length here, in case we have a numpy scalar
n = xsorted.size
hd = np.empty((2,len(prob)), float_)
if n < 2:
hd.flat = np.nan
if var:
return hd
return hd[0]
v = np.arange(n+1) / float(n)
betacdf = beta.cdf
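        # Harrell-Davis weights: for each probability p, the weight on the
        # i-th order statistic is the increment of the Beta((n+1)p, (n+1)(1-p))
        # CDF over [(i-1)/n, i/n]; the quantile estimate is the weighted sum.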
for (i,p) in enumerate(prob):
_w = betacdf(v, (n+1)*p, (n+1)*(1-p))
w = _w[1:] - _w[:-1]
hd_mean = np.dot(w, xsorted)
hd[0,i] = hd_mean
#
hd[1,i] = np.dot(w, (xsorted-hd_mean)**2)
#
hd[0, prob == 0] = xsorted[0]
hd[0, prob == 1] = xsorted[-1]
if var:
hd[1, prob == 0] = hd[1, prob == 1] = np.nan
return hd
return hd[0]
# Initialization & checks
data = ma.array(data, copy=False, dtype=float_)
p = np.array(prob, copy=False, ndmin=1)
# Computes quantiles along axis (or globally)
if (axis is None) or (data.ndim == 1):
result = _hd_1D(data, p, var)
else:
if data.ndim > 2:
raise ValueError("Array 'data' must be at most two dimensional, "
"but got data.ndim = %d" % data.ndim)
result = ma.apply_along_axis(_hd_1D, axis, data, p, var)
return ma.fix_invalid(result, copy=False)
def hdmedian(data, axis=-1, var=False):
"""
Returns the Harrell-Davis estimate of the median along the given axis.
Parameters
----------
data : ndarray
Data array.
axis : int, optional
Axis along which to compute the quantiles. If None, use a flattened
array.
var : bool, optional
Whether to return the variance of the estimate.
"""
result = hdquantiles(data,[0.5], axis=axis, var=var)
return result.squeeze()
def hdquantiles_sd(data, prob=list([.25,.5,.75]), axis=None):
"""
The standard error of the Harrell-Davis quantile estimates by jackknife.
Parameters
----------
data : array_like
Data array.
prob : sequence, optional
Sequence of quantiles to compute.
axis : int, optional
Axis along which to compute the quantiles. If None, use a flattened
array.
Returns
-------
hdquantiles_sd : MaskedArray
Standard error of the Harrell-Davis quantile estimates.
"""
def _hdsd_1D(data,prob):
"Computes the std error for 1D arrays."
xsorted = np.sort(data.compressed())
n = len(xsorted)
#.........
hdsd = np.empty(len(prob), float_)
if n < 2:
hdsd.flat = np.nan
vv = np.arange(n) / float(n-1)
betacdf = beta.cdf
for (i,p) in enumerate(prob):
_w = betacdf(vv, (n+1)*p, (n+1)*(1-p))
w = _w[1:] - _w[:-1]
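            # Jackknife step: mx_ recomputes the weighted estimate with the
            # k-th observation left out, for every k; the standard error is
            # then derived from the variance of these leave-one-out values.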
mx_ = np.fromiter([np.dot(w,xsorted[np.r_[list(range(0,k)),
list(range(k+1,n))].astype(int_)])
for k in range(n)], dtype=float_)
mx_var = np.array(mx_.var(), copy=False, ndmin=1) * n / float(n-1)
hdsd[i] = float(n-1) * np.sqrt(np.diag(mx_var).diagonal() / float(n))
return hdsd
# Initialization & checks
data = ma.array(data, copy=False, dtype=float_)
p = np.array(prob, copy=False, ndmin=1)
# Computes quantiles along axis (or globally)
if (axis is None):
result = _hdsd_1D(data, p)
else:
if data.ndim > 2:
raise ValueError("Array 'data' must be at most two dimensional, "
"but got data.ndim = %d" % data.ndim)
result = ma.apply_along_axis(_hdsd_1D, axis, data, p)
return ma.fix_invalid(result, copy=False).ravel()
def trimmed_mean_ci(data, limits=(0.2,0.2), inclusive=(True,True),
alpha=0.05, axis=None):
"""
Selected confidence interval of the trimmed mean along the given axis.
Parameters
----------
data : array_like
Input data.
limits : {None, tuple}, optional
None or a two item tuple.
Tuple of the percentages to cut on each side of the array, with respect
to the number of unmasked data, as floats between 0. and 1. If ``n``
is the number of unmasked data before trimming, then
(``n * limits[0]``)th smallest data and (``n * limits[1]``)th
largest data are masked. The total number of unmasked data after
trimming is ``n * (1. - sum(limits))``.
The value of one limit can be set to None to indicate an open interval.
Defaults to (0.2, 0.2).
inclusive : (2,) tuple of boolean, optional
If relative==False, tuple indicating whether values exactly equal to
the absolute limits are allowed.
If relative==True, tuple indicating whether the number of data being
masked on each side should be rounded (True) or truncated (False).
Defaults to (True, True).
alpha : float, optional
Confidence level of the intervals.
Defaults to 0.05.
axis : int, optional
Axis along which to cut. If None, uses a flattened version of `data`.
Defaults to None.
Returns
-------
trimmed_mean_ci : (2,) ndarray
The lower and upper confidence intervals of the trimmed data.
"""
data = ma.array(data, copy=False)
trimmed = mstats.trimr(data, limits=limits, inclusive=inclusive, axis=axis)
tmean = trimmed.mean(axis)
tstde = mstats.trimmed_stde(data,limits=limits,inclusive=inclusive,axis=axis)
df = trimmed.count(axis) - 1
tppf = t.ppf(1-alpha/2.,df)
return np.array((tmean - tppf*tstde, tmean+tppf*tstde))
def mjci(data, prob=[0.25,0.5,0.75], axis=None):
"""
Returns the Maritz-Jarrett estimators of the standard error of selected
experimental quantiles of the data.
Parameters
----------
data : ndarray
Data array.
prob : sequence, optional
Sequence of quantiles to compute.
axis : int or None, optional
Axis along which to compute the quantiles. If None, use a flattened
array.
"""
def _mjci_1D(data, p):
data = np.sort(data.compressed())
n = data.size
prob = (np.array(p) * n + 0.5).astype(int_)
betacdf = beta.cdf
mj = np.empty(len(prob), float_)
x = np.arange(1,n+1, dtype=float_) / n
y = x - 1./n
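        # Maritz-Jarrett: weight each order statistic by the increment of the
        # Beta(m-1, n-m) CDF over [(i-1)/n, i/n], then take
        # sqrt(E[X^2] - E[X]^2) of the weighted values as the standard error.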
for (i,m) in enumerate(prob):
W = betacdf(x,m-1,n-m) - betacdf(y,m-1,n-m)
C1 = np.dot(W,data)
C2 = np.dot(W,data**2)
mj[i] = np.sqrt(C2 - C1**2)
return mj
data = ma.array(data, copy=False)
if data.ndim > 2:
raise ValueError("Array 'data' must be at most two dimensional, "
"but got data.ndim = %d" % data.ndim)
p = np.array(prob, copy=False, ndmin=1)
# Computes quantiles along axis (or globally)
if (axis is None):
return _mjci_1D(data, p)
else:
return ma.apply_along_axis(_mjci_1D, axis, data, p)
def mquantiles_cimj(data, prob=[0.25,0.50,0.75], alpha=0.05, axis=None):
"""
Computes the alpha confidence interval for the selected quantiles of the
data, with Maritz-Jarrett estimators.
Parameters
----------
data : ndarray
Data array.
prob : sequence, optional
Sequence of quantiles to compute.
alpha : float, optional
Confidence level of the intervals.
axis : int or None, optional
Axis along which to compute the quantiles.
If None, use a flattened array.
"""
alpha = min(alpha, 1-alpha)
z = norm.ppf(1-alpha/2.)
xq = mstats.mquantiles(data, prob, alphap=0, betap=0, axis=axis)
smj = mjci(data, prob, axis=axis)
return (xq - z * smj, xq + z * smj)
def median_cihs(data, alpha=0.05, axis=None):
"""
Computes the alpha-level confidence interval for the median of the data.
    Uses the Hettmansperger-Sheather method.
Parameters
----------
data : array_like
Input data. Masked values are discarded. The input should be 1D only,
or `axis` should be set to None.
alpha : float, optional
Confidence level of the intervals.
axis : int or None, optional
Axis along which to compute the quantiles. If None, use a flattened
array.
Returns
-------
median_cihs :
Alpha level confidence interval.
"""
def _cihs_1D(data, alpha):
data = np.sort(data.compressed())
n = len(data)
alpha = min(alpha, 1-alpha)
k = int(binom._ppf(alpha/2., n, 0.5))
gk = binom.cdf(n-k,n,0.5) - binom.cdf(k-1,n,0.5)
if gk < 1-alpha:
k -= 1
gk = binom.cdf(n-k,n,0.5) - binom.cdf(k-1,n,0.5)
gkk = binom.cdf(n-k-1,n,0.5) - binom.cdf(k,n,0.5)
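        # gk and gkk are the exact binomial coverages of two nested
        # order-statistic intervals; I interpolates between them so the
        # returned limits attain the requested 1 - alpha coverage.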
I = (gk - 1 + alpha)/(gk - gkk)
lambd = (n-k) * I / float(k + (n-2*k)*I)
lims = (lambd*data[k] + (1-lambd)*data[k-1],
lambd*data[n-k-1] + (1-lambd)*data[n-k])
return lims
    data = ma.array(data, copy=False)
# Computes quantiles along axis (or globally)
if (axis is None):
result = _cihs_1D(data.compressed(), alpha)
else:
if data.ndim > 2:
raise ValueError("Array 'data' must be at most two dimensional, "
"but got data.ndim = %d" % data.ndim)
result = ma.apply_along_axis(_cihs_1D, axis, data, alpha)
return result
def compare_medians_ms(group_1, group_2, axis=None):
"""
Compares the medians from two independent groups along the given axis.
The comparison is performed using the McKean-Schrader estimate of the
standard error of the medians.
Parameters
----------
group_1 : array_like
First dataset.
group_2 : array_like
Second dataset.
axis : int, optional
Axis along which the medians are estimated. If None, the arrays are
flattened. If `axis` is not None, then `group_1` and `group_2`
should have the same shape.
Returns
-------
compare_medians_ms : {float, ndarray}
If `axis` is None, then returns a float, otherwise returns a 1-D
ndarray of floats with a length equal to the length of `group_1`
along `axis`.
"""
(med_1, med_2) = (ma.median(group_1,axis=axis), ma.median(group_2,axis=axis))
(std_1, std_2) = (mstats.stde_median(group_1, axis=axis),
mstats.stde_median(group_2, axis=axis))
W = np.abs(med_1 - med_2) / ma.sqrt(std_1**2 + std_2**2)
return 1 - norm.cdf(W)
def idealfourths(data, axis=None):
"""
Returns an estimate of the lower and upper quartiles.
Uses the ideal fourths algorithm.
Parameters
----------
data : array_like
Input array.
axis : int, optional
Axis along which the quartiles are estimated. If None, the arrays are
flattened.
Returns
-------
idealfourths : {list of floats, masked array}
Returns the two internal values that divide `data` into four parts
using the ideal fourths algorithm either along the flattened array
(if `axis` is None) or along `axis` of `data`.
"""
def _idf(data):
x = data.compressed()
n = len(x)
if n < 3:
return [np.nan,np.nan]
(j,h) = divmod(n/4. + 5/12.,1)
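        # Ideal-fourths depth n/4 + 5/12 is split into an integer part j and
        # a fractional part h, used to interpolate between the two order
        # statistics adjacent to each quartile.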
j = int(j)
qlo = (1-h)*x[j-1] + h*x[j]
k = n - j
qup = (1-h)*x[k] + h*x[k-1]
return [qlo, qup]
data = ma.sort(data, axis=axis).view(MaskedArray)
if (axis is None):
return _idf(data)
else:
return ma.apply_along_axis(_idf, axis, data)
def rsh(data, points=None):
"""
Evaluates Rosenblatt's shifted histogram estimators for each point
on the dataset 'data'.
Parameters
----------
data : sequence
Input data. Masked values are ignored.
points : sequence or None, optional
Sequence of points where to evaluate Rosenblatt shifted histogram.
If None, use the data.
"""
data = ma.array(data, copy=False)
if points is None:
points = data
else:
points = np.array(points, copy=False, ndmin=1)
if data.ndim != 1:
raise AttributeError("The input array should be 1D only !")
n = data.count()
r = idealfourths(data, axis=None)
h = 1.2 * (r[-1]-r[0]) / n**(1./5)
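    # Bandwidth: 1.2 times the ideal-fourths spread, shrinking as n**(1/5);
    # each point's estimate counts the data lying within +/- h of it.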
nhi = (data[:,None] <= points[None,:] + h).sum(0)
nlo = (data[:,None] < points[None,:] - h).sum(0)
return (nhi-nlo) / (2.*n*h)
| bsd-3-clause |
beernarrd/gramps | gramps/gen/filters/rules/citation/_hasnote.py | 6 | 1806 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2007 Donald N. Allingham
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2008 Jerome Rapinat
# Copyright (C) 2008 Benny Malengier
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._hasnotebase import HasNoteBase
#-------------------------------------------------------------------------
# "Sources having notes"
#-------------------------------------------------------------------------
class HasNote(HasNoteBase):
"""Citations having notes"""
name = _('Citations having <count> notes')
description = _("Matches citations having a certain number of notes")
| gpl-2.0 |
seanjohnson20/BudgetBuddy | routes.py | 1 | 18267 | # coding: utf-8
## utf-8 line included so Jinja templates can handle '%.02f' in them.
from flask import Flask, render_template, request, g, session, flash, \
redirect, url_for, abort
from database import db_session, engine
from models import User, Accounts, Categories, Transactions, Goals
from flask import Flask
from flask.ext.mail import Message
from config import *
from forms import SignupForm, SigninForm, TransactionForm, AccountForm, CategoryForm, GoalForm, EditAcctForm, EditCatForm, EditGoalForm, EditTransForm
from functools import wraps
from sqlalchemy import func
from datetime import date, datetime
today = date.today()
app = Flask(__name__)
app.config.from_object('config')
## -----------------------No Login Required ------------------------------ ##
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = SignupForm()
if 'email' in session:
return redirect(url_for('profile'))
if request.method == 'POST':
if form.validate() == False:
return render_template('signup.html', form=form)
else:
newuser = User(form.firstname.data, form.lastname.data, form.email.data, form.password.data)
db_session.add(newuser)
db_session.commit()
session['email'] = newuser.email
flash('You have successfully signed up and you are logged in')
print (str(session['email']),'has been successfully signed up and logged in')
return redirect(url_for('home'))
elif request.method == 'GET':
print('Guest is on signup page.')
return render_template('signup.html', form=form)
@app.route('/signin', methods=['GET', 'POST'])
def signin():
form = SigninForm()
if request.method == 'POST':
if form.validate() == False:
return render_template('signin.html', form=form)
else:
session['email'] = form.email.data
flash('You are logged in')
print (str(session['email']),'is logged in')
return redirect(url_for('home'))
elif request.method == 'GET':
return render_template('signin.html', form=form)
@app.route("/")
def index():
if 'email' in session:
print (str(session['email']),'is on Index')
else:
print('Guest is on Index')
return render_template('index.html')
# End User - Terms of Use
@app.route("/terms/")
def terms():
if 'email' in session:
print (str(session['email']),'is on Terms.html')
else:
print('Guest is on Terms.html')
return render_template('terms.html')
@app.route("/about/")
def about():
if 'email' in session:
print (str(session['email']),'is on About')
else:
print('Guest is on About')
return render_template('about.html')
@app.route("/help/")
def help():
if 'email' in session:
print (str(session['email']),'is on Help')
else:
print('Guest is on Help')
return render_template('help.html')
## -----------------------Login Required ------------------------------ ##
def login_required(test):
@wraps(test)
def wrap(*args, **kwargs):
if 'email' in session:
return test(*args, **kwargs)
else:
flash('You need to login first')
print('Guest: You need to login first')
return redirect(url_for('signin'))
return wrap
@app.route("/home/")
@login_required
def home():
#goals by session user
con=engine.connect()
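    # The goals query also derives, per goal, the days/months left until the
    # target date and the monthly amount needed to reach it (remaining amount
    # divided by the number of 30-day periods left).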
goals = con.execute("""select id, email, category, description, target, round(amount/((julianday(target)-julianday('now'))/30),2) as 'monthly', round((((julianday(target)-julianday(datetime('now')))/30)),1) as 'months',
round((julianday(target)-julianday(datetime('now'))),0) as 'days', round(goals.amount,0) as 'amount'
from goals
where email=:param
order by category""", {"param":session['email']} )
progress = con.execute("""select goals.category, goals.description, ifnull(round(sum(transactions.amount),2), 0) as 'sum', round(((julianday(target)-julianday(datetime('now')))/30),1) as 'months', round((julianday(target)-julianday(datetime('now'))),0) as 'days',
round(goals.amount,2) as 'goal', ifnull((sum(transactions.amount)/goals.amount*100),0) as 'progress'
from goals LEFT JOIN transactions on goals.category=transactions.category and goals.description=transactions.goal
and goals.email=transactions.email
where goals.email=:param
and goals.amount is not null
group by goals.category, goals.description
order by goals.category""", {"param":session['email']} )
#transactions by session user
transactions = con.execute("select * from transactions where email=:param order by trans_date desc", {"param":session['email']} )
#accounts by session user
accounts = con.execute("select accounts.id, accounts.name, ifnull(round(sum(transactions.amount),2),0) as 'sum' from accounts left join transactions on accounts.name=transactions.account and accounts.email=transactions.email where accounts.email=:param group by accounts.name order by accounts.name", {"param":session['email']} )
#categories by session user
categories = con.execute("select categories.id, categories.name, ifnull(round(sum(transactions.amount),2),0) as 'sum' from categories left join transactions on categories.name=transactions.category and categories.email=transactions.email where categories.email=:param group by categories.name order by categories.name", {"param":session['email']} )
print (str(session['email']),'is on Home')
return render_template('home.html', accounts=accounts, transactions=transactions, categories=categories, goals=goals, progress=progress, today=today)
@app.route("/add_goal/", methods=['GET', 'POST'])
@login_required
def add_goal():
print (str(session['email']),'is on add_goal')
email = session['email']
form = GoalForm()
form.category.choices = [(c.name,c.name) for c in Categories.query.filter_by(email=email)]
if request.method == 'POST':
if form.validate() == False:
print ("validate failed: ",session['email'], form.category.data, form.target.data, form.description.data, form.amount.data)
return render_template('addgoal.html', form=form)
else:
newgoal = Goals(str(session['email']), str(form.category.data), str(form.target.data), str(form.description.data), float(form.amount.data))
print ("validated goal data: ",session['email'], form.category.data, form.target.data, form.description.data, form.amount.data)
db_session.add(newgoal)
db_session.commit()
flash('You created a new goal.')
print (str(session['email']),'has successfully added a new goal')
return redirect('/home/')
print ("submit failed: ",session['email'], form.category.data, form.target.data, form.description.data, form.amount.data)
return render_template('addgoal.html', form=form)
"""
For each of the following (Acct, Cat, Goal, Trans) I have an Add Route which is self-explanatory,
an Edit Route, which I using expressly to pass an obj ID into a form, and a Mod Route,
which receives the populated form and updates the database on submit.
This seems kludgy to me, but it's the only way I could find to pass the ID and object appropriately.
"""
@app.route('/edit_goal/<int:id>/')
@login_required
def edit_goal(id):
email = session['email']
print 'Edit_goal says ID = ', id
print (str(session['email']),'is on edit_goal')
goal = Goals.query.get(id)
form = EditGoalForm(obj=goal)
form.category.choices = [(c.name,c.name) for c in Categories.query.filter_by(email=email)]
return render_template('editgoal.html', form=form)
@app.route("/mod_goal/", methods=['GET', 'POST'])
@login_required
def mod_goal():
email = session['email']
print (str(session['email']),'is on mod_goal')
form = EditGoalForm()
if request.method == 'POST':
db_session.query(Goals).filter_by(id = form.id.data).\
update({"category":form.category.data, "target":form.target.data, "description":form.description.data, "amount":form.amount.data}, synchronize_session=False)
db_session.commit()
flash('You modified a goal.')
print (str(session['email']),'has successfully modified a goal')
return redirect(url_for('home'))
return render_template('editgoal.html', form=form)
@app.route("/add_trans/", methods=['GET', 'POST'])
@login_required
def add_trans():
print (str(session['email']),'is on add_trans')
email = session['email']
form = TransactionForm()
form.account.choices = [(a.name,a.name) for a in Accounts.query.filter_by(email=email).order_by(Accounts.name)]
form.category.choices = [(c.name,c.name) for c in Categories.query.filter_by(email=email).order_by(Categories.name)]
form.goal.choices = [(g.description,g.description) for g in Goals.query.filter_by(email=email).order_by(Goals.description)]
if request.method == 'POST':
if form.validate() == False:
print ("validate failed: ",session['email'], form.account.data, form.category.data, form.notes.data, form.amount.data)
return render_template('addtrans.html', form=form)
else:
newtrans = Transactions(str(session['email']), str(form.account.data), str(form.category.data), str(form.goal.data), today, str(form.notes.data), float(form.amount.data))
print ("validated Transaction data: ", session['email'], form.account.data, form.category.data, str(today), form.notes.data, form.amount.data)
db_session.add(newtrans)
db_session.commit()
flash('You created a new transaction.')
print (str(session['email']),'has successfully added a new transaction')
return redirect('/home/',)
print ("submit failed: ",session['email'], form.account.data, form.category.data, today, form.notes.data, form.amount.data)
return render_template('addtrans.html', form=form)
@app.route('/edit_trans/<int:id>/')
@login_required
def edit_trans(id):
email = session['email']
print 'Edit_Trans says ID = ', id
print (str(session['email']),'is on edit_trans')
acct = Transactions.query.get(id)
form = EditTransForm(obj=acct)
form.account.choices = [(a.name,a.name) for a in Accounts.query.filter_by(email=email).order_by(Accounts.name)]
form.category.choices = [(c.name,c.name) for c in Categories.query.filter_by(email=email).order_by(Categories.name)]
form.goal.choices = [(g.description,g.description) for g in Goals.query.filter_by(email=email).order_by(Goals.description)]
return render_template('edittrans.html', form=form)
@app.route("/mod_trans/", methods=['GET', 'POST'])
@login_required
def mod_trans():
email = session['email']
print (str(session['email']),'is on mod_trans')
form = EditTransForm()
if request.method == 'POST':
db_session.query(Transactions).filter_by(id = form.id.data).\
update({"account":form.account.data, "category":form.category.data, "goal":form.goal.data, "notes":form.notes.data, "amount":form.amount.data}, synchronize_session=False)
db_session.commit()
flash('You modified a transaction.')
print (str(session['email']),'has successfully modified a transaction')
return redirect(url_for('home'))
return render_template('edittrans.html', form=form)
@app.route("/add_acct/", methods=['GET', 'POST'])
@login_required
def add_acct():
print (str(session['email']),'is on add_acct')
form = AccountForm()
if request.method == 'POST':
if form.validate() == False:
return render_template('addacct.html', form=form)
else:
newacct = Accounts(form.name.data, session['email'])
db_session.add(newacct)
db_session.commit()
flash('You created a new account.')
print (str(session['email']),'has successfully added a new account')
            return redirect(url_for('home'))
elif request.method == 'GET':
print (str(session['email']),'is still on add_acct')
return render_template('addacct.html', form=form)
@app.route('/edit_acct/<int:id>/')
@login_required
def edit_acct(id):
print 'Edit_Acct says ID = ', id
print (str(session['email']),'is on edit_acct')
acct = Accounts.query.get(id)
form = EditAcctForm(obj=acct)
return render_template('editacct.html', form=form)
@app.route("/mod_acct/", methods=['GET', 'POST'])
@login_required
def mod_acct():
print (str(session['email']),'is on mod_acct')
form = EditAcctForm()
if request.method == 'POST':
db_session.query(Accounts).filter_by(id = form.id.data).\
update({"name":form.name.data}, synchronize_session=False)
db_session.commit()
flash('You modified an account.')
print (str(session['email']),'has successfully modified an account')
return redirect(url_for('home'))
return render_template('editacct.html', form=form)
@app.route("/add_cat/", methods=['GET', 'POST'])
@login_required
def add_cat():
print (str(session['email']),'is on add_cat')
form = CategoryForm()
if request.method == 'POST':
if form.validate() == False:
return render_template('addcat.html', form=form)
else:
newcat = Categories(form.name.data, session['email'])
db_session.add(newcat)
db_session.commit()
flash('You created a new category.')
            print (str(session['email']),'has successfully added a new category')
return redirect(url_for('home'))
elif request.method == 'GET':
print (str(session['email']),'is still on add_cat')
return render_template('addcat.html', form=form)
@app.route('/edit_cat/<int:id>/')
@login_required
def edit_cat(id):
print 'Edit_Cat says ID = ', id
print (str(session['email']),'is on edit_cat')
cat = Categories.query.get(id)
form = EditCatForm(obj=cat)
return render_template('editcat.html', form=form)
@app.route("/mod_cat/", methods=['GET', 'POST'])
@login_required
def mod_cat():
    print (str(session['email']),'is on mod_cat')
    form = EditCatForm()
if request.method == 'POST':
db_session.query(Categories).filter_by(id = form.id.data).\
update({"name":form.name.data}, synchronize_session=False)
db_session.commit()
flash('You modified a category.')
print (str(session['email']),'has successfully modified a category')
return redirect(url_for('home'))
return render_template('editcat.html', form=form)
# Delete transactions:
@app.route('/delete_trans/<int:id>/',)
@login_required
def delete_trans(id):
con=engine.connect()
con.execute('delete from transactions where id =:id', {"id":id} )
flash('The transaction was deleted.')
print (str(session['email']), 'deleted transaction ID: ', id)
return redirect(url_for('home'))
# Delete account:
@app.route('/delete_acct/<int:id>/',)
@login_required
def delete_acct(id):
con=engine.connect()
con.execute('delete from accounts where id =:id', {"id":id} )
flash('The account was deleted.')
print (str(session['email']), 'deleted account ID: ', id)
return redirect(url_for('home'))
# Delete category:
@app.route('/delete_cat/<int:id>/',)
@login_required
def delete_cat(id):
con=engine.connect()
con.execute('delete from categories where id =:id', {"id":id} )
flash('The category was deleted.')
print (str(session['email']), 'deleted category ID: ',id)
return redirect(url_for('home'))
# Delete goal:
@app.route('/delete_goal/<int:id>/',)
@login_required
def delete_goal(id):
con=engine.connect()
con.execute('delete from goals where id =:id', {"id":id} )
flash('The goal was deleted.')
print (str(session['email']), 'deleted goal ID: ',id)
return redirect(url_for('home'))
@app.route("/profile/")
@login_required
def profile():
if 'email' in session:
print (str(session['email']),'is on Profile')
else:
print('Guest is on Profile ... and this should never happen')
usr = str(session['email'])
#transactions by session user
transactions = db_session.query(Transactions).filter(Transactions.email==usr).all()
#accounts by session user
accounts = db_session.query(Accounts).filter(Accounts.email==usr).all()
#categories by session user
categories = db_session.query(Categories).filter(Categories.email==usr).all()
cat_cnt = db_session.query(func.count(Categories.id)).filter(Categories.email==usr)
#progress by session user
con=engine.connect()
progress = con.execute("select transactions.category, sum(transactions.amount) as sum, goals.goal, ((sum(transactions.amount)/goals.goal )*100) as progress from transactions, goals where transactions.category=goals.category and transactions.email=:param group by transactions.category", {"param":session['email']} )
return render_template('profile.html', progress=progress)
@app.route('/signout')
@login_required
def signout():
if 'email' not in session:
return redirect(url_for('signin'))
flash('You have been logged out')
print (str(session['email']),'has been logged out.')
session.pop('email', None)
return redirect(url_for('index'))
## Error Handlers ##
@app.errorhandler(500)
def internal_error(error):
#db_session.rollback()
return render_template('500.html'), 500
@app.errorhandler(404)
def internal_error(error):
return render_template('404.html'), 404
import logging
from logging import FileHandler, Formatter
if not app.debug:
file_handler = FileHandler('error.log')
file_handler.setFormatter(Formatter('%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('errors')
| gpl-3.0 |
aruneli/validation-tests | tests/validation_v2/cattlevalidationtest/core/test_services_sidekick.py | 1 | 36672 | from common_fixtures import * # NOQA
WEB_IMAGE_UUID = "docker:sangeetha/testlbsd:latest"
SSH_IMAGE_UUID = "docker:sangeetha/testclient:latest"
def create_env_with_sidekick(testname, client, service_scale, expose_port):
launch_config_consumed_service = {
"imageUuid": WEB_IMAGE_UUID}
# Adding service anti-affinity rule to workaround bug-1419
launch_config_service = {
"imageUuid": SSH_IMAGE_UUID,
"ports": [expose_port+":22/tcp"],
"labels": {
'io.rancher.scheduler.affinity:container_label_ne':
"io.rancher.stack_service.name" +
"=${stack_name}/${service_name}"
}
}
env, service, service_name, consumed_service_name = \
env_with_sidekick_config(testname, client, service_scale,
launch_config_consumed_service,
launch_config_service)
return env, service, service_name, consumed_service_name
def validate_sidekick(super_client, primary_service, service_name,
consumed_service_name, exposed_port=None, dnsname=None):
print "Validating service - " + service_name
containers = get_service_containers_with_name(super_client,
primary_service,
service_name)
assert len(containers) == primary_service.scale
print "Validating Consumed Services: " + consumed_service_name
consumed_containers = get_service_containers_with_name(
super_client, primary_service, consumed_service_name)
assert len(consumed_containers) == primary_service.scale
# For every container in the service , make sure that there is 1
# associated container from each of the consumed service with the same
# label and make sure that this container is the same host as the
# primary service container
for con in containers:
pri_host = con.hosts[0].id
label = con.labels["io.rancher.service.deployment.unit"]
print con.name + " - " + label + " - " + pri_host
secondary_con = get_service_container_with_label(
super_client, primary_service, consumed_service_name, label)
sec_host = secondary_con.hosts[0].id
print secondary_con.name + " - " + label + " - " + sec_host
assert sec_host == pri_host
if exposed_port is not None and dnsname is not None:
# Check for DNS resolution
secondary_con = get_service_containers_with_name(
super_client, primary_service, consumed_service_name)
validate_dns(super_client, containers, secondary_con, exposed_port,
dnsname)
def validate_dns(super_client, service_containers, consumed_service,
exposed_port, dnsname):
time.sleep(5)
for service_con in service_containers:
host = super_client.by_id('host', service_con.hosts[0].id)
expected_dns_list = []
expected_link_response = []
dns_response = []
print "Validating DNS for " + dnsname + " - container -" \
+ service_con.name
for con in consumed_service:
expected_dns_list.append(con.primaryIpAddress)
expected_link_response.append(con.externalId[:12])
print "Expected dig response List" + str(expected_dns_list)
print "Expected wget response List" + str(expected_link_response)
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(exposed_port))
# Validate link containers
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + dnsname + \
":80/name.html;cat result.txt"
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
print "Actual wget Response" + str(resp)
assert resp in (expected_link_response)
# Validate DNS resolution using dig
cmd = "dig " + dnsname + " +short"
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
print "Actual dig Response" + str(response)
assert len(response) == len(expected_dns_list)
for resp in response:
dns_response.append(resp.strip("\n"))
for address in expected_dns_list:
assert address in dns_response
def create_env_with_multiple_sidekicks(testname, client, service_scale,
expose_port):
launch_config_consumed_service1 = {
"imageUuid": WEB_IMAGE_UUID}
launch_config_consumed_service2 = {
"imageUuid": WEB_IMAGE_UUID}
launch_config_service = {
"imageUuid": SSH_IMAGE_UUID,
"ports": [expose_port+":22/tcp"],
"labels": {
'io.rancher.scheduler.affinity:container_label_ne':
"io.rancher.stack_service.name" +
"=${stack_name}/${service_name}"
}}
random_name = random_str()
consumed_service_name1 = random_name.replace("-", "")
random_name = random_str()
consumed_service_name2 = random_name.replace("-", "")
launch_config_consumed_service1["name"] = consumed_service_name1
launch_config_consumed_service2["name"] = consumed_service_name2
# Create Environment
# random_name = random_str()
# env_name = random_name.replace("-", "")
env = client.create_environment(name=testname)
env = client.wait_success(env)
assert env.state == "active"
# Create service
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(
name=service_name, environmentId=env.id,
launchConfig=launch_config_service, scale=service_scale,
secondaryLaunchConfigs=[launch_config_consumed_service1,
launch_config_consumed_service2]
)
service = client.wait_success(service)
assert service.state == "inactive"
consumed_service_name1 = \
env.name + "_" + service.name + "_" + consumed_service_name1
consumed_service_name2 = \
env.name + "_" + service.name + "_" + consumed_service_name2
service_name = env.name + "_" + service.name
return env, service, service_name, \
consumed_service_name1, consumed_service_name2
def env_with_sidekick_config(testname, client, service_scale,
launch_config_consumed_service,
launch_config_service):
env = client.create_environment(name=testname)
env = client.wait_success(env)
assert env.state == "active"
# Create service
random_name = random_str()
consumed_service_name = random_name.replace("-", "")
launch_config_consumed_service["name"] = consumed_service_name
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(
name=service_name, environmentId=env.id,
launchConfig=launch_config_service, scale=service_scale,
secondaryLaunchConfigs=[launch_config_consumed_service])
service = client.wait_success(service)
assert service.state == "inactive"
consumed_service_name = \
env.name + "_" + service.name + "_" + consumed_service_name
service_name = env.name + "_" + service.name
return env, service, service_name, consumed_service_name
def create_env_with_sidekick_for_linking(testname, client, service_scale):
launch_config_consumed_service = {
"imageUuid": WEB_IMAGE_UUID}
launch_config_service = {
"imageUuid": WEB_IMAGE_UUID}
env, service, service_name, consumed_service_name = \
env_with_sidekick_config(testname, client, service_scale,
launch_config_consumed_service,
launch_config_service)
return env, service, service_name, consumed_service_name
def create_env_with_sidekick_anti_affinity(testname, client, service_scale):
launch_config_consumed_service = {
"imageUuid": WEB_IMAGE_UUID}
launch_config_service = {
"imageUuid": SSH_IMAGE_UUID,
"labels": {
'io.rancher.scheduler.affinity:container_label_ne':
"io.rancher.stack_service.name" +
"=${stack_name}/${service_name}"
}
}
env, service, service_name, consumed_service_name = \
env_with_sidekick_config(testname, client, service_scale,
launch_config_consumed_service,
launch_config_service)
return env, service, service_name, consumed_service_name
def env_with_sidekick(testname, super_client, client, service_scale,
exposed_port):
env, service, service_name, consumed_service_name = \
create_env_with_sidekick(testname, client, service_scale, exposed_port)
env = env.activateservices()
env = client.wait_success(env, 120)
assert env.state == "active"
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
validate_sidekick(super_client, service, service_name,
consumed_service_name, exposed_port, dnsname)
return env, service, service_name, consumed_service_name
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSideKickActivateEnv:
testname = "TestSideKickActivateEnv"
exposed_port = "7000"
service_scale = 2
@pytest.mark.create
def test_sidekick_activate_env_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
create_env_with_sidekick(self.testname, client, self.service_scale,
self.exposed_port)
env = env.activateservices()
env = client.wait_success(env, 120)
assert env.state == "active"
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_activate_env_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
logger.info("service_name is: %s", (service_name))
consumed_service_name = data[3]
logger.info("consumed service name is: %s",
(consumed_service_name))
dnsname = data[4]
logger.info("dns is: %s", (dnsname))
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestMultipleSidekickActivateService:
testname = "TestMultipleSidekickActivateService"
exposed_port = "7003"
service_scale = 2
@pytest.mark.create
def test_multiple_sidekick_activate_service_create(self, client,
super_client):
env, service, service_name, consumed_service1, consumed_service2 =\
create_env_with_multiple_sidekicks(
self.testname, client, self.service_scale, self.exposed_port)
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service1,
consumed_service2, dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_multiple_sidekick_activate_service_validate(self, client,
super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
logger.info("service_name is: %s", (service_name))
consumed_service1 = data[3]
assert len(consumed_service1) > 0
logger.info("consumed service1 name is: %s", (consumed_service1))
consumed_service2 = data[4]
        assert len(consumed_service2) > 0
        logger.info("consumed service2 is: %s", (consumed_service2))
dnsname = data[5]
logger.info("dns is: %s", (dnsname))
validate_sidekick(super_client, service, service_name,
consumed_service1, self.exposed_port, dnsname)
dnsname = service.secondaryLaunchConfigs[1].name
validate_sidekick(super_client, service, service_name,
consumed_service2, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickActivateEnv:
testname = "TestSidekickActivateEnv"
exposed_port = "7000"
service_scale = 2
@pytest.mark.create
def test_sidekick_activate_env_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
create_env_with_sidekick(self.testname, client, self.service_scale,
self.exposed_port)
env = env.activateservices()
env = client.wait_success(env, 120)
assert env.state == "active"
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_activate_env_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
logger.info("service_name is: %s", (service_name))
consumed_service_name = data[3]
logger.info("consumed_service_name is: %s",
(consumed_service_name))
dnsname = data[4]
logger.info("dns name : %s", (dnsname))
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekick:
testname = "TestSidekick"
service_scale = 2
@pytest.mark.create
def test_sidekick_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
create_env_with_sidekick_for_linking(self.testname, client,
self.service_scale)
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
data = [env.uuid, service.uuid, service_name, consumed_service_name]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
logger.info("service_name is: %s", (service_name))
consumed_service_name = data[3]
logger.info("consumed_service_name is: %s",
(consumed_service_name))
validate_sidekick(super_client, service, service_name,
consumed_service_name)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickWithAntiAffinity:
testname = "TestSidekickWithAntiAffinity"
service_scale = 2
@pytest.mark.create
def test_sidekick_with_anti_affinity_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
create_env_with_sidekick_anti_affinity(self.testname, client,
self.service_scale)
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
data = [env.uuid, service.uuid, service_name, consumed_service_name]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_with_anti_affinity_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
        service_name = data[2]
        logger.info("service_name is: %s", (service_name))
        consumed_service_name = data[3]
        logger.info("consumed_service_name is: %s",
                    (consumed_service_name))
validate_sidekick(super_client, service, service_name,
consumed_service_name)
def test_service_links_to_sidekick(client, super_client):
service_scale = 2
testname = "TestServiceLinksToSidekick"
env, linked_service, linked_service_name, linked_consumed_service_name = \
create_env_with_sidekick_for_linking(testname, client, service_scale)
client_port = "7004"
launch_config = {"imageUuid": SSH_IMAGE_UUID,
"ports": [client_port+":22/tcp"]}
service = create_svc(client, env, launch_config, 1)
link_svc(super_client, service, [linked_service])
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
service_containers = get_service_container_list(super_client, service)
primary_consumed_service = get_service_containers_with_name(
super_client, linked_service, linked_service_name)
secondary_consumed_service = get_service_containers_with_name(
super_client, linked_service, linked_consumed_service_name)
dnsname = linked_service.name
validate_dns(super_client, service_containers, primary_consumed_service,
client_port, dnsname)
dnsname = \
linked_service.secondaryLaunchConfigs[0].name + "." + \
linked_service.name
validate_dns(super_client, service_containers, secondary_consumed_service,
client_port, dnsname)
delete_all(client, [env])
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickServiceScaleUp:
testname = "TestSidekickServiceScaleUp"
service_scale = 2
exposed_port = "7005"
final_service_scale = 3
@pytest.mark.create
def test_sidekick_service_scale_up_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
service = client.update(service, scale=self.final_service_scale,
name=service.name)
service = client.wait_success(service, 120)
assert service.state == "active"
assert service.scale == self.final_service_scale
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_service_scale_up_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickScaleDown:
testname = "TestSidekickScaleDown"
service_scale = 3
exposed_port = "7006"
final_service_scale = 2
@pytest.mark.create
def test_sidekick_scale_down_create(self, client, super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
service = client.update(service, scale=self.final_service_scale,
name=service.name)
service = client.wait_success(service, 120)
assert service.state == "active"
assert service.scale == self.final_service_scale
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_scale_down_validate(self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickConsumedServicesStopStart:
testname = "TestSidekickConsumedServicesStopStart"
service_scale = 2
exposed_port = "7007"
@pytest.mark.create
def test_sidekick_consumed_services_stop_start_instance(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
container_name = consumed_service_name + "_2"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# Stop instance
container = client.wait_success(container.stop(), 120)
client.wait_success(service)
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_consumed_services_stop_start_instance_validate(
self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickConsumedServicesRestartInstance:
testname = "TestSidekickConsumedServicesRestartInstance"
service_scale = 2
exposed_port = "7008"
@pytest.mark.create
def test_sidekick_consumed_services_restart_instance_create(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
container_name = consumed_service_name + "_2"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# restart instance
container = client.wait_success(container.restart(), 120)
assert container.state == 'running'
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_consumed_services_restart_instance_validate(
self, client, super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
def test_sidekick_consumed_services_delete_instance(client, super_client):
service_scale = 3
exposed_port = "7009"
testname = "TestSidekickConsumedServicesDeleteInstance"
env, service, service_name, consumed_service_name = \
env_with_sidekick(testname, super_client, client, service_scale,
exposed_port)
container_name = consumed_service_name + "_1"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
print container_name
primary_container = get_side_kick_container(
super_client, container, service, service_name)
print primary_container.name
# Delete instance
container = client.wait_success(client.delete(container))
assert container.state == 'removed'
client.wait_success(service)
dnsname = service.secondaryLaunchConfigs[0].name
validate_sidekick(super_client, service, service_name,
consumed_service_name, exposed_port, dnsname)
# Check that the consumed container is not recreated
primary_container = client.reload(primary_container)
print primary_container.state
assert primary_container.state == "running"
delete_all(client, [env])
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickDeactivateActivateEnvironment:
testname = "TestSidekickDeactivateActivateEnvironment"
service_scale = 2
exposed_port = "7010"
@pytest.mark.create
def test_sidekick_deactivate_activate_environment_create(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
env = env.deactivateservices()
service = client.wait_success(service, 120)
assert service.state == "inactive"
wait_until_instances_get_stopped_for_service_with_sec_launch_configs(
super_client, service)
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_deactivate_activate_environment_validate(self, client,
super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickServicesStopStartInstance:
testname = "TestSidekickServicesStopStartInstance"
service_scale = 2
exposed_port = "7011"
@pytest.mark.create
def test_sidekick_services_stop_start_instance_create(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
container_name = env.name + "_" + service.name + "_2"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# Stop instance
container = client.wait_success(container.stop(), 120)
client.wait_success(service)
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_services_stop_start_instance_validate(self, client,
super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickServicesRestartInstance:
testname = "TestSidekickServicesRestartInstance"
service_scale = 3
exposed_port = "7012"
@pytest.mark.create
def test_sidekick_services_restart_instance_create(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
container_name = env.name + "_" + service.name + "_2"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# restart instance
container = client.wait_success(container.restart(), 120)
assert container.state == 'running'
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_services_restart_instance_validate(self, client,
super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port,
dnsname)
def test_sidekick_services_delete_instance(client, super_client):
service_scale = 2
exposed_port = "7013"
testname = "TestSidekickServicesDeleteInstance"
env, service, service_name, consumed_service_name = \
env_with_sidekick(testname, super_client, client, service_scale,
exposed_port)
container_name = env.name + "_" + service.name + "_1"
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
print container_name
consumed_container = get_side_kick_container(
super_client, container, service, consumed_service_name)
print consumed_container.name
# Delete instance
container = client.wait_success(client.delete(container))
assert container.state == 'removed'
client.wait_success(service)
dnsname = service.secondaryLaunchConfigs[0].name
validate_sidekick(super_client, service, service_name,
consumed_service_name, exposed_port, dnsname)
# Check that the consumed container is not recreated
consumed_container = client.reload(consumed_container)
print consumed_container.state
assert consumed_container.state == "running"
delete_all(client, [env])
@pytest.mark.P0
@pytest.mark.Sidekick
@pytest.mark.incremental
class TestSidekickServicesDeactivateActivate:
testname = "TestSidekickServicesDeactivateActivate"
service_scale = 2
exposed_port = "7014"
@pytest.mark.create
def test_sidekick_services_deactivate_activate_create(self, client,
super_client):
env, service, service_name, consumed_service_name = \
env_with_sidekick(self.testname, super_client, client,
self.service_scale,
self.exposed_port)
service = service.deactivate()
service = client.wait_success(service, 120)
assert service.state == "inactive"
wait_until_instances_get_stopped_for_service_with_sec_launch_configs(
super_client, service)
service = service.activate()
service = client.wait_success(service, 120)
assert service.state == "active"
dnsname = service.secondaryLaunchConfigs[0].name
data = [env.uuid, service.uuid, service_name, consumed_service_name,
dnsname]
logger.info("data to save: %s", data)
save(data, self)
@pytest.mark.validate
def test_sidekick_services_deactivate_activate_validate(self, client,
super_client):
data = load(self)
env = client.list_environment(uuid=data[0])[0]
logger.info("env is: %s", format(env))
service = client.list_service(uuid=data[1])[0]
assert len(service) > 0
logger.info("service is: %s", format(service))
service_name = data[2]
consumed_service_name = data[3]
dnsname = data[4]
validate_sidekick(super_client, service, service_name,
consumed_service_name, self.exposed_port, dnsname)
| apache-2.0 |
vamanea/linux-odroidxu | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n"
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
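	# syscalls is an autodict, so the first lookup of an id returns an empty
	# autodict rather than a number; the "+= 1" then raises TypeError and the
	# handler falls back to seeding the count at 1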
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
googleads/google-ads-python | google/ads/googleads/v8/common/types/final_app_url.py | 1 | 1997 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v8.enums.types import app_url_operating_system_type
__protobuf__ = proto.module(
package="google.ads.googleads.v8.common",
marshal="google.ads.googleads.v8",
manifest={"FinalAppUrl",},
)
class FinalAppUrl(proto.Message):
r"""A URL for deep linking into an app for the given operating
system.
Attributes:
os_type (google.ads.googleads.v8.enums.types.AppUrlOperatingSystemTypeEnum.AppUrlOperatingSystemType):
The operating system targeted by this URL.
Required.
url (str):
The app deep link URL. Deep links specify a location in an
app that corresponds to the content you'd like to show, and
should be of the form {scheme}://{host_path} The scheme
identifies which app to open. For your app, you can use a
custom scheme that starts with the app's name. The host and
path specify the unique location in the app where your
content exists. Example: "exampleapp://productid_1234".
Required.
"""
os_type = proto.Field(
proto.ENUM,
number=1,
enum=app_url_operating_system_type.AppUrlOperatingSystemTypeEnum.AppUrlOperatingSystemType,
)
url = proto.Field(proto.STRING, number=3, optional=True,)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 |
googleads/google-ads-python | google/ads/googleads/v7/errors/types/string_format_error.py | 1 | 1160 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.errors",
marshal="google.ads.googleads.v7",
manifest={"StringFormatErrorEnum",},
)
class StringFormatErrorEnum(proto.Message):
r"""Container for enum describing possible string format errors. """
class StringFormatError(proto.Enum):
r"""Enum describing possible string format errors."""
UNSPECIFIED = 0
UNKNOWN = 1
ILLEGAL_CHARS = 2
INVALID_FORMAT = 3
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 |
zqzhang/crosswalk-test-suite | misc/sampleapp-android-tests/sampleapp/simd.py | 14 | 4150 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<[email protected]>
import unittest
import os
import sys
import commands
import comm
from TestApp import *
app_name = "Simd"
package_name = "org.xwalk." + app_name.lower()
active_name = app_name + "Activity"
sample_src = comm.sample_src_pref + "simd-mandelbrot/"
testapp = None
comm.setUp()
class Simd(unittest.TestCase):
def test_1_pack(self):
#clean up old apk
commands.getstatusoutput("rm %s%s*" % (comm.build_app_dest, "org.xwalk." + app_name.lower()))
cmd = "%s --crosswalk=%s --platforms=android --android=%s --targets=%s --enable-remote-debugging %s" % \
(comm.apptools,
comm.crosswalkzip,
comm.MODE,
comm.ARCH,
sample_src)
comm.pack(cmd, app_name.lower(), self)
def test_2_install(self):
apk_file = commands.getstatusoutput("ls %s| grep %s" % (comm.build_app_dest, app_name.lower()))[1]
if apk_file.endswith(".apk"):
global testapp
testapp = TestApp(comm.device, comm.build_app_dest + apk_file, package_name, active_name)
if testapp.isInstalled():
testapp.uninstall()
self.assertTrue(testapp.install())
else:
print("-->> No packed %s apk in %s" % (app_name, comm.build_app_dest))
self.assertTrue(False)
def test_3_launch(self):
if testapp is not None:
self.assertTrue(testapp.launch())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_4_switch(self):
if testapp is not None:
self.assertTrue(testapp.switch())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_5_stop(self):
if testapp is not None:
self.assertTrue(testapp.stop())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_6_uninstall(self):
if testapp is not None:
self.assertTrue(testapp.uninstall())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_7_uninstall_when_app_running(self):
if testapp is not None:
if not testapp.isInstalled():
testapp.install()
if not testapp.isRunning():
testapp.launch()
self.assertTrue(testapp.uninstall())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
niavlys/kivy | kivy/tests/test_multistroke.py | 56 | 13829 | import unittest
import kivy.multistroke
from kivy.multistroke import Recognizer, MultistrokeGesture
from kivy.vector import Vector
best_score = 0.0
counter = 0
def best_score_cb(result):
global best_score
best_score = result.best['score']
def counter_cb(result):
global counter
counter += 1
# These are taken from the examples in JavaScript code (but made unistrokes)
TGesture = [Vector(30, 7), Vector(103, 7), Vector(66, 7), Vector(66, 87)]
NGesture = [Vector(177, 92), Vector(177, 2), Vector(182, 1), Vector(246, 95),
Vector(247, 87), Vector(247, 1)]
# dataset that matches N pretty well
Ncandidate = [
Vector(160, 271), Vector(160, 263), Vector(158, 257), Vector(156, 249),
Vector(146, 187), Vector(144, 181), Vector(144, 175), Vector(142, 167),
Vector(140, 113), Vector(140, 107), Vector(140, 103), Vector(140, 99),
Vector(140, 85), Vector(138, 85), Vector(138, 87), Vector(138, 89),
Vector(166, 151), Vector(176, 171), Vector(188, 189), Vector(200, 205),
Vector(238, 263), Vector(242, 269), Vector(244, 273), Vector(246, 277),
Vector(252, 289), Vector(254, 291), Vector(256, 291), Vector(258, 291),
Vector(260, 281), Vector(260, 275), Vector(260, 267), Vector(260, 255),
Vector(254, 189), Vector(254, 175), Vector(254, 161), Vector(254, 147),
Vector(260, 103), Vector(260, 101), Vector(260, 99), Vector(260, 95),
Vector(260, 93), Vector(260, 91), Vector(260, 89)
]
class MultistrokeTestCase(unittest.TestCase):
def setUp(self):
        global best_score, counter
        best_score = 0
        counter = 0
self.Tinvar = MultistrokeGesture('T', [TGesture],
orientation_sensitive=False)
self.Tbound = MultistrokeGesture('T', [TGesture],
orientation_sensitive=True)
self.Ninvar = MultistrokeGesture('N', [NGesture],
orientation_sensitive=False)
self.Nbound = MultistrokeGesture('N', [NGesture],
orientation_sensitive=True)
# -----------------------------------------------------------------------------
# Recognizer scheduling
# -----------------------------------------------------------------------------
def test_immediate(self):
gdb = Recognizer(db=[self.Tinvar, self.Ninvar])
r = gdb.recognize([Ncandidate], max_gpf=0)
self.assertEqual(r._match_ops, 4)
self.assertEqual(r._completed, 2)
self.assertEqual(r.progress, 1)
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
def test_scheduling(self):
global best_score
from kivy.clock import Clock
gdb = Recognizer(db=[self.Tinvar, self.Ninvar])
r = gdb.recognize([Ncandidate], max_gpf=1)
r.bind(on_complete=best_score_cb)
# _recognize_tick is scheduled here; compares to Tinvar
Clock.tick()
self.assertEqual(r.progress, .5)
self.assertEqual(best_score, .0)
# Now complete the search operation
Clock.tick()
self.assertEqual(r.progress, 1)
self.assertTrue(best_score > 0.94 and best_score < 0.95)
def test_scheduling_limits(self):
global best_score
from kivy.clock import Clock
gdb = Recognizer(db=[self.Ninvar])
tpls = len(self.Ninvar.templates)
best_score = 0
gdb.db.append(self.Ninvar)
r = gdb.recognize([Ncandidate], max_gpf=1)
r.bind(on_complete=best_score_cb)
self.assertEqual(r.progress, 0)
Clock.tick()
self.assertEqual(r.progress, 0.5)
self.assertEqual(best_score, 0)
Clock.tick()
self.assertEqual(r.progress, 1)
self.assertTrue(best_score > 0.94 and best_score < 0.95)
best_score = 0
gdb.db.append(self.Ninvar)
r = gdb.recognize([Ncandidate], max_gpf=1)
r.bind(on_complete=best_score_cb)
self.assertEqual(r.progress, 0)
Clock.tick()
self.assertEqual(r.progress, 1 / 3.)
Clock.tick()
self.assertEqual(r.progress, 2 / 3.)
self.assertEqual(best_score, 0)
Clock.tick()
self.assertEqual(r.progress, 1)
self.assertTrue(best_score > 0.94 and best_score < 0.95)
def test_parallel_recognize(self):
global counter
from kivy.clock import Clock
counter = 0
gdb = Recognizer()
for i in range(9):
gdb.add_gesture('T', [TGesture], priority=50)
gdb.add_gesture('N', [NGesture])
r1 = gdb.recognize([Ncandidate], max_gpf=1)
r1.bind(on_complete=counter_cb)
Clock.tick() # first run scheduled here; 9 left
r2 = gdb.recognize([Ncandidate], max_gpf=1)
r2.bind(on_complete=counter_cb)
Clock.tick() # 8 left
r3 = gdb.recognize([Ncandidate], max_gpf=1)
r3.bind(on_complete=counter_cb)
Clock.tick() # 7 left
# run some immediate searches, should not interfere.
for i in range(5):
n = gdb.recognize([TGesture], max_gpf=0)
self.assertEqual(n.best['name'], 'T')
self.assertTrue(round(n.best['score'], 1) == 1.0)
for i in range(6):
Clock.tick()
self.assertEqual(counter, 0)
Clock.tick()
self.assertEqual(counter, 1)
Clock.tick()
self.assertEqual(counter, 2)
Clock.tick()
self.assertEqual(counter, 3)
def test_timeout_case_1(self):
global best_score
from kivy.clock import Clock
from time import sleep
best_score = 0
gdb = Recognizer(db=[self.Tbound, self.Ninvar])
r = gdb.recognize([Ncandidate], max_gpf=1, timeout=0.1)
Clock.tick() # matches Tbound in this tick
self.assertEqual(best_score, 0)
sleep(0.11)
Clock.tick() # should match Ninv, but times out (got T)
self.assertEqual(r.status, 'timeout')
self.assertEqual(r.progress, .5)
self.assertTrue(r.best['name'] == 'T')
self.assertTrue(r.best['score'] < 0.5)
def test_timeout_case_2(self):
global best_score
from kivy.clock import Clock
from time import sleep
best_score = 0
gdb = Recognizer(db=[self.Tbound, self.Ninvar, self.Tinvar])
r = gdb.recognize([Ncandidate], max_gpf=1, timeout=0.2)
Clock.tick() # matches Tbound in this tick
self.assertEqual(best_score, 0)
sleep(0.1)
Clock.tick() # matches Ninvar in this tick
sleep(0.1)
Clock.tick() # should match Tinvar, but times out
self.assertEqual(r.status, 'timeout')
self.assertEqual(r.progress, 2 / 3.)
self.assertTrue(r.best['score'] >= .94 and r.best['score'] <= .95)
def test_priority_sorting(self):
gdb = Recognizer()
gdb.add_gesture('N', [NGesture], priority=10)
gdb.add_gesture('T', [TGesture], priority=5)
r = gdb.recognize([Ncandidate], goodscore=0.01, max_gpf=0,
force_priority_sort=True)
self.assertEqual(r.best['name'], 'T')
r = gdb.recognize([Ncandidate], goodscore=0.01,
force_priority_sort=False, max_gpf=0)
self.assertEqual(r.best['name'], 'N')
r = gdb.recognize([Ncandidate], goodscore=0.01, max_gpf=0,
priority=10)
self.assertEqual(r.best['name'], 'T')
r = gdb.recognize([Ncandidate], goodscore=0.01, max_gpf=0,
priority=4)
self.assertEqual(r.best['name'], None)
# -----------------------------------------------------------------------------
# Recognizer - filter tests
# -----------------------------------------------------------------------------
def test_name_filter(self):
gdb = Recognizer(db=[self.Ninvar, self.Nbound])
n = gdb.filter()
self.assertEqual(len(n), 2)
n = gdb.filter(name='X')
self.assertEqual(len(n), 0)
def test_numpoints_filter(self):
gdb = Recognizer(db=[self.Ninvar, self.Nbound])
n = gdb.filter(numpoints=100)
self.assertEqual(len(n), 0)
gdb.add_gesture('T', [TGesture], numpoints=100)
n = gdb.filter(numpoints=100)
self.assertEqual(len(n), 1)
n = gdb.filter(numpoints=[100, 16])
self.assertEqual(len(n), 3)
def test_numstrokes_filter(self):
gdb = Recognizer(db=[self.Ninvar, self.Nbound])
n = gdb.filter(numstrokes=2)
self.assertEqual(len(n), 0)
gdb.add_gesture('T', [TGesture, TGesture])
n = gdb.filter(numstrokes=2)
self.assertEqual(len(n), 1)
n = gdb.filter(numstrokes=[1, 2])
self.assertEqual(len(n), 3)
def test_priority_filter(self):
gdb = Recognizer(db=[self.Ninvar, self.Nbound])
n = gdb.filter(priority=50)
self.assertEqual(len(n), 0)
gdb.add_gesture('T', [TGesture], priority=51)
n = gdb.filter(priority=50)
self.assertEqual(len(n), 0)
n = gdb.filter(priority=51)
self.assertEqual(len(n), 1)
gdb.add_gesture('T', [TGesture], priority=52)
n = gdb.filter(priority=[0, 51])
self.assertEqual(len(n), 1)
n = gdb.filter(priority=[0, 52])
self.assertEqual(len(n), 2)
n = gdb.filter(priority=[51, 52])
self.assertEqual(len(n), 2)
n = gdb.filter(priority=[52, 53])
self.assertEqual(len(n), 1)
n = gdb.filter(priority=[53, 54])
self.assertEqual(len(n), 0)
def test_orientation_filter(self):
gdb = Recognizer(db=[self.Ninvar, self.Nbound])
n = gdb.filter(orientation_sensitive=True)
self.assertEqual(len(n), 1)
n = gdb.filter(orientation_sensitive=False)
self.assertEqual(len(n), 1)
n = gdb.filter(orientation_sensitive=None)
self.assertEqual(len(n), 2)
gdb.db.append(self.Tinvar)
n = gdb.filter(orientation_sensitive=True)
self.assertEqual(len(n), 1)
n = gdb.filter(orientation_sensitive=False)
self.assertEqual(len(n), 2)
n = gdb.filter(orientation_sensitive=None)
self.assertEqual(len(n), 3)
# -----------------------------------------------------------------------------
# misc tests
# -----------------------------------------------------------------------------
def test_resample(self):
r = kivy.multistroke.resample([Vector(0, 0), Vector(1, 1)], 11)
self.assertEqual(len(r), 11)
self.assertEqual(round(r[9].x, 1), 0.9)
r = kivy.multistroke.resample(TGesture, 25)
self.assertEqual(len(r), 25)
self.assertEqual(round(r[12].x), 81)
self.assertEqual(r[12].y, 7)
self.assertEqual(TGesture[3].x, r[24].x)
self.assertEqual(TGesture[3].y, r[24].y)
def test_rotateby(self):
r = kivy.multistroke.rotate_by(NGesture, 24)
self.assertEqual(round(r[2].x, 1), 158.59999999999999)
self.assertEqual(round(r[2].y, 1), 54.899999999999999)
def test_transfer(self):
gdb1 = Recognizer(db=[self.Ninvar])
gdb2 = Recognizer()
gdb1.transfer_gesture(gdb2, name='N')
r = gdb2.recognize([Ncandidate], max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
def test_export_import_case_1(self):
gdb1 = Recognizer(db=[self.Ninvar])
gdb2 = Recognizer()
g = gdb1.export_gesture(name='N')
gdb2.import_gesture(g)
r = gdb2.recognize([Ncandidate], max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
def test_export_import_case_2(self):
from tempfile import mkstemp
import os
gdb1 = Recognizer(db=[self.Ninvar, self.Tinvar])
gdb2 = Recognizer()
fh, fn = mkstemp()
os.close(fh)
g = gdb1.export_gesture(name='N', filename=fn)
gdb2.import_gesture(filename=fn)
os.unlink(fn)
self.assertEqual(len(gdb1.db), 2)
self.assertEqual(len(gdb2.db), 1)
r = gdb2.recognize([Ncandidate], max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
# ------------------------------------------------------------------------
# Test protractor
# ------------------------------------------------------------------------
def test_protractor_invariant(self):
gdb = Recognizer(db=[self.Tinvar, self.Ninvar])
r = gdb.recognize([NGesture], orientation_sensitive=False,
max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] == 1.0)
r = gdb.recognize([NGesture], orientation_sensitive=True,
max_gpf=0)
self.assertEqual(r.best['name'], None)
self.assertEqual(r.best['score'], 0)
r = gdb.recognize([Ncandidate], orientation_sensitive=False,
max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
def test_protractor_bound(self):
gdb = Recognizer(db=[self.Tbound, self.Nbound])
r = gdb.recognize([NGesture], orientation_sensitive=True,
max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] >= 0.99)
r = gdb.recognize([NGesture], orientation_sensitive=False,
max_gpf=0)
self.assertEqual(r.best['name'], None)
self.assertEqual(r.best['score'], 0)
r = gdb.recognize([Ncandidate], orientation_sensitive=True,
max_gpf=0)
self.assertEqual(r.best['name'], 'N')
self.assertTrue(r.best['score'] > 0.94 and r.best['score'] < 0.95)
if __name__ == '__main__':
unittest.main()
| mit |
alexryndin/ambari | ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py | 3 | 1906 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
__all__ = ["list_ambari_managed_repos"]
import os
import glob
from ambari_commons.os_check import OSCheck
from resource_management.core.exceptions import Fail
def list_ambari_managed_repos(stack_name):
"""
  Lists all Ambari-managed repositories for the given stack that are present on the host
"""
stack_name = stack_name.upper()
# TODO : get it dynamically from the server
repository_names = [stack_name, stack_name + "-UTILS" ]
if OSCheck.is_ubuntu_family():
repo_dir = '/etc/apt/sources.list.d/'
elif OSCheck.is_redhat_family(): # Centos/RHEL 5/6
repo_dir = '/etc/yum.repos.d/'
elif OSCheck.is_suse_family():
repo_dir = '/etc/zypp/repos.d/'
else:
    raise Fail('Can not determine repo dir')
repos = []
for name in repository_names:
# List all files that match pattern
files = glob.glob(os.path.join(repo_dir, name) + '*')
for f in files:
filename = os.path.basename(f)
# leave out extension
reponame = os.path.splitext(filename)[0]
repos.append(reponame)
# get uniq strings
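  # order-preserving de-duplication: set.add() always returns None, so the
  # or-expression only filters names already in `seen` while adding new ones
  # as a side effect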
seen = set()
uniq = [s for s in repos if not (s in seen or seen.add(s))]
return uniq
| apache-2.0 |
showtimesynergy/mojify | main.py | 1 | 2619 | from PIL import Image
import csv
from ast import literal_eval as make_tuple
from math import sqrt
import argparse
import os.path
def load_img(image):
# load an image as a PIL object
im = Image.open(image).convert('RGBA')
return im
def color_distance(c_tuple1, c_tuple2):
# calculate the color distance between two rgb tuples
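    # weighted "redmean" approximation of perceptual colour distance: the red
    # and blue weights shift with the mean red level and green is weighted the
    # most, which tracks human colour sensitivity better than a plain
    # Euclidean distance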
red_mean = (c_tuple1[0] + c_tuple2[0]) / 2
red = c_tuple1[0] - c_tuple2[0]
green = c_tuple1[1] - c_tuple2[1]
blue = c_tuple1[2] - c_tuple2[2]
delta = (2 + (red_mean / 256)) * (red ** 2)
delta += (4 * (green ** 2))
delta += (2 + ((255 - red_mean) / 256)) * (blue ** 2)
delta = sqrt(delta)
return delta
def write_out(text_matrix):
# write out emoji grid to txt file
with open('out.txt', '+w', encoding='utf-8') as out:
for line in text_matrix:
line_out = ''
for char in line:
# TODO: ZWJ support
if char is None:
line_out += '\u2001\u2006'
else:
char_code = '0x' + char
char_code = int(char_code, base=16)
line_out += chr(char_code)
out.writelines(line_out + '\n')
def gen_matrix(pix_data):
# generate unicode data from colors
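    # for every pixel, pick the emoji whose reference colour minimises
    # color_distance(); fully transparent pixels map to None so write_out()
    # can emit blank spacing instead of a glyph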
pix = pix_data.load()
emoji_grid = []
for y in range(0, size[1]):
emoji_grid.append([])
for x in range(0, size[0]):
pixel = pix[x, y]
best_delta = float('Inf')
for entry in emoji_list:
emoji_color = entry[1]
if pixel[3] == 0:
best = None
else:
delta = color_distance(emoji_color, pixel)
if delta < best_delta:
best = entry[0]
best_delta = delta
emoji_grid[-1].append(best)
return emoji_grid
def handle_arguments():
parser = argparse.ArgumentParser(
description='Represent an image using emoji'
)
parser.add_argument('image', help='image to be processed')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = handle_arguments()
path = args.image
emoji_list = []
with open('proc.csv') as raw_list:
emoji_list = []
reader = csv.reader(raw_list)
raw_list = list(reader)
for entry in raw_list:
emoji_list.append([entry[0], make_tuple(entry[1])])
image = load_img(path)
size = image.size
emoji_grid = gen_matrix(image)
write_out(emoji_grid)
print('Output in out.txt')
| bsd-2-clause |
kickstandproject/asterisk-testsuite-temporary | tests/channels/pjsip/transfers/blind_transfer/caller_refer_only/transfer.py | 2 | 6588 | #!/usr/bin/env python
'''
Copyright (C) 2014, Digium, Inc.
John Bigelow <[email protected]>
This program is free software, distributed under the terms of
the GNU General Public License Version 2.
'''
import logging
import pjsua as pj
from twisted.internet import reactor
LOGGER = logging.getLogger(__name__)
URI = ["sip:[email protected]", "sip:[email protected]", "sip:[email protected]"]
ITERATION = 0
class CharlieCallback(pj.AccountCallback):
"""Derived callback class for Charlie's account."""
def __init__(self, controller, account=None):
pj.AccountCallback.__init__(self, account)
self.controller = controller
self.charlie_call = None
def on_incoming_call2(self, call, msg):
self.charlie_call = call
LOGGER.info("Incoming call for Charlie '%s' from '%s'." %
(call.info().uri, call.info().remote_uri))
if ITERATION > 0:
referred_by_hdr = "Referred-By: alice <sip:[email protected]>"
if (referred_by_hdr not in msg.msg_info_buffer):
LOGGER.warn("Expected header not found: '%s'" % referred_by_hdr)
self.controller.test_object.set_passed(False)
self.controller.test_object.stop_reactor()
inbound_cb = CharliePhoneCallCallback(call)
call.set_callback(inbound_cb)
call.answer(200)
reactor.callLater(1, self.hangup_call)
def hangup_call(self):
"""Hang up the call."""
LOGGER.info("Hanging up Charlie")
self.charlie_call.hangup(code=200, reason="Q.850;cause=16")
class BobCallback(pj.AccountCallback):
"""Derived callback class for Bob's account."""
def __init__(self, account=None):
pj.AccountCallback.__init__(self, account)
self.bob_call = None
def on_incoming_call(self, call):
self.bob_call = call
LOGGER.info("Incoming call for Bob '%s' from '%s'." %
(call.info().uri, call.info().remote_uri))
inbound_cb = BobPhoneCallCallback(call)
call.set_callback(inbound_cb)
call.answer(200)
class AlicePhoneCallCallback(pj.CallCallback):
"""Derived callback class for Alice's call."""
def __init__(self, call=None):
pj.CallCallback.__init__(self, call)
def on_state(self):
log_call_info(self.call.info())
if self.call.info().state == pj.CallState.CONFIRMED:
LOGGER.info("Call is up between Alice and Bob. Transferring call" \
" to Charlie.")
self.transfer_call()
if self.call.info().state == pj.CallState.DISCONNECTED:
LOGGER.info("Call disconnected: '%s'" % self.call)
def transfer_call(self):
"""Transfer the call."""
try:
LOGGER.info("Attempting to blind transfer the call.")
self.call.transfer(URI[2])
except:
LOGGER.warn("Failed to transfer the call! Retrying...")
reactor.callLater(.2, self.transfer_call)
def on_transfer_status(self, code, reason, final, cont):
log_call_info(self.call.info())
if code == 200 and reason == "OK" and final == 1 and cont == 0:
LOGGER.info("Transfer target answered the call.")
LOGGER.debug("Call uri: '%s'; remote uri: '%s'" %
(self.call.info().uri, self.call.info().remote_uri))
LOGGER.info("Hanging up Alice")
self.call.hangup(code=200, reason="Q.850;cause=16")
return cont
class BobPhoneCallCallback(pj.CallCallback):
"""Derived callback class for Bob's call."""
def __init__(self, call=None):
pj.CallCallback.__init__(self, call)
def on_state(self):
log_call_info(self.call.info())
if self.call.info().state == pj.CallState.DISCONNECTED:
LOGGER.info("Call disconnected: '%s'" % self.call)
class CharliePhoneCallCallback(pj.CallCallback):
"""Derived callback class for Charlie's call."""
def __init__(self, call=None):
pj.CallCallback.__init__(self, call)
def on_state(self):
log_call_info(self.call.info())
if self.call.info().state == pj.CallState.DISCONNECTED:
LOGGER.info("Call disconnected: '%s'" % self.call)
class AMICallback(object):
"""Class to set up callbacks and place calls."""
def __init__(self, test_object, accounts):
self.test_object = test_object
self.ami = self.test_object.ami[0]
self.ami.registerEvent('Hangup', self.hangup_event_handler)
self.alice = accounts.get('alice')
bob = accounts.get('bob')
charlie = accounts.get('charlie')
self.bob_cb = BobCallback()
self.charlie_cb = CharlieCallback(self)
bob.account.set_callback(self.bob_cb)
charlie.account.set_callback(self.charlie_cb)
self.channels_hungup = 0
def hangup_event_handler(self, ami, event):
"""AMI hang up event callback."""
global ITERATION
LOGGER.debug("Hangup detected for channel '%s'" % event['channel'])
self.channels_hungup += 1
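        # three Hangup events are expected per pass (one each for the Alice,
        # Bob and Charlie call legs) before the second iteration is started or
        # the reactor is stopped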
if self.channels_hungup == 3 and ITERATION == 0:
LOGGER.info("Starting second iteration.")
self.channels_hungup = 0
ITERATION += 1
lock = self.alice.pj_lib.auto_lock()
self.make_call(self.alice.account, URI[1])
del lock
elif self.channels_hungup == 3 and ITERATION == 1:
self.test_object.stop_reactor()
def make_call(self, acc, uri):
"""Place a call.
Keyword Arguments:
acc The pjsua to make the call from
uri The URI to dial
"""
try:
LOGGER.info("Making call to '%s'" % uri)
acc.make_call(uri, cb=AlicePhoneCallCallback())
except pj.Error, err:
LOGGER.error("Exception: %s" % str(err))
def log_call_info(call_info):
"""Log call info."""
LOGGER.debug("Call '%s' <-> '%s'" % (call_info.uri, call_info.remote_uri))
LOGGER.debug("Call state: '%s'; last code: '%s'; last reason: '%s'" %
(call_info.state_text, call_info.last_code, call_info.last_reason))
def transfer(test_object, accounts):
"""The test's callback method.
Keyword Arguments:
test_object The test object
accounts Configured accounts
"""
LOGGER.info("Starting first iteration.")
alice = accounts.get('alice')
obj = AMICallback(test_object, accounts)
lock = alice.pj_lib.auto_lock()
obj.make_call(accounts['alice'].account, URI[0])
del lock
# vim:sw=4:ts=4:expandtab:textwidth=79
| gpl-2.0 |
tarballs-are-good/sympy | sympy/physics/quantum/innerproduct.py | 8 | 3833 | """Symbolic inner product."""
from sympy import Expr, conjugate
from sympy.printing.pretty.stringpict import prettyForm
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.state import KetBase, BraBase, _lbracket
__all__ = [
'InnerProduct'
]
# InnerProduct is not a QExpr because it is really just a regular commutative
# number. We have gone back and forth about this, but we gain a lot by having
# it subclass Expr. The main challenges were getting Dagger to work
# (we use _eval_conjugate) and represent (we can use atoms and subs). Having
# it be an Expr means that there are no commutative QExpr subclasses,
# which simplifies the design of everything.
class InnerProduct(Expr):
"""An unevaluated inner product between a Bra and a Ket.
Parameters
==========
bra : BraBase or subclass
The bra on the left side of the inner product.
ket : KetBase or subclass
The ket on the right side of the inner product.
Examples
========
Create an InnerProduct and check its properties:
>>> from sympy.physics.quantum import Bra, Ket, InnerProduct
>>> b = Bra('b')
>>> k = Ket('k')
>>> ip = b*k
>>> ip
<b|k>
>>> ip.bra
<b|
>>> ip.ket
|k>
In simple products of kets and bras inner products will be automatically
identified and created::
>>> b*k
<b|k>
But in more complex expressions, there is ambiguity in whether inner or
outer products should be created::
>>> k*b*k*b
|k><b|*|k>*<b|
    A user can force the creation of an inner product in a complex expression
by using parentheses to group the bra and ket::
>>> k*(b*k)*b
<b|k>*|k>*<b|
Notice how the inner product <b|k> moved to the left of the expression
because inner products are commutative complex numbers.
References
==========
http://en.wikipedia.org/wiki/Inner_product
"""
def __new__(cls, bra, ket, **old_assumptions):
if not isinstance(ket, KetBase):
raise TypeError('KetBase subclass expected, got: %r' % ket)
if not isinstance(bra, BraBase):
            raise TypeError('BraBase subclass expected, got: %r' % bra)
obj = Expr.__new__(cls, *(bra, ket), **{'commutative':True})
return obj
@property
def bra(self):
return self.args[0]
@property
def ket(self):
return self.args[1]
def _eval_dagger(self):
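        # Dagger swaps the two sides: Dagger(<b|k>) == <k|b>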
return InnerProduct(Dagger(self.ket), Dagger(self.bra))
def _eval_conjugate(self):
return self._eval_dagger()
def _sympyrepr(self, printer, *args):
return '%s(%s,%s)' % (self.__class__.__name__,
printer._print(self.bra, *args), printer._print(self.ket, *args))
def _sympystr(self, printer, *args):
sbra = str(self.bra)
sket = str(self.ket)
return '%s|%s' % (sbra[:-1], sket[1:])
def _pretty(self, printer, *args):
pform = prettyForm(_lbracket)
pform = prettyForm(*pform.right(self.bra._print_label_pretty(printer, *args)))
return prettyForm(*pform.right(self.ket._pretty(printer, *args)))
def _latex(self, printer, *args):
bra_label = self.bra._print_label_latex(printer, *args)
ket = printer._print(self.ket, *args)
return r'\left\langle %s \right. %s' % (bra_label, ket)
def doit(self, **hints):
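        # Ask the ket for an evaluation rule first; if it does not implement
        # one, fall back to the dual states and conjugate that result. If
        # neither side knows how to evaluate, return self unevaluated.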
try:
r = self.ket._eval_innerproduct(self.bra, **hints)
except NotImplementedError:
try:
r = conjugate(
self.bra.dual._eval_innerproduct(self.ket.dual, **hints)
)
except NotImplementedError:
r = None
if r is not None:
return r
return self
| bsd-3-clause |
superdesk/superdesk-aap | scripts/update-cores-references.py | 2 | 1935 | #!/usr/bin/env python3
import feedparser
import fileinput
import re
import sys
import getopt
TO_BE_UPDATED = [
# superdesk-core
{
'feed_url': 'https://github.com/superdesk/superdesk-core/commits/',
'file_name': 'server/requirements.txt',
'pattern': 'superdesk-core.git@([a-f0-9]*)'
},
# superdesk-client-core
{
'feed_url': 'https://github.com/superdesk/superdesk-client-core/commits/',
'file_name': 'client/package.json',
'pattern': 'superdesk-client-core#([a-f0-9]*)'
}
]
def get_last_commit(url):
feed = feedparser.parse(url)
return feed['entries'][0]['id'].split('/')[1][:9]
def replace_in_file(filename, search, new_value):
textfile = open(filename, 'r')
filetext = textfile.read()
textfile.close()
matches = re.findall(search, filetext)
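    # this assumes the pattern occurs at least once in the file; with no match,
    # matches[0] below would raise an IndexError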
with fileinput.FileInput(filename, inplace=True) as file:
for line in file:
print(line.replace(matches[0], new_value), end='')
def get_branch():
branch = ''
try:
opts, args = getopt.getopt(sys.argv[1:], 'hb:', ['branch='])
except getopt.GetoptError:
print('usage: {} -b <branch_name>'.format(sys.argv[0]))
return
for opt, arg in opts:
if opt == '-h' or opt not in ('-b', '--branch'):
print('usage: {} -b <branch_name>'.format(sys.argv[0]))
elif opt in ('-b', '--branch'):
branch = arg
return branch
if __name__ == '__main__':
branch = get_branch()
if not branch:
print('usage: {} -b <branch_name>'.format(sys.argv[0]))
sys.exit(2)
print('modiying files.......')
for repo in TO_BE_UPDATED:
last_commit_hash = get_last_commit(repo['feed_url'] + '{}.atom'.format(branch))
replace_in_file(repo['file_name'], repo['pattern'], last_commit_hash)
print('modified file: {}'.format(repo['file_name']))
print('all files modified')
| agpl-3.0 |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/distutils/tests/test_file_util.py | 65 | 1908 | """Tests for distutils.file_util."""
import unittest
import os
import shutil
from distutils.file_util import move_file
from distutils import log
from distutils.tests import support
from test.support import run_unittest
class FileUtilTestCase(support.TempdirManager, unittest.TestCase):
def _log(self, msg, *args):
if len(args) > 0:
self._logs.append(msg % args)
else:
self._logs.append(msg)
def setUp(self):
super(FileUtilTestCase, self).setUp()
self._logs = []
self.old_log = log.info
log.info = self._log
tmp_dir = self.mkdtemp()
self.source = os.path.join(tmp_dir, 'f1')
self.target = os.path.join(tmp_dir, 'f2')
self.target_dir = os.path.join(tmp_dir, 'd1')
def tearDown(self):
log.info = self.old_log
super(FileUtilTestCase, self).tearDown()
def test_move_file_verbosity(self):
f = open(self.source, 'w')
try:
f.write('some content')
finally:
f.close()
move_file(self.source, self.target, verbose=0)
wanted = []
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
move_file(self.source, self.target, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target)]
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
self._logs = []
# now the target is a dir
os.mkdir(self.target_dir)
move_file(self.source, self.target_dir, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target_dir)]
self.assertEqual(self._logs, wanted)
def test_suite():
return unittest.makeSuite(FileUtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| apache-2.0 |
khwilson/PynamoDB | examples/model.py | 3 | 5905 | """
An example using Amazon's Thread example for motivation
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/SampleTablesAndData.html
"""
from __future__ import print_function
import logging
from pynamodb.models import Model
from pynamodb.attributes import (
UnicodeAttribute, NumberAttribute, UnicodeSetAttribute, UTCDateTimeAttribute
)
from datetime import datetime
logging.basicConfig()
log = logging.getLogger("pynamodb")
log.setLevel(logging.DEBUG)
log.propagate = True
class Thread(Model):
class Meta:
read_capacity_units = 1
write_capacity_units = 1
table_name = "Thread"
host = "http://localhost:8000"
forum_name = UnicodeAttribute(hash_key=True)
subject = UnicodeAttribute(range_key=True)
views = NumberAttribute(default=0)
replies = NumberAttribute(default=0)
answered = NumberAttribute(default=0)
tags = UnicodeSetAttribute()
last_post_datetime = UTCDateTimeAttribute(null=True)
# Delete the table
# print(Thread.delete_table())
# Create the table
if not Thread.exists():
Thread.create_table(wait=True)
# Create a thread
thread_item = Thread(
'Some Forum',
'Some Subject',
tags=['foo', 'bar'],
last_post_datetime=datetime.now()
)
# try:
# Thread.get('does not', 'exist')
# except Thread.DoesNotExist:
# pass
# Save the thread
thread_item.save()
# Batch write operation
with Thread.batch_write() as batch:
threads = []
for x in range(100):
thread = Thread('forum-{0}'.format(x), 'subject-{0}'.format(x))
thread.tags = ['tag1', 'tag2']
thread.last_post_datetime = datetime.now()
threads.append(thread)
for thread in threads:
batch.save(thread)
# Get table count
print(Thread.count())
# Count based on a filter
print(Thread.count('forum-1'))
# Batch get
item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)]
for item in Thread.batch_get(item_keys):
print(item)
# Scan
for item in Thread.scan():
print(item)
# Query
for item in Thread.query('forum-1', subject__begins_with='subject'):
print(item)
print("-"*80)
# A model that uses aliased attribute names
class AliasedModel(Model):
class Meta:
table_name = "AliasedModel"
host = "http://localhost:8000"
forum_name = UnicodeAttribute(hash_key=True, attr_name='fn')
subject = UnicodeAttribute(range_key=True, attr_name='s')
views = NumberAttribute(default=0, attr_name='v')
replies = NumberAttribute(default=0, attr_name='rp')
answered = NumberAttribute(default=0, attr_name='an')
tags = UnicodeSetAttribute(attr_name='t')
last_post_datetime = UTCDateTimeAttribute(attr_name='lp')
if not AliasedModel.exists():
AliasedModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
# Create a thread
thread_item = AliasedModel(
'Some Forum',
'Some Subject',
tags=['foo', 'bar'],
last_post_datetime=datetime.now()
)
# Save the thread
thread_item.save()
# Batch write operation
with AliasedModel.batch_write() as batch:
threads = []
for x in range(100):
thread = AliasedModel('forum-{0}'.format(x), 'subject-{0}'.format(x))
thread.tags = ['tag1', 'tag2']
thread.last_post_datetime = datetime.now()
threads.append(thread)
for thread in threads:
batch.save(thread)
# Batch get
item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)]
for item in AliasedModel.batch_get(item_keys):
print("Batch get item: {0}".format(item))
# Scan
for item in AliasedModel.scan():
print("Scanned item: {0}".format(item))
# Query
for item in AliasedModel.query('forum-1', subject__begins_with='subject'):
print("Query using aliased attribute: {0}".format(item))
# Query on non key attributes
for item in Thread.query('forum-1', views__eq=0):
print("Query result: {0}".format(item))
# Query with conditional operators
for item in Thread.query('forum-1', views__eq=0, replies__eq=0, conditional_operator='OR'):
print("Query result: {0}".format(item))
# Scan with filters
for item in Thread.scan(subject__begins_with='subject', views__ge=0, conditional_operator='AND'):
print("Scanned item: {0} {1}".format(item.subject, item.views))
# Scan with null filter
for item in Thread.scan(subject__begins_with='subject', last_post_datetime__null=True):
print("Scanned item: {0} {1}".format(item.subject, item.views))
# Conditionally save an item
thread_item = Thread(
'Some Forum',
'Some Subject',
tags=['foo', 'bar'],
last_post_datetime=datetime.now()
)
# DynamoDB will only save the item if forum_name exists and is not null
print(thread_item.save(forum_name__null=False))
# DynamoDB will update the item, by adding 1 to the views attribute,
# if the forum_name attribute equals 'Some Forum' or the subject attribute is not null
print(thread_item.update_item(
'views',
1,
action='add',
conditional_operator='or',
forum_name__eq='Some Forum',
subject__null=False)
)
# DynamoDB will delete the item, only if the views attribute is equal to one
print(thread_item.delete(views__eq=1))
# Delete an item's attribute
print(thread_item.update_item(
'tags',
action='delete'
))
# Backup/restore example
# Print the size of the table
print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
# Dump the entire table to a file
Thread.dump('thread.json')
# Optionally Delete all table items
# Commented out for safety
# for item in Thread.scan():
# item.delete()
print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
# Restore table from a file
Thread.load('thread.json')
print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
# Dump the entire table to a string
serialized = Thread.dumps()
# Load the entire table from a string
Thread.loads(serialized)
| mit |
dbarbier/privot | python/test/t_SquareMatrixLapack_std.py | 1 | 1446 | #! /usr/bin/env python
from openturns import *
TESTPREAMBLE()
try :
matrix1 = SquareMatrix(2)
matrix1.setName("matrix1")
matrix1[0, 0] = 1.0
matrix1[1, 0] = 2.0
matrix1[0, 1] = 5.0
matrix1[1, 1] = 12.0
print "matrix1 = ", matrix1
pt = NumericalPoint()
pt.add(5.0)
pt.add(0.0)
print "pt = ", pt
result = matrix1.solveLinearSystem(pt)
print "result = ", result
determinant = matrix1.computeDeterminant()
print "determinant = ", determinant
ev = matrix1.computeEigenValues()
print "ev = ", ev
evect = SquareComplexMatrix(2)
ev = matrix1.computeEigenValues(evect)
print "ev=", ev
print "evect=", evect
print "evect="
print evect.__str__()
# Check the high dimension determinant computation
matrix2 = SquareMatrix(3)
matrix2[0, 0] = 1.0
matrix2[0, 1] = 2.0
matrix2[0, 2] = 3.0
matrix2[1, 0] = -1.5
matrix2[1, 1] = 2.5
matrix2[1, 2] = -3.5
matrix2[2, 0] = 1.5
matrix2[2, 1] = -3.5
matrix2[2, 2] = 2.5
print "matrix2="
print matrix2.__str__()
# Need a specific Python wrapping, e.g returning both value and sign
# sign = 0.0
# value = matrix2.computeLogAbsoluteDeterminant(sign)
# print "log(|det|)=", value, ", sign=", sign
value = matrix2.computeDeterminant()
print "det=", value
except :
import sys
print "t_SquareMatrixLapack_std.py", sys.exc_type, sys.exc_value
| lgpl-3.0 |
verma-varsha/zulip | analytics/lib/time_utils.py | 17 | 1174 | from zerver.lib.timestamp import floor_to_hour, floor_to_day, timestamp_to_datetime
from analytics.lib.counts import CountStat
from datetime import datetime, timedelta
from typing import List, Optional
# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(start, end, frequency, min_length):
# type: (datetime, datetime, str, Optional[int]) -> List[datetime]
if frequency == CountStat.HOUR:
end = floor_to_hour(end)
step = timedelta(hours=1)
elif frequency == CountStat.DAY:
end = floor_to_day(end)
step = timedelta(days=1)
else:
raise AssertionError("Unknown frequency: %s" % (frequency,))
times = []
if min_length is not None:
start = min(start, end - (min_length-1)*step)
current = end
while current >= start:
times.append(current)
current -= step
return list(reversed(times))
| apache-2.0 |
zhjunlang/kbengine | kbe/src/lib/python/Lib/genericpath.py | 106 | 3882 | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile',
'samestat']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
"""Test whether a path exists. Returns False for broken symbolic links"""
try:
os.stat(path)
except OSError:
return False
return True
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path on systems that support symlinks
def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
except OSError:
return False
return stat.S_ISREG(st.st_mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
"""Return true if the pathname refers to an existing directory."""
try:
st = os.stat(s)
except OSError:
return False
return stat.S_ISDIR(st.st_mode)
def getsize(filename):
"""Return the size of a file, reported by os.stat()."""
return os.stat(filename).st_size
def getmtime(filename):
"""Return the last modification time of a file, reported by os.stat()."""
return os.stat(filename).st_mtime
def getatime(filename):
"""Return the last access time of a file, reported by os.stat()."""
return os.stat(filename).st_atime
def getctime(filename):
"""Return the metadata change time of a file, reported by os.stat()."""
return os.stat(filename).st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
"Given a list of pathnames, returns the longest common leading component"
if not m: return ''
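    # min() and max() give the lexicographically smallest and largest strings;
    # any prefix shared by those two extremes is shared by every element, so a
    # single pairwise scan is sufficient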
s1 = min(m)
s2 = max(m)
for i, c in enumerate(s1):
if c != s2[i]:
return s1[:i]
return s1
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
"""Test whether two stat buffers reference the same file"""
return (s1.st_ino == s2.st_ino and
s1.st_dev == s2.st_dev)
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
"""Test whether two pathnames reference the same actual file"""
s1 = os.stat(f1)
s2 = os.stat(f2)
return samestat(s1, s2)
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
"""Test whether two open file objects reference the same file"""
s1 = os.fstat(fp1)
s2 = os.fstat(fp2)
return samestat(s1, s2)
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
# NOTE: This code must work for text and bytes strings.
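    # illustrative (hypothetical) calls with posixpath-style separators:
    # _splitext('archive.tar.gz', '/', None, '.') -> ('archive.tar', '.gz')
    # _splitext('.bashrc', '/', None, '.') -> ('.bashrc', '')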
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex:filenameIndex+1] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, p[:0]
| lgpl-3.0 |
mbauskar/phrerp | erpnext/setup/page/setup_wizard/setup_wizard.py | 10 | 13323 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.utils.file_manager import save_file
from frappe.translate import set_default_language, get_dict, get_lang_dict, send_translations
from frappe.country_info import get_country_info
from frappe.utils.nestedset import get_root_of
from default_website import website_maker
import install_fixtures
@frappe.whitelist()
def setup_account(args=None):
try:
frappe.clear_cache()
if frappe.db.sql("select name from tabCompany"):
frappe.throw(_("Setup Already Complete!!"))
if not args:
args = frappe.local.form_dict
if isinstance(args, basestring):
args = json.loads(args)
args = frappe._dict(args)
if args.language != "english":
set_default_language(args.language)
install_fixtures.install(args.get("country"))
update_user_name(args)
frappe.local.message_log = []
create_fiscal_year_and_company(args)
frappe.local.message_log = []
set_defaults(args)
frappe.local.message_log = []
create_territories()
frappe.local.message_log = []
create_price_lists(args)
frappe.local.message_log = []
create_feed_and_todo()
frappe.local.message_log = []
create_email_digest()
frappe.local.message_log = []
create_letter_head(args)
frappe.local.message_log = []
create_taxes(args)
frappe.local.message_log = []
create_items(args)
frappe.local.message_log = []
create_customers(args)
frappe.local.message_log = []
create_suppliers(args)
frappe.local.message_log = []
frappe.db.set_default('desktop:home_page', 'desktop')
website_maker(args.company_name.strip(), args.company_tagline, args.name)
create_logo(args)
frappe.clear_cache()
frappe.db.commit()
except:
if args:
traceback = frappe.get_traceback()
for hook in frappe.get_hooks("setup_wizard_exception"):
frappe.get_attr(hook)(traceback, args)
raise
else:
for hook in frappe.get_hooks("setup_wizard_success"):
frappe.get_attr(hook)(args)
def update_user_name(args):
if args.get("email"):
args['name'] = args.get("email")
frappe.flags.mute_emails = True
frappe.get_doc({
"doctype":"User",
"email": args.get("email"),
"first_name": args.get("first_name"),
"last_name": args.get("last_name")
}).insert()
frappe.flags.mute_emails = False
from frappe.auth import _update_password
_update_password(args.get("email"), args.get("password"))
else:
args['name'] = frappe.session.user
# Update User
if not args.get('last_name') or args.get('last_name')=='None':
args['last_name'] = None
frappe.db.sql("""update `tabUser` SET first_name=%(first_name)s,
last_name=%(last_name)s WHERE name=%(name)s""", args)
if args.get("attach_user"):
attach_user = args.get("attach_user").split(",")
if len(attach_user)==3:
filename, filetype, content = attach_user
fileurl = save_file(filename, content, "User", args.get("name"), decode=True).file_url
frappe.db.set_value("User", args.get("name"), "user_image", fileurl)
add_all_roles_to(args.get("name"))
def create_fiscal_year_and_company(args):
curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
frappe.get_doc({
"doctype":"Fiscal Year",
'year': curr_fiscal_year,
'year_start_date': args.get('fy_start_date'),
'year_end_date': args.get('fy_end_date'),
}).insert()
# Company
frappe.get_doc({
"doctype":"Company",
'domain': args.get("industry"),
'company_name':args.get('company_name').strip(),
'abbr':args.get('company_abbr'),
'default_currency':args.get('currency'),
'country': args.get('country'),
'chart_of_accounts': args.get(('chart_of_accounts')),
}).insert()
args["curr_fiscal_year"] = curr_fiscal_year
def create_price_lists(args):
for pl_type, pl_name in (("Selling", _("Standard Selling")), ("Buying", _("Standard Buying"))):
frappe.get_doc({
"doctype": "Price List",
"price_list_name": pl_name,
"enabled": 1,
"buying": 1 if pl_type == "Buying" else 0,
"selling": 1 if pl_type == "Selling" else 0,
"currency": args["currency"],
"valid_for_territories": [{
"territory": get_root_of("Territory")
}]
}).insert()
def set_defaults(args):
# enable default currency
frappe.db.set_value("Currency", args.get("currency"), "enabled", 1)
global_defaults = frappe.get_doc("Global Defaults", "Global Defaults")
global_defaults.update({
'current_fiscal_year': args.curr_fiscal_year,
'default_currency': args.get('currency'),
'default_company':args.get('company_name').strip(),
"country": args.get("country"),
})
global_defaults.save()
number_format = get_country_info(args.get("country")).get("number_format", "#,###.##")
# replace these with float number formats, since they have 0 precision
# and are currency number formats rather than float formats
if number_format=="#.###":
number_format = "#.###,##"
elif number_format=="#,###":
number_format = "#,###.##"
system_settings = frappe.get_doc("System Settings", "System Settings")
system_settings.update({
"language": args.get("language"),
"time_zone": args.get("timezone"),
"float_precision": 3,
'date_format': frappe.db.get_value("Country", args.get("country"), "date_format"),
'number_format': number_format,
'enable_scheduler': 1
})
system_settings.save()
accounts_settings = frappe.get_doc("Accounts Settings")
accounts_settings.auto_accounting_for_stock = 1
accounts_settings.save()
stock_settings = frappe.get_doc("Stock Settings")
stock_settings.item_naming_by = "Item Code"
stock_settings.valuation_method = "FIFO"
stock_settings.stock_uom = _("Nos")
stock_settings.auto_indent = 1
stock_settings.save()
selling_settings = frappe.get_doc("Selling Settings")
selling_settings.cust_master_name = "Customer Name"
selling_settings.so_required = "No"
selling_settings.dn_required = "No"
selling_settings.save()
buying_settings = frappe.get_doc("Buying Settings")
buying_settings.supp_master_name = "Supplier Name"
buying_settings.po_required = "No"
buying_settings.pr_required = "No"
buying_settings.maintain_same_rate = 1
buying_settings.save()
notification_control = frappe.get_doc("Notification Control")
notification_control.quotation = 1
notification_control.sales_invoice = 1
notification_control.purchase_order = 1
notification_control.save()
hr_settings = frappe.get_doc("HR Settings")
hr_settings.emp_created_by = "Naming Series"
hr_settings.save()
def create_feed_and_todo():
"""update activty feed and create todo for creation of item, customer, vendor"""
from erpnext.home import make_feed
make_feed('Comment', 'ToDo', '', frappe.session['user'],
'ERPNext Setup Complete!', '#6B24B3')
def create_email_digest():
from frappe.utils.user import get_system_managers
system_managers = get_system_managers(only_name=True)
if not system_managers:
return
companies = frappe.db.sql_list("select name FROM `tabCompany`")
for company in companies:
if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
edigest = frappe.get_doc({
"doctype": "Email Digest",
"name": "Default Weekly Digest - " + company,
"company": company,
"frequency": "Weekly",
"recipient_list": "\n".join(system_managers)
})
for fieldname in edigest.meta.get("fields", {"fieldtype": "Check"}):
if fieldname != "scheduler_errors":
edigest.set(fieldname, 1)
edigest.insert()
# scheduler errors digest
if companies:
edigest = frappe.new_doc("Email Digest")
edigest.update({
"name": "Scheduler Errors",
"company": companies[0],
"frequency": "Daily",
"recipient_list": "\n".join(system_managers),
"scheduler_errors": 1,
"enabled": 1
})
edigest.insert()
def get_fy_details(fy_start_date, fy_end_date):
start_year = getdate(fy_start_date).year
if start_year == getdate(fy_end_date).year:
fy = cstr(start_year)
else:
fy = cstr(start_year) + '-' + cstr(start_year + 1)
return fy
def create_taxes(args):
for i in xrange(1,6):
if args.get("tax_" + str(i)):
# replace % in case someone also enters the % symbol
tax_rate = (args.get("tax_rate_" + str(i)) or "").replace("%", "")
try:
frappe.get_doc({
"doctype":"Account",
"company": args.get("company_name").strip(),
"parent_account": _("Duties and Taxes") + " - " + args.get("company_abbr"),
"account_name": args.get("tax_" + str(i)),
"group_or_ledger": "Ledger",
"report_type": "Balance Sheet",
"account_type": "Tax",
"tax_rate": flt(tax_rate) if tax_rate else None
}).insert()
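# MySQL reports errno 1062 (duplicate entry) when the tax account already exists; in that case keep the existing account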
except frappe.NameError, e:
if e.args[2][0]==1062:
pass
else:
raise
def create_items(args):
for i in xrange(1,6):
item = args.get("item_" + str(i))
if item:
item_group = args.get("item_group_" + str(i))
is_sales_item = args.get("is_sales_item_" + str(i))
is_purchase_item = args.get("is_purchase_item_" + str(i))
is_stock_item = item_group!=_("Services")
default_warehouse = ""
if is_stock_item:
if is_sales_item:
default_warehouse = _("Finished Goods") + " - " + args.get("company_abbr")
else:
default_warehouse = _("Stores") + " - " + args.get("company_abbr")
frappe.get_doc({
"doctype":"Item",
"item_code": item,
"item_name": item,
"description": item,
"is_sales_item": "Yes" if is_sales_item else "No",
"is_purchase_item": "Yes" if is_purchase_item else "No",
"show_in_website": 1,
"is_stock_item": is_stock_item and "Yes" or "No",
"item_group": item_group,
"stock_uom": args.get("item_uom_" + str(i)),
"default_warehouse": default_warehouse
}).insert()
if args.get("item_img_" + str(i)):
item_image = args.get("item_img_" + str(i)).split(",")
if len(item_image)==3:
filename, filetype, content = item_image
fileurl = save_file(filename, content, "Item", item, decode=True).file_url
frappe.db.set_value("Item", item, "image", fileurl)
def create_customers(args):
for i in xrange(1,6):
customer = args.get("customer_" + str(i))
if customer:
frappe.get_doc({
"doctype":"Customer",
"customer_name": customer,
"customer_type": "Company",
"customer_group": _("Commercial"),
"territory": args.get("country"),
"company": args.get("company_name").strip()
}).insert()
if args.get("customer_contact_" + str(i)):
contact = args.get("customer_contact_" + str(i)).split(" ")
frappe.get_doc({
"doctype":"Contact",
"customer": customer,
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
}).insert()
def create_suppliers(args):
for i in xrange(1,6):
supplier = args.get("supplier_" + str(i))
if supplier:
frappe.get_doc({
"doctype":"Supplier",
"supplier_name": supplier,
"supplier_type": _("Local"),
"company": args.get("company_name").strip()
}).insert()
if args.get("supplier_contact_" + str(i)):
contact = args.get("supplier_contact_" + str(i)).split(" ")
frappe.get_doc({
"doctype":"Contact",
"supplier": supplier,
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
}).insert()
def create_letter_head(args):
if args.get("attach_letterhead"):
frappe.get_doc({
"doctype":"Letter Head",
"letter_head_name": _("Standard"),
"is_default": 1
}).insert()
attach_letterhead = args.get("attach_letterhead").split(",")
if len(attach_letterhead)==3:
filename, filetype, content = attach_letterhead
fileurl = save_file(filename, content, "Letter Head", _("Standard"), decode=True).file_url
frappe.db.set_value("Letter Head", _("Standard"), "content", "<img src='%s' style='max-width: 100%%;'>" % fileurl)
def create_logo(args):
if args.get("attach_logo"):
attach_logo = args.get("attach_logo").split(",")
if len(attach_logo)==3:
filename, filetype, content = attach_logo
fileurl = save_file(filename, content, "Website Settings", "Website Settings",
decode=True).file_url
frappe.db.set_value("Website Settings", "Website Settings", "banner_html",
"<img src='%s' style='max-width: 100%%;'>" % fileurl)
def add_all_roles_to(name):
user = frappe.get_doc("User", name)
for role in frappe.db.sql("""select name from tabRole"""):
if role[0] not in ["Administrator", "Guest", "All", "Customer", "Supplier", "Partner", "Employee"]:
d = user.append("user_roles")
d.role = role[0]
user.save()
def create_territories():
"""create two default territories, one for home country and one named Rest of the World"""
from frappe.utils.nestedset import get_root_of
country = frappe.db.get_default("country")
root_territory = get_root_of("Territory")
for name in (country, _("Rest Of The World")):
if name and not frappe.db.exists("Territory", name):
frappe.get_doc({
"doctype": "Territory",
"territory_name": name.replace("'", ""),
"parent_territory": root_territory,
"is_group": "No"
}).insert()
@frappe.whitelist()
def load_messages(language):
frappe.clear_cache()
lang = get_lang_dict()[language]
frappe.local.lang = lang
m = get_dict("page", "setup-wizard")
m.update(get_dict("boot"))
send_translations(m)
return lang
| agpl-3.0 |
rysson/filmkodi | plugin.video.fanfilm/resources/lib/libraries/cleantitle.py | 2 | 2591 | # -*- coding: utf-8 -*-
'''
FanFilm Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,unicodedata
def movie(title):
title = re.sub('\n|([[].+?[]])|([(].+?[)])|\s(vs|v[.])\s|(:|;|-|"|,|\'|\_|\.|\?)|\s', '', title).lower()
title = title.replace('&#39;','')
return title
def tv(title):
title = re.sub('\n|\s(|[(])(UK|US|AU|\d{4})(|[)])$|\s(vs|v[.])\s|(:|;|-|"|,|\'|\_|\.|\?)|\s', '', title).lower()
title = title.replace('&#39;','')
return title
def get(title):
if title == None: return
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('&quot;', '\"').replace('&amp;', '&')
title = re.sub('\n|([[].+?[]])|([(].+?[)])|\s(vs|v[.])\s|(:|;|-|"|,|\'|\_|\.|\?)|\s', '', title).lower()
return title
def query(title):
if title == None: return
title = title.replace('\'', '').rsplit(':', 1)[0]
return title
def query2(title):
if title == None: return
#title = title.replace('\'', '').replace('-', '')
title = title.replace('-', '')
return title
def query10(title):
if title == None: return
title = title.replace('\'', '').replace(':','').replace('.','').replace(' ','-').lower()
return title
def normalize(title):
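# Strip accents: try the pure-ASCII fast path first, otherwise decompose each
# character with NFKD and keep only its ASCII part (spaces are preserved as-is).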
try:
try: return title.decode('ascii').encode("utf-8")
except: pass
t = ''
for i in title:
c = unicodedata.normalize('NFKD',unicode(i,"ISO-8859-1"))
c = c.encode("ascii","ignore").strip()
if i == ' ': c = i
t += c
return t.encode("utf-8")
except:
return title
def getsearch(title):
if title is None: return
title = title.lower()
title = re.sub('&#(\d+);', '', title)
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('&quot;', '\"').replace('&amp;', '&')
title = re.sub('\\\|/|-|–|:|;|\*|\?|"|\'|<|>|\|', '', title).lower()
return title | apache-2.0 |
heihachi/PokemonGo-Bot | pokemongo_bot/walkers/polyline_generator.py | 8 | 7061 | # -*- coding: utf-8 -*-
from geographiclib.geodesic import Geodesic
from itertools import chain
import math
import polyline
import requests
from geopy.distance import great_circle
def distance(point1, point2):
return Geodesic.WGS84.Inverse(point1[0], point1[1], point2[0], point2[1])["s12"] # @UndefinedVariable
class PolylineObjectHandler:
'''
Does this need to be a class?
More like a namespace...
'''
_cache = None
_instability = 0
_run = False
@staticmethod
def cached_polyline(origin, destination, google_map_api_key=None):
'''
Google API has limits, so we can't generate a new Polyline at every tick...
'''
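# _instability is a small hysteresis counter: it grows whenever a fresh polyline
# has to be fetched and shrinks while the cache is reused; once it reaches 10 the
# handler sets _run and sticks with the current cache for the next ~20 moves to
# avoid hammering the Directions API when the target keeps changing.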
# Absolute offset between bot origin and PolyLine get_last_pos() (in meters)
if PolylineObjectHandler._cache and PolylineObjectHandler._cache.get_last_pos() != (None, None):
abs_offset = distance(origin, PolylineObjectHandler._cache.get_last_pos())
else:
abs_offset = float("inf")
is_old_cache = lambda : abs_offset > 8 # Consider cache old if we identified an offset of more than 8 m
new_dest_set = lambda : tuple(destination) != PolylineObjectHandler._cache.destination
if PolylineObjectHandler._run and (not is_old_cache()):
# bot used to struggle with making a decision.
PolylineObjectHandler._instability -= 1
if PolylineObjectHandler._instability <= 0:
PolylineObjectHandler._instability = 0
PolylineObjectHandler._run = False
pass # use current cache
elif None == PolylineObjectHandler._cache or is_old_cache() or new_dest_set():
# no cache, old cache or new destination set by bot, so make new polyline
PolylineObjectHandler._instability += 2
if 10 <= PolylineObjectHandler._instability:
PolylineObjectHandler._run = True
PolylineObjectHandler._instability = 20 # next N moves use same cache
PolylineObjectHandler._cache = Polyline(origin, destination, google_map_api_key)
else:
# valid cache found
PolylineObjectHandler._instability -= 1
PolylineObjectHandler._instability = max(PolylineObjectHandler._instability, 0)
pass # use current cache
return PolylineObjectHandler._cache
class Polyline(object):
def __init__(self, origin, destination, google_map_api_key=None):
self.origin = origin
self.destination = tuple(destination)
self.DIRECTIONS_API_URL = 'https://maps.googleapis.com/maps/api/directions/json?mode=walking'
self.DIRECTIONS_URL = '{}&origin={}&destination={}'.format(self.DIRECTIONS_API_URL,
'{},{}'.format(*self.origin),
'{},{}'.format(*self.destination))
if google_map_api_key:
self.DIRECTIONS_URL = '{}&key={}'.format(self.DIRECTIONS_URL, google_map_api_key)
self._directions_response = requests.get(self.DIRECTIONS_URL).json()
try:
self._directions_encoded_points = [x['polyline']['points'] for x in
self._directions_response['routes'][0]['legs'][0]['steps']]
except IndexError:
# This handles both cases:
# a) In case the API limit is reached, we get a status 200 code back with an empty routes []
# {u'error_message': u'You have exceeded your rate-limit for this API.',
# u'routes': [],
# u'status': u'OVER_QUERY_LIMIT'
# }
# b) In case that google does not have any directions proposals we get:
# ZERO_RESULTS {
# "geocoded_waypoints" : [ {}, {} ],
# "routes" : [],
# "status" : "ZERO_RESULTS"
# }
self._directions_encoded_points = self._directions_response['routes']
self._points = [self.origin] + self._get_directions_points() + [self.destination]
self._polyline = self._get_encoded_points()
self._last_pos = self._points[0]
self._step_dict = self._get_steps_dict()
self._step_keys = sorted(self._step_dict.keys())
self._last_step = 0
self._nr_samples = int(max(min(self.get_total_distance() / 3, 512), 2))
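# request roughly one elevation sample every 3 m along the route, clamped to the 2..512 range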
self.ELEVATION_API_URL = 'https://maps.googleapis.com/maps/api/elevation/json?path=enc:'
self.ELEVATION_URL = '{}{}&samples={}'.format(self.ELEVATION_API_URL,
self._polyline, self._nr_samples)
if google_map_api_key:
self.ELEVATION_URL = '{}&key={}'.format(self.ELEVATION_URL, google_map_api_key)
self._elevation_response = requests.get(self.ELEVATION_URL).json()
self._elevation_at_point = dict((tuple(x['location'].values()),
x['elevation']) for x in
self._elevation_response['results'])
def _get_directions_points(self):
points = []
for point in self._directions_encoded_points:
points += polyline.decode(point)
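# drop duplicate coordinates while preserving their original order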
return [x for n, x in enumerate(points) if x not in points[:n]]
def _get_encoded_points(self):
return polyline.encode(self._points)
def _get_walk_steps(self):
if self._points:
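# pair each point with its successor (padding with the end points); identical pairs, i.e. zero-length steps, are filtered out below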
steps = zip(chain([self._points[0]], self._points),
chain(self._points, [self._points[-1]]))
steps = filter(None, [(o, d) if o != d else None for o, d in steps])
# consume the filter as list
return list(steps)
else:
return []
def _get_steps_dict(self):
walked_distance = 0.0
steps_dict = {}
for step in self._get_walk_steps():
walked_distance += distance(*step)
steps_dict[walked_distance] = step
return steps_dict
def get_alt(self, at_point=None):
if at_point is None:
at_point = self._last_pos
if self._elevation_at_point:
elevations = sorted([(great_circle(at_point, k).meters, v, k) for k, v in self._elevation_at_point.items()])
if len(elevations) == 1:
return elevations[0][1]
else:
(distance_to_p1, ep1, p1), (distance_to_p2, ep2, p2) = elevations[:2]
distance_p1_p2 = great_circle(p1, p2).meters
return self._get_relative_hight(ep1, ep2, distance_p1_p2, distance_to_p1, distance_to_p2)
else:
return None
def _get_relative_hight(self, ep1, ep2, distance_p1_p2, distance_to_p1, distance_to_p2):
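# estimate the elevation at the current position by weighting the elevation difference
# between the two nearest samples by the position's projection onto the p1-p2 segment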
hdelta = ep2 - ep1
elevation = ((math.pow(distance_p1_p2,2) + math.pow(distance_to_p1,2) - math.pow(distance_to_p2,2)) * hdelta)/ (3 * distance_p1_p2) + ep1
return elevation
def get_total_distance(self):
return math.ceil(sum([distance(*x) for x in self._get_walk_steps()]))
def get_last_pos(self):
return self._last_pos
| mit |
geier/alot | tests/commands/global_test.py | 1 | 7054 | # encoding=utf-8
# Copyright © 2017 Dylan Baker
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Tests for global commands."""
from __future__ import absolute_import
import os
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
import mock
from alot.commands import globals as g_commands
class Stop(Exception):
"""exception for stopping testing of giant unmanagable functions."""
pass
class TestComposeCommand(unittest.TestCase):
"""Tests for the compose command."""
@staticmethod
def _make_envelope_mock():
envelope = mock.Mock()
envelope.headers = {'From': 'foo <[email protected]>'}
envelope.get = envelope.headers.get
envelope.sign_key = None
envelope.sign = False
return envelope
@staticmethod
def _make_account_mock(sign_by_default=True, gpg_key=mock.sentinel.gpg_key):
account = mock.Mock()
account.sign_by_default = sign_by_default
account.gpg_key = gpg_key
account.signature = None
return account
@inlineCallbacks
def test_apply_sign_by_default_okay(self):
envelope = self._make_envelope_mock()
account = self._make_account_mock()
cmd = g_commands.ComposeCommand(envelope=envelope)
# This whole mess is required because ComposeCommand.apply is waaaaay
# too complicated; it needs to be split into more manageable segments.
with mock.patch('alot.commands.globals.settings.get_account_by_address',
mock.Mock(return_value=account)):
with mock.patch('alot.commands.globals.settings.get_accounts',
mock.Mock(return_value=[account])):
with mock.patch('alot.commands.globals.settings.get_addressbooks',
mock.Mock(side_effect=Stop)):
try:
yield cmd.apply(mock.Mock())
except Stop:
pass
self.assertTrue(envelope.sign)
self.assertIs(envelope.sign_key, mock.sentinel.gpg_key)
@inlineCallbacks
def test_apply_sign_by_default_false_doesnt_set_key(self):
envelope = self._make_envelope_mock()
account = self._make_account_mock(sign_by_default=False)
cmd = g_commands.ComposeCommand(envelope=envelope)
# This whole mess is required because ComposeCommand.apply is waaaaay
# too complicated; it needs to be split into more manageable segments.
with mock.patch('alot.commands.globals.settings.get_account_by_address',
mock.Mock(return_value=account)):
with mock.patch('alot.commands.globals.settings.get_accounts',
mock.Mock(return_value=[account])):
with mock.patch('alot.commands.globals.settings.get_addressbooks',
mock.Mock(side_effect=Stop)):
try:
yield cmd.apply(mock.Mock())
except Stop:
pass
self.assertFalse(envelope.sign)
self.assertIs(envelope.sign_key, None)
@inlineCallbacks
def test_apply_sign_by_default_but_no_key(self):
envelope = self._make_envelope_mock()
account = self._make_account_mock(gpg_key=None)
cmd = g_commands.ComposeCommand(envelope=envelope)
# This whole mess is required because ComposeCommand.apply is waaaaay
# too complicated; it needs to be split into more manageable segments.
with mock.patch('alot.commands.globals.settings.get_account_by_address',
mock.Mock(return_value=account)):
with mock.patch('alot.commands.globals.settings.get_accounts',
mock.Mock(return_value=[account])):
with mock.patch('alot.commands.globals.settings.get_addressbooks',
mock.Mock(side_effect=Stop)):
try:
yield cmd.apply(mock.Mock())
except Stop:
pass
self.assertFalse(envelope.sign)
self.assertIs(envelope.sign_key, None)
class TestExternalCommand(unittest.TestCase):
def test_no_spawn_no_stdin_success(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u'true', refocus=False)
cmd.apply(ui)
ui.notify.assert_not_called()
def test_no_spawn_stdin_success(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u"awk '{ exit $0 }'", stdin=u'0',
refocus=False)
cmd.apply(ui)
ui.notify.assert_not_called()
def test_no_spawn_no_stdin_attached(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u'test -t 0', refocus=False)
cmd.apply(ui)
ui.notify.assert_not_called()
def test_no_spawn_stdin_attached(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u"test -t 0", stdin=u'0', refocus=False)
cmd.apply(ui)
ui.notify.assert_called_once_with('', priority='error')
def test_no_spawn_failure(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u'false', refocus=False)
cmd.apply(ui)
ui.notify.assert_called_once_with('', priority='error')
@mock.patch('alot.commands.globals.settings.get', mock.Mock(return_value=''))
@mock.patch.dict(os.environ, {'DISPLAY': ':0'})
def test_spawn_no_stdin_success(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u'true', refocus=False, spawn=True)
cmd.apply(ui)
ui.notify.assert_not_called()
@mock.patch('alot.commands.globals.settings.get', mock.Mock(return_value=''))
@mock.patch.dict(os.environ, {'DISPLAY': ':0'})
def test_spawn_stdin_success(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(
u"awk '{ exit $0 }'",
stdin=u'0', refocus=False, spawn=True)
cmd.apply(ui)
ui.notify.assert_not_called()
@mock.patch('alot.commands.globals.settings.get', mock.Mock(return_value=''))
@mock.patch.dict(os.environ, {'DISPLAY': ':0'})
def test_spawn_failure(self):
ui = mock.Mock()
cmd = g_commands.ExternalCommand(u'false', refocus=False, spawn=True)
cmd.apply(ui)
ui.notify.assert_called_once_with('', priority='error')
| gpl-3.0 |
nirmeshk/oh-mainline | vendor/packages/python-social-auth/social/tests/backends/test_yahoo.py | 76 | 2535 | import json
import requests
from httpretty import HTTPretty
from social.p3 import urlencode
from social.tests.backends.oauth import OAuth1Test
class YahooOAuth1Test(OAuth1Test):
backend_path = 'social.backends.yahoo.YahooOAuth'
user_data_url = 'https://social.yahooapis.com/v1/user/a-guid/profile?' \
'format=json'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
request_token_body = urlencode({
'oauth_token_secret': 'foobar-secret',
'oauth_token': 'foobar',
'oauth_callback_confirmed': 'true'
})
guid_body = json.dumps({
'guid': {
'uri': 'https://social.yahooapis.com/v1/me/guid',
'value': 'a-guid'
}
})
user_data_body = json.dumps({
'profile': {
'bdRestricted': True,
'memberSince': '2007-12-11T14:40:30Z',
'image': {
'width': 192,
'imageUrl': 'http://l.yimg.com/dh/ap/social/profile/'
'profile_b192.png',
'size': '192x192',
'height': 192
},
'created': '2013-03-18T04:15:08Z',
'uri': 'https://social.yahooapis.com/v1/user/a-guid/profile',
'isConnected': False,
'profileUrl': 'http://profile.yahoo.com/a-guid',
'guid': 'a-guid',
'nickname': 'foobar',
'emails': [{
'handle': '[email protected]',
'id': 1,
'primary': True,
'type': 'HOME',
}, {
'handle': '[email protected]',
'id': 2,
'type': 'HOME',
}],
}
})
def test_login(self):
HTTPretty.register_uri(
HTTPretty.GET,
'https://social.yahooapis.com/v1/me/guid?format=json',
status=200,
body=self.guid_body
)
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
def test_get_user_details(self):
HTTPretty.register_uri(
HTTPretty.GET,
self.user_data_url,
status=200,
body=self.user_data_body
)
response = requests.get(self.user_data_url)
user_details = self.backend.get_user_details(
response.json()['profile']
)
self.assertEqual(user_details['email'], '[email protected]')
| agpl-3.0 |
Red680812/android_44_KitKat_kernel_htc_dlxpul-2 | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
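# autodict creates the nested comm/pid levels on demand; the first increment of a
# missing syscall id raises TypeError, which the except clause uses to seed the counter at 1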
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
creasyw/IMTAphy | documentation/doctools/tags/0.4.2/sphinx/util/console.py | 9 | 1809 | # -*- coding: utf-8 -*-
"""
sphinx.util.console
~~~~~~~~~~~~~~~~~~~
Format colored console output.
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
codes = {}
def get_terminal_width():
"""Borrowed from the py lib."""
try:
import os, termios, fcntl, struct
call = fcntl.ioctl(0, termios.TIOCGWINSZ, "\000"*8)
height, width = struct.unpack("hhhh", call)[:2]
terminal_width = width
except (SystemExit, KeyboardInterrupt):
raise
except:
# FALLBACK
terminal_width = int(os.environ.get('COLUMNS', 80))-1
return terminal_width
_tw = get_terminal_width()
def print_and_backspace(text, func):
if not codes:
# if no coloring, don't output fancy backspaces
func(text)
else:
func(text.ljust(_tw) + _tw * "\b")
def nocolor():
codes.clear()
def coloron():
codes.update(_orig_codes)
def colorize(name, text):
return codes.get(name, '') + text + codes.get('reset', '')
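# define a module-level helper per escape-code name (e.g. bold(), darkred()) that wraps text in that code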
def create_color_func(name):
def inner(text):
return colorize(name, text)
globals()[name] = inner
_attrs = {
'reset': '39;49;00m',
'bold': '01m',
'faint': '02m',
'standout': '03m',
'underline': '04m',
'blink': '05m',
}
for _name, _value in _attrs.items():
codes[_name] = '\x1b[' + _value
_colors = [
('black', 'darkgray'),
('darkred', 'red'),
('darkgreen', 'green'),
('brown', 'yellow'),
('darkblue', 'blue'),
('purple', 'fuchsia'),
('turquoise', 'teal'),
('lightgray', 'white'),
]
for i, (dark, light) in enumerate(_colors):
codes[dark] = '\x1b[%im' % (i+30)
codes[light] = '\x1b[%i;01m' % (i+30)
_orig_codes = codes.copy()
for _name in codes:
create_color_func(_name)
| gpl-2.0 |
PriceChild/ansible | lib/ansible/module_utils/junos.py | 16 | 6193 | #
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from contextlib import contextmanager
from xml.etree.ElementTree import Element, SubElement, tostring
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.netconf import send_request, children
from ansible.module_utils.netconf import discard_changes, validate
from ansible.module_utils.network_common import to_list
from ansible.module_utils.six import string_types
ACTIONS = frozenset(['merge', 'override', 'replace', 'update', 'set'])
JSON_ACTIONS = frozenset(['merge', 'override', 'update'])
FORMATS = frozenset(['xml', 'text', 'json'])
CONFIG_FORMATS = frozenset(['xml', 'text', 'json', 'set'])
junos_argument_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'timeout': dict(type='int', default=10),
'provider': dict(type='dict', no_log=True),
'transport': dict()
}
def check_args(module, warnings):
provider = module.params['provider'] or {}
for key in junos_argument_spec:
if key in ('provider',) and module.params[key]:
warnings.append('argument %s has been deprecated and will be '
'removed in a future version' % key)
def _validate_rollback_id(value):
try:
if not 0 <= int(value) <= 49:
raise ValueError
except ValueError:
module.fail_json(msg='rollback must be between 0 and 49')
def load_configuration(module, candidate=None, action='merge', rollback=None, format='xml'):
if all((candidate is None, rollback is None)):
module.fail_json(msg='one of candidate or rollback must be specified')
elif all((candidate is not None, rollback is not None)):
module.fail_json(msg='candidate and rollback are mutually exclusive')
if format not in FORMATS:
module.fail_json(msg='invalid format specified')
if format == 'json' and action not in JSON_ACTIONS:
module.fail_json(msg='invalid action for format json')
elif format in ('text', 'xml') and action not in ACTIONS:
module.fail_json(msg='invalid action format %s' % format)
if action == 'set' and not format == 'text':
module.fail_json(msg='format must be text when action is set')
if rollback is not None:
_validate_rollback_id(rollback)
xattrs = {'rollback': str(rollback)}
else:
xattrs = {'action': action, 'format': format}
obj = Element('load-configuration', xattrs)
if candidate is not None:
lookup = {'xml': 'configuration', 'text': 'configuration-text',
'set': 'configuration-set', 'json': 'configuration-json'}
if action == 'set':
cfg = SubElement(obj, 'configuration-set')
else:
cfg = SubElement(obj, lookup[format])
if isinstance(candidate, string_types):
cfg.text = candidate
else:
cfg.append(candidate)
return send_request(module, obj)
def get_configuration(module, compare=False, format='xml', rollback='0'):
if format not in CONFIG_FORMATS:
module.fail_json(msg='invalid config format specified')
xattrs = {'format': format}
if compare:
_validate_rollback_id(rollback)
xattrs['compare'] = 'rollback'
xattrs['rollback'] = str(rollback)
return send_request(module, Element('get-configuration', xattrs))
def commit_configuration(module, confirm=False, check=False, comment=None, confirm_timeout=None):
obj = Element('commit-configuration')
if confirm:
SubElement(obj, 'confirmed')
if check:
SubElement(obj, 'check')
if comment:
subele = SubElement(obj, 'log')
subele.text = str(comment)
if confirm_timeout:
subele = SubElement(obj, 'confirm-timeout')
subele.text = int(confirm_timeout)
return send_request(module, obj)
def command(module, command, format='text', rpc_only=False):
xattrs = {'format': format}
if rpc_only:
command += ' | display xml rpc'
xattrs['format'] = 'text'
return send_request(module, Element('command', xattrs, text=command))
lock_configuration = lambda x: send_request(x, Element('lock-configuration'))
unlock_configuration = lambda x: send_request(x, Element('unlock-configuration'))
@contextmanager
def locked_config(module):
try:
lock_configuration(module)
yield
finally:
unlock_configuration(module)
def get_diff(module):
reply = get_configuration(module, compare=True, format='text')
output = reply.find('.//configuration-output')
if output is not None:
return output.text
def load_config(module, candidate, action='merge', commit=False, format='xml',
comment=None, confirm=False, confirm_timeout=None):
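# under a configuration lock: load the candidate, validate it, read the diff, then
# either commit (optionally with confirmation) or discard the changes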
with locked_config(module):
if isinstance(candidate, list):
candidate = '\n'.join(candidate)
reply = load_configuration(module, candidate, action=action, format=format)
validate(module)
diff = get_diff(module)
if diff:
diff = str(diff).strip()
if commit:
commit_configuration(module, confirm=confirm, comment=comment,
confirm_timeout=confirm_timeout)
else:
discard_changes(module)
return diff
| gpl-3.0 |
DonaldTrumpHasTinyHands/tiny_hands_pac | products/models.py | 1 | 3762 | from django.db import models
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailimages.models import Image
from wagtail.wagtailadmin.edit_handlers import (
FieldPanel, MultiFieldPanel, InlinePanel
)
from wagtail.wagtailsearch import index
from wagtail.wagtailsnippets.models import register_snippet
from wagtail.wagtailsnippets.edit_handlers import SnippetChooserPanel
from modelcluster.fields import ParentalKey
from modelcluster.tags import ClusterTaggableManager
from taggit.models import Tag, TaggedItemBase
from utils.models import LinkFields, RelatedLink, CarouselItem
# Product page
class ProductIndexPageRelatedLink(Orderable, RelatedLink):
page = ParentalKey('products.ProductIndexPage', related_name='related_links')
class ProductIndexPage(Page):
subtitle = models.CharField(max_length=255, blank=True)
intro = RichTextField(blank=True)
indexed_fields = ('intro', )
@property
def products(self):
# Get list of live blog pages that are descendants of this page
products = ProductPage.objects.live().descendant_of(self)
return products
@property
def tag_list(self):
tag_ids = ProductPageTag.objects.all().values_list('tag_id', flat=True)
return Tag.objects.filter(pk__in=tag_ids)
def get_context(self, request):
# Get products
products = self.products
# Filter by tag
tag = request.GET.get('tag')
if tag:
products = products.filter(tags__name=tag)
# Pagination
page = request.GET.get('page')
paginator = Paginator(products, 12) # Show 12 products per page
try:
products = paginator.page(page)
except PageNotAnInteger:
products = paginator.page(1)
except EmptyPage:
products = paginator.page(paginator.num_pages)
# Update template context
context = super(ProductIndexPage, self).get_context(request)
context['products'] = products
return context
ProductIndexPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('subtitle'),
FieldPanel('intro', classname="full"),
InlinePanel('related_links', label="Related links"),
]
class ProductPageRelatedLink(Orderable, RelatedLink):
page = ParentalKey('products.ProductPage', related_name='related_links')
class ProductPageTag(TaggedItemBase):
content_object = ParentalKey(
'products.ProductPage', related_name='tagged_items'
)
def __unicode__(self):
return self.name
class ProductPage(Page):
price = models.CharField(max_length=255, blank=True)
description = RichTextField(blank=True)
tags = ClusterTaggableManager(through=ProductPageTag, blank=True)
image = models.ForeignKey(
Image,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
feed_image = models.ForeignKey(
Image,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
indexed_fields = ('title', 'intro', 'biography')
ProductPage.content_panels = [
FieldPanel('title', classname="title"),
FieldPanel('price', classname="full"),
FieldPanel('description', classname="full"),
ImageChooserPanel('image'),
FieldPanel('tags'),
InlinePanel('related_links', label="Related links"),
]
ProductPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
] | mit |
shenyy/lily2-gem5 | tests/long/se/60.bzip2/test.py | 21 | 1751 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Korey Sewell
m5.util.addToPath('../configs/common')
from cpu2000 import bzip2_source
workload = bzip2_source(isa, opsys, 'lgred')
root.system.cpu.workload = workload.makeLiveProcess()
| bsd-3-clause |
tmpgit/intellij-community | python/lib/Lib/site-packages/django/contrib/localflavor/ro/ro_counties.py | 428 | 1231 | # -*- coding: utf-8 -*-
"""
A list of Romanian counties as `choices` in a formfield.
This exists as a standalone file so that it's only imported into memory when
explicitly needed.
"""
COUNTIES_CHOICES = (
('AB', u'Alba'),
('AR', u'Arad'),
('AG', u'Argeş'),
('BC', u'Bacău'),
('BH', u'Bihor'),
('BN', u'Bistriţa-Năsăud'),
('BT', u'Botoşani'),
('BV', u'Braşov'),
('BR', u'Brăila'),
('B', u'Bucureşti'),
('BZ', u'Buzău'),
('CS', u'Caraş-Severin'),
('CL', u'Călăraşi'),
('CJ', u'Cluj'),
('CT', u'Constanţa'),
('CV', u'Covasna'),
('DB', u'Dâmboviţa'),
('DJ', u'Dolj'),
('GL', u'Galaţi'),
('GR', u'Giurgiu'),
('GJ', u'Gorj'),
('HR', u'Harghita'),
('HD', u'Hunedoara'),
('IL', u'Ialomiţa'),
('IS', u'Iaşi'),
('IF', u'Ilfov'),
('MM', u'Maramureş'),
('MH', u'Mehedinţi'),
('MS', u'Mureş'),
('NT', u'Neamţ'),
('OT', u'Olt'),
('PH', u'Prahova'),
('SM', u'Satu Mare'),
('SJ', u'Sălaj'),
('SB', u'Sibiu'),
('SV', u'Suceava'),
('TR', u'Teleorman'),
('TM', u'Timiş'),
('TL', u'Tulcea'),
('VS', u'Vaslui'),
('VL', u'Vâlcea'),
('VN', u'Vrancea'),
)
| apache-2.0 |
pfnet/chainer | tests/chainermn_tests/iterators_tests/test_iterator_compatibility.py | 8 | 5655 | # This test is based on Chainer's iterator compatibility test.
# The major change is that we do not test
# the order SerialIterator -> MultiNodeIterator,
# because the slave iterator must synchronize the batch order with the master
# and thus should not accept overwriting the batch order by serialization.
# See: chainer/tests/chainer_tests/
# iterators_tests/test_iterator_compatibility.py (7e8f6cc)
import numpy
import platform
import pytest
import unittest
import chainer
import chainer.testing
import chainermn
class DummySerializer(chainer.serializer.Serializer):
def __init__(self, target):
super(DummySerializer, self).__init__()
self.target = target
def __getitem__(self, key):
raise NotImplementedError
def __call__(self, key, value):
self.target[key] = value
return self.target[key]
class DummyDeserializer(chainer.serializer.Deserializer):
def __init__(self, target):
super(DummyDeserializer, self).__init__()
self.target = target
def __getitem__(self, key):
raise NotImplementedError
def __call__(self, key, value):
if value is None:
value = self.target[key]
elif isinstance(value, numpy.ndarray):
numpy.copyto(value, self.target[key])
else:
value = type(value)(numpy.asarray(self.target[key]))
return value
@chainer.testing.parameterize(*chainer.testing.product({
'iterator_class': [
chainer.iterators.SerialIterator,
chainer.iterators.MultiprocessIterator,
],
}))
class TestIteratorCompatibility(unittest.TestCase):
def setUp(self):
if self.iterator_class == chainer.iterators.MultiprocessIterator and \
int(platform.python_version_tuple()[0]) < 3:
pytest.skip('This test requires Python version >= 3')
self.communicator = chainermn.create_communicator('naive')
if self.communicator.size < 2:
pytest.skip('This test is for multinode only')
self.N = 6
self.dataset = numpy.arange(self.N).astype(numpy.float32)
self.bs = 2
def test_multi_node_iterator_compatibility(self):
iters = (
lambda: chainermn.iterators.create_multi_node_iterator(
self.iterator_class(
self.dataset, batch_size=self.bs),
self.communicator),
lambda: self.iterator_class(
self.dataset, batch_size=self.bs),
)
bs_n_ratio = 1. * self.bs / self.N
it_before, it_after = iters
it = it_before()
self.assertEqual(it.epoch, 0)
self.assertAlmostEqual(it.epoch_detail, 0 * bs_n_ratio)
batch1 = it.next()
self.assertEqual(len(batch1), self.bs)
self.assertIsInstance(batch1, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 1 * bs_n_ratio)
batch2 = it.next()
self.assertEqual(len(batch2), self.bs)
self.assertIsInstance(batch2, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 2 * bs_n_ratio)
target = dict()
it.serialize(DummySerializer(target))
it = it_after()
it.serialize(DummyDeserializer(target))
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 2 * bs_n_ratio)
batch3 = it.next()
self.assertEqual(len(batch3), self.bs)
self.assertIsInstance(batch3, list)
self.assertTrue(it.is_new_epoch)
self.assertEqual(
sorted(batch1 + batch2 + batch3),
self.dataset.tolist())
self.assertAlmostEqual(it.epoch_detail, 3 * bs_n_ratio)
def test_synchronized_iterator_compatibility(self):
"""
Do not use `chainer.testing.parameterize` to share the code with
`test_multi_node_iterator_compatibility` because pytest cannot
guarantee the execution order of tests produced by `parameterize`,
which causes unexpected behaviors with MPI programs.
"""
iters = (
lambda: chainermn.iterators.create_synchronized_iterator(
self.iterator_class(
self.dataset, batch_size=self.bs),
self.communicator),
lambda: self.iterator_class(
self.dataset, batch_size=self.bs),
)
bs_n_ratio = 1. * self.bs / self.N
it_before, it_after = iters
it = it_before()
self.assertEqual(it.epoch, 0)
self.assertAlmostEqual(it.epoch_detail, 0 * bs_n_ratio)
batch1 = it.next()
self.assertEqual(len(batch1), self.bs)
self.assertIsInstance(batch1, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 1 * bs_n_ratio)
batch2 = it.next()
self.assertEqual(len(batch2), self.bs)
self.assertIsInstance(batch2, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 2 * bs_n_ratio)
target = dict()
it.serialize(DummySerializer(target))
it = it_after()
it.serialize(DummyDeserializer(target))
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 2 * bs_n_ratio)
batch3 = it.next()
self.assertEqual(len(batch3), self.bs)
self.assertIsInstance(batch3, list)
self.assertTrue(it.is_new_epoch)
self.assertEqual(
sorted(batch1 + batch2 + batch3),
self.dataset.tolist())
self.assertAlmostEqual(it.epoch_detail, 3 * bs_n_ratio)
| mit |
cancro7/gem5 | src/arch/x86/isa/insts/system/segmentation.py | 25 | 7202 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# Copyright (c) 2012-2013 AMD
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop LGDT_M
{
.serializing
.adjust_env maxOsz
# Get the limit
ld t1, seg, sib, disp, dataSize=2
# Get the base
ld t2, seg, sib, 'adjustedDisp + 2'
wrbase tsg, t2
wrlimit tsg, t1
};
def macroop LGDT_P
{
.serializing
.adjust_env maxOsz
rdip t7
# Get the limit
ld t1, seg, riprel, disp, dataSize=2
# Get the base
ld t2, seg, riprel, 'adjustedDisp + 2'
wrbase tsg, t2
wrlimit tsg, t1
};
#
# These versions are for when the original data size was 16 bits. The base is
# still 32 bits, but the top byte is zeroed before being used.
#
def macroop LGDT_16_M
{
.serializing
.adjust_env maxOsz
# Get the limit
ld t1, seg, sib, disp, dataSize=2
# Get the base
ld t2, seg, sib, 'adjustedDisp + 2', dataSize=4
zexti t2, t2, 23, dataSize=8
wrbase tsg, t2, dataSize=8
wrlimit tsg, t1
};
def macroop LGDT_16_P
{
.serializing
.adjust_env maxOsz
rdip t7
# Get the limit
ld t1, seg, riprel, disp, dataSize=2
# Get the base
ld t2, seg, riprel, 'adjustedDisp + 2', dataSize=4
zexti t2, t2, 23, dataSize=8
wrbase tsg, t2
wrlimit tsg, t1
};
def macroop LIDT_M
{
.serializing
.adjust_env maxOsz
# Get the limit
ld t1, seg, sib, disp, dataSize=2
# Get the base
ld t2, seg, sib, 'adjustedDisp + 2'
wrbase idtr, t2
wrlimit idtr, t1
};
def macroop LIDT_P
{
.serializing
.adjust_env maxOsz
rdip t7
# Get the limit
ld t1, seg, riprel, disp, dataSize=2
# Get the base
ld t2, seg, riprel, 'adjustedDisp + 2'
wrbase idtr, t2
wrlimit idtr, t1
};
#
# These versions are for when the original data size was 16 bits. The base is
# still 32 bits, but the top byte is zeroed before being used.
#
def macroop LIDT_16_M
{
.serializing
.adjust_env maxOsz
# Get the limit
ld t1, seg, sib, disp, dataSize=2
# Get the base
ld t2, seg, sib, 'adjustedDisp + 2', dataSize=4
zexti t2, t2, 23, dataSize=8
wrbase idtr, t2, dataSize=8
wrlimit idtr, t1
};
def macroop LIDT_16_P
{
.serializing
.adjust_env maxOsz
rdip t7
# Get the limit
ld t1, seg, riprel, disp, dataSize=2
# Get the base
ld t2, seg, riprel, 'adjustedDisp + 2', dataSize=4
zexti t2, t2, 23, dataSize=8
wrbase idtr, t2
wrlimit idtr, t1
};
def macroop LTR_R
{
.serializing
chks reg, t0, TRCheck
limm t4, 0, dataSize=8
srli t4, reg, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks reg, t1, TSSCheck
wrdh t3, t1, t2
wrdl tr, t1, reg
wrbase tr, t3, dataSize=8
limm t5, (1 << 9)
or t1, t1, t5
st t1, tsg, [8, t4, t0], dataSize=8
};
def macroop LTR_M
{
.serializing
ld t5, seg, sib, disp, dataSize=2
chks t5, t0, TRCheck
limm t4, 0, dataSize=8
srli t4, t5, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks t5, t1, TSSCheck
wrdh t3, t1, t2
wrdl tr, t1, t5
wrbase tr, t3, dataSize=8
limm t5, (1 << 9)
or t1, t1, t5
st t1, tsg, [8, t4, t0], dataSize=8
};
def macroop LTR_P
{
.serializing
rdip t7
ld t5, seg, riprel, disp, dataSize=2
chks t5, t0, TRCheck
limm t4, 0, dataSize=8
srli t4, t5, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks t5, t1, TSSCheck
wrdh t3, t1, t2
wrdl tr, t1, t5
wrbase tr, t3, dataSize=8
limm t5, (1 << 9)
or t1, t1, t5
st t1, tsg, [8, t4, t0], dataSize=8
};
def macroop LLDT_R
{
.serializing
chks reg, t0, InGDTCheck, flags=(EZF,)
br label("end"), flags=(CEZF,)
limm t4, 0, dataSize=8
srli t4, reg, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks reg, t1, LDTCheck
wrdh t3, t1, t2
wrdl tsl, t1, reg
wrbase tsl, t3, dataSize=8
end:
fault "NoFault"
};
def macroop LLDT_M
{
.serializing
ld t5, seg, sib, disp, dataSize=2
chks t5, t0, InGDTCheck, flags=(EZF,)
br label("end"), flags=(CEZF,)
limm t4, 0, dataSize=8
srli t4, t5, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks t5, t1, LDTCheck
wrdh t3, t1, t2
wrdl tsl, t1, t5
wrbase tsl, t3, dataSize=8
end:
fault "NoFault"
};
def macroop LLDT_P
{
.serializing
rdip t7
ld t5, seg, riprel, disp, dataSize=2
chks t5, t0, InGDTCheck, flags=(EZF,)
br label("end"), flags=(CEZF,)
limm t4, 0, dataSize=8
srli t4, t5, 3, dataSize=2
ldst t1, tsg, [8, t4, t0], dataSize=8
ld t2, tsg, [8, t4, t0], 8, dataSize=8
chks t5, t1, LDTCheck
wrdh t3, t1, t2
wrdl tsl, t1, t5
wrbase tsl, t3, dataSize=8
end:
fault "NoFault"
};
def macroop SWAPGS
{
rdval t1, kernel_gs_base, dataSize=8
rdbase t2, gs, dataSize=8
wrbase gs, t1, dataSize=8
wrval kernel_gs_base, t2, dataSize=8
};
'''
| bsd-3-clause |
raphael0202/spaCy | spacy/fr/_tokenizer_exceptions_list.py | 3 | 513595 | # coding: utf8
from __future__ import unicode_literals
BASE_EXCEPTIONS = [
"0-day",
"0-days",
"1000Base-T",
"100Base-T",
"100Base-T4",
"100Base-TX",
"10BASE-F",
"10Base-T",
"1,1-diméthylhydrazine",
"11-septembre",
"11-Septembre",
"120-cellules",
"1,2,3-tris-nitrooxy-propane",
"1,2-diazine",
"1,2-dichloropropane",
"1,3-diazine",
"1,3-dichloropropène",
"14-18",
"1,4-diazine",
"16-cellules",
"1-alpha,2-alpha,3-bêta,4-alpha,5-alpha,6-bêta-hexachlorocyclohexane",
"1-DDOL",
"1-dodécanol",
"1-méthyl-2,4,6-trinitrobenzène",
"1-TDOL",
"1-tétradécanol",
"1T-SRAM",
"22-dihydroergocalciférol",
"2,2'-iminodi(éthylamine)",
"2,3,6-TBA",
"2,4,5-T",
"2,4,5-TP",
"2,4,6-trinitrophénol",
"24-cellules",
"2,4-D",
"2,4-DB",
"2,4-DP",
"2,4-MCPA",
"2,4-MCPB",
"2-désoxyribose",
"2-méthylpropane",
"2-méthylpropanes",
"2′-O-méthyla",
"2′-O-méthylai",
"2′-O-méthylaient",
"2′-O-méthylais",
"2′-O-méthylait",
"2′-O-méthylâmes",
"2′-O-méthylant",
"2′-O-méthylas",
"2′-O-méthylasse",
"2′-O-méthylassent",
"2′-O-méthylasses",
"2′-O-méthylassiez",
"2′-O-méthylassions",
"2′-O-méthylât",
"2′-O-méthylâtes",
"2′-O-méthyle",
"2′-O-méthylé",
"2′-O-méthylée",
"2′-O-méthylées",
"2′-O-méthylent",
"2′-O-méthyler",
"2′-O-méthylera",
"2′-O-méthylerai",
"2′-O-méthyleraient",
"2′-O-méthylerais",
"2′-O-méthylerait",
"2′-O-méthyleras",
"2′-O-méthylèrent",
"2′-O-méthylerez",
"2′-O-méthyleriez",
"2′-O-méthylerions",
"2′-O-méthylerons",
"2′-O-méthyleront",
"2′-O-méthyles",
"2′-O-méthylés",
"2′-O-méthylez",
"2′-O-méthyliez",
"2′-O-méthylions",
"2′-O-méthylons",
"33-tours",
"3,4-DCPA",
"3,6-DCP",
"39-45",
"3-hydroxyflavone",
"3-méthylmorphine",
"4-3-3",
"4-5-1",
"4-acétylaminophénol",
"4-CPA",
"5-4-1",
"5-cellules",
"5-HPETE",
"(5R,6S)-7,8-didehydro-4,5-époxy-3-méthoxy-N-méthylmorphinan-6-ol",
"600-cellules",
"6-benzyladénine",
"8-hydroxyquinoléine",
"9-2",
"9-3",
"AAAA-MM-JJ",
"Aarle-Rixtel",
"abaisse-langue",
"abaisse-langues",
"Abanto-Zierbena",
"Abaucourt-Hautecourt",
"Abbans-Dessous",
"Abbans-Dessus",
"Abbaye-sous-Plancy",
"Abbéville-la-Rivière",
"Abbéville-lès-Conflans",
"Abbeville-Saint-Lucien",
"Abcoude-Baambrugge",
"Abcoude-Proostdij",
"Abel-François",
"Abergement-Clémenciat",
"Abergement-de-Cuisery",
"Abergement-de-Varey",
"Abergement-la-Ronce",
"Abergement-le-Grand",
"Abergement-le-Petit",
"Abergement-lès-Thésy",
"Abergement-Sainte-Colombe",
"Abergement-Saint-Jean",
"Abitibi-Témiscamien",
"Abitibi-Témiscamingue",
"Abjat-sur-Bandiat",
"Ablaincourt-Pressoir",
"Ablain-Saint-Nazaire",
"Ablon-sur-Seine",
"Aboncourt-Gesincourt",
"Aboncourt-sur-Seille",
"abou-hannès",
"abou-mengel",
"abou-mengels",
"abricotier-pays",
"abricot-pêche",
"abricots-pêches",
"abri-sous-roche",
"abris-sous-roche",
"abris-vent",
"abri-vent",
"absorbeur-neutralisateur",
"acajou-amer",
"acajou-bois",
"acajous-amers",
"acajous-bois",
"accord-cadre",
"accords-cadres",
"accroche-coeur",
"accroche-cœur",
"accroche-coeurs",
"accroche-cœurs",
"accroche-pied",
"accroche-pieds",
"accroche-plat",
"accroche-plats",
"acétyl-salicylate",
"acétyl-salicylates",
"achard-bourgeois",
"Achard-Bourgeois",
"achard-bourgeoise",
"Achard-Bourgeoise",
"achard-bourgeoises",
"Achard-Bourgeoises",
"Achères-la-Forêt",
"Acheux-en-Amiénois",
"Acheux-en-Vimeu",
"Achiet-le-Grand",
"Achiet-le-Petit",
"Achter-Drempt",
"Achter-Lindt",
"Achter-Thesinge",
"acibenzolar-S-méthyle",
"acide-N-1-naphtyl-phtalamique",
"acide-phénol",
"acides-phénols",
"acido-alcalimétrie",
"acido-alcoolo-résistance",
"acido-alcoolo-résistances",
"acido-alcoolo-résistant",
"acido-alcoolo-résistante",
"acido-alcoolo-résistantes",
"acido-alcoolo-résistants",
"acido-basique",
"acido-résistant",
"acido-résistants",
"acquae-sextien",
"Acquae-Sextien",
"acquae-sextienne",
"Acquae-Sextienne",
"acquae-sextiennes",
"Acquae-Sextiennes",
"acquae-sextiens",
"Acquae-Sextiens",
"acqua-toffana",
"acqua-toffanas",
"Acquin-Westbécourt",
"acquit-à-caution",
"acquit-patent",
"acquits-à-caution",
"acquits-patents",
"acting-out",
"actino-uranium",
"Acy-en-Multien",
"Acy-Romance",
"Adam-lès-Passavant",
"Adam-lès-Vercel",
"Ad-Dawr",
"Addis-Abeba",
"Addis-Abebien",
"Addis-Abébien",
"add-on",
"Adelans-et-le-Val-de-Bithaine",
"Adervielle-Pouchergues",
"adieu-mes-couilles",
"adieu-tout",
"adieu-touts",
"adieu-va",
"adieu-vas",
"adieu-vat",
"adieu-vats",
"adiposo-génital",
"adiposo-génitale",
"adiposo-génitales",
"adiposo-génitaux",
"adjudant-chef",
"adjudants-chefs",
"Admannshagen-Bargeshagen",
"Adrets-de-Fréjus",
"Adwick-le-Street",
"A-EF",
"A-ÉF",
"africain-américain",
"Africain-Américain",
"africaine-américaine",
"Africaine-Américaine",
"africaines-américaines",
"Africaines-Américaines",
"africains-américains",
"Africains-Américains",
"africano-brésilien",
"africano-brésilienne",
"africano-brésiliennes",
"africano-brésiliens",
"africano-taïwanais",
"africano-taïwanaise",
"africano-taïwanaises",
"agace-pissette",
"agar-agar",
"agasse-tambourinette",
"agatha-christien",
"Agatha-christien",
"Agen-d'Aveyron",
"agit-prop",
"Agnam-Goly",
"Agnez-lès-Duisans",
"Agnicourt-et-Séchelles",
"Agnières-en-Dévoluy",
"agnus-castus",
"agnus-dei",
"Agon-Coutainville",
"agora-phobie",
"agora-phobies",
"Agos-Vidalos",
"Ahaxe-Alciette-Bascassan",
"Ahlefeld-Bistensee",
"Ahrenshagen-Daskow",
"aï-aï",
"Aibar-Oibar",
"Aichach-Friedberg",
"ai-cham",
"Aïcirits-Camou-Suhast",
"aide-comptable",
"aide-écuyer",
"aide-écuyers",
"aide-éducateur",
"Aïd-el-Kébir",
"Aïd-el-Séghir",
"aide-mémoire",
"aide-mémoires",
"aide-soignant",
"aide-soignante",
"aide-soignantes",
"aide-soignants",
"aides-soignantes",
"aides-soignants",
"aigle-bar",
"Aignay-le-Duc",
"Aignes-et-Puypéroux",
"aigre-douce",
"aigre-doux",
"Aigrefeuille-d'Aunis",
"Aigrefeuille-sur-Maine",
"aigre-moines",
"aigres-douces",
"aigres-doux",
"Aiguebelette-le-Lac",
"aigue-marine",
"aigue-marines",
"aigues-juntais",
"Aigues-Juntais",
"aigues-juntaise",
"Aigues-Juntaise",
"aigues-juntaises",
"Aigues-Juntaises",
"Aigues-Juntes",
"aigues-marines",
"aigues-mortais",
"Aigues-Mortais",
"aigues-mortaise",
"Aigues-Mortaise",
"aigues-mortaises",
"Aigues-Mortaises",
"Aigues-Mortes",
"Aigues-Vives",
"aigues-vivesien",
"Aigues-Vivesien",
"aigues-vivesienne",
"Aigues-Vivesienne",
"aigues-vivesiennes",
"Aigues-Vivesiennes",
"aigues-vivesiens",
"Aigues-Vivesiens",
"aigues-vivien",
"aigues-vivois",
"Aigues-Vivois",
"aigues-vivoise",
"Aigues-Vivoise",
"aigues-vivoises",
"Aigues-Vivoises",
"Aiguillon-sur-Mer",
"Aiguillon-sur-Vie",
"aiguise-crayon",
"aiguise-crayons",
"Aillant-sur-Milleron",
"Aillant-sur-Tholon",
"Aillevillers-et-Lyaumont",
"Aillières-Beauvoir",
"Aillon-le-Jeune",
"Aillon-le-Vieux",
"Ailly-le-Haut-Clocher",
"Ailly-sur-Meuse",
"Ailly-sur-Noye",
"Ailly-sur-Somme",
"Aime-la-Plagne",
"Ainay-le-Château",
"Ainay-le-Vieil",
"Ainhice-Mongelos",
"Aínsa-Sobrarbe",
"ainu-ken",
"Ainval-Septoutre",
"Aire-la-Ville",
"airelle-myrtille",
"Aire-sur-l'Adour",
"Aire-sur-la-Lys",
"Airon-Notre-Dame",
"Airon-Saint-Vaast",
"Aische-en-Refail",
"Aiseau-Presles",
"aiseau-preslois",
"Aiseau-Preslois",
"Aiseau-Presloise",
"Aisey-et-Richecourt",
"Aisey-sur-Seine",
"Aisonville-et-Bernoville",
"Aisy-sous-Thil",
"Aisy-sur-Armançon",
"Aix-en-Diois",
"Aix-en-Ergny",
"Aix-en-Issart",
"Aix-en-Othe",
"Aix-en-Provence",
"Aixe-sur-Vienne",
"Aix-la-Chapelle",
"Aix-la-Fayette",
"Aix-les-Bains",
"Aix-Noulette",
"Aix-Villemaur-Pâlis",
"Aizecourt-le-Bas",
"Aizecourt-le-Haut",
"Aizy-Jouy",
"Ajoupa-Bouillon",
"aka-bea",
"aka-bo",
"aka-cari",
"aka-jeru",
"aka-kede",
"aka-kora",
"akar-bale",
"akhal-teke",
"akua-ba",
"Alaincourt-la-Côte",
"al-Anbar",
"Al-Anbar",
"al-Anbâr",
"Al-Anbâr",
"al-Anbār",
"Al-Andalus",
"Alba-la-Romaine",
"albano-letton",
"Albaret-le-Comtal",
"Albaret-Sainte-Marie",
"Alb-Danube",
"Albefeuille-Lagarde",
"Albepierre-Bredons",
"Albergaria-a-Velha",
"Albiez-le-Jeune",
"Albiez-Montrond",
"Albigny-sur-Saône",
"Albon-d'Ardèche",
"Alby-sur-Chéran",
"alcalino-terreuse",
"alcalino-terreuses",
"alcalino-terreux",
"Alçay-Alçabéhéty-Sunharette",
"alcoolo-dépendance",
"alcoolo-dépendances",
"alcool-phénol",
"alcools-phénols",
"Al-Dour",
"Aldridge-Brownhills",
"Alegría-Dulantzi",
"aléseuse-fraiseuse",
"aléseuses-fraiseuses",
"Alet-les-Bains",
"algéro-marocain",
"algéro-tuniso-lybien",
"algéro-tuniso-marocain",
"algo-carburant",
"algo-carburants",
"Alignan-du-Vent",
"Alise-Sainte-Reine",
"al-Kachi",
"Al-Khwarizmi",
"Allaines-Mervilliers",
"Allainville-aux-Bois",
"Allainville-en-Beauce",
"Alland'Huy",
"Alland'Huy-et-Sausseuil",
"allanto-chorion",
"allanto-chorions",
"Allas-Bocage",
"Allas-Champagne",
"Allas-les-Mines",
"Allègre-les-Fumades",
"Allemagne-en-Provence",
"Allemanche-Launay-et-Soyer",
"Allemans-du-Dropt",
"Allennes-les-Marais",
"Allerey-sur-Saône",
"aller-retour",
"aller-retours",
"allers-retours",
"Alles-sur-Dordogne",
"Allez-et-Cazeneuve",
"allez-vous-en",
"allez-y",
"Allières-et-Risset",
"Alligny-Cosne",
"Alligny-en-Morvan",
"Allondrelle-la-Malmaison",
"Allonzier-la-Caille",
"Allouville-Bellefosse",
"alloxydime-sodium",
"allume-cigare",
"allume-cigares",
"allume-feu",
"allume-feux",
"allume-gaz",
"allumette-bougie",
"allumettes-bougies",
"Almon-les-Junies",
"Almont-les-Junies",
"Alos-Sibas-Abense",
"Aloxe-Corton",
"Alpes-de-Haute-Provence",
"Alpes-Maritimes",
"alpha-amylase",
"alpha-amylases",
"alpha-conversion",
"alpha-conversions",
"alpha-test",
"alpha-tests",
"alpha-tridymite",
"alpha-tridymites",
"alpha-variscite",
"alpha-variscites",
"Alphen-Boshoven",
"Alphen-Chaam",
"Alphen-Oosterwijk",
"Alphen-sur-le-Rhin",
"al-Qaida",
"Al-Qaida",
"al-Qaïda",
"Al-Qaïda",
"Alsace-Champagne-Ardenne-Lorraine",
"Alsace-Lorraine",
"alsacien-lorrain",
"Alsbach-Hähnlein",
"Althen-des-Paluds",
"Altmark-Salzwedel",
"alto-basso",
"alto-bassos",
"aluminium-épidote",
"aluminium-épidotes",
"alumu-tesu",
"Alzey-Worms",
"Amagne-Lucquy",
"Amareins-Francheleins-Cesseins",
"Amathay-Vésigneux",
"Amayé-sur-Orne",
"Amayé-sur-Seulles",
"Ambarès-et-Lagrave",
"Amberg-Sulzbach",
"Ambérieu-en-Bugey",
"Ambérieux-en-Dombes",
"Ambillou-Château",
"Amblans-et-Velotte",
"Ambly-Fleury",
"Ambly-sur-Aisne",
"Ambly-sur-Meuse",
"ambre-gris",
"Ambrières-les-Vallées",
"ambystomes-tigres",
"ambystome-tigre",
"Amélie-les-Bains",
"Amélie-les-Bains-Palalda",
"Amel-sur-l'Etang",
"Amel-sur-l'Étang",
"Amendeuix-Oneix",
"âme-sœur",
"âmes-sœurs",
"Amfreville-la-Campagne",
"Amfreville-la-Mi-Voie",
"Amfreville-les-Champs",
"Amfreville-Saint-Amand",
"Amfreville-sous-les-Monts",
"Amfreville-sur-Iton",
"ami-ami",
"amiante-ciment",
"Amigny-Rouy",
"amino-acétique",
"amino-acide",
"amino-acides",
"Amlikon-Bissegg",
"Amont-et-Effreney",
"Amorebieta-Etxano",
"Amorots-Succos",
"amour-en-cage",
"amour-propre",
"amours-en-cage",
"amours-propres",
"ampère-heure",
"ampères-heures",
"(+)-amphétamine",
"(−)-amphétamine",
"Ampilly-les-Bordes",
"Ampilly-le-Sec",
"ampli-syntoniseur",
"amuse-bouche",
"amuse-bouches",
"amuse-gueule",
"amuse-gueules",
"analyste-programmeur",
"analystes-programmeurs",
"ananas-bois",
"anarcho-capitalisme",
"anarcho-capitalismes",
"anarcho-fasciste",
"anarcho-fascistes",
"anarcho-punk",
"anarcho-punks",
"anarcho-syndicalisme",
"anarcho-syndicalismes",
"anarcho-syndicaliste",
"anarcho-syndicalistes",
"anatomo-pathologie",
"anatomo-pathologies",
"anatomo-pathologique",
"anatomo-pathologiques",
"Ance-Féas",
"Anchenoncourt-et-Chazel",
"Ancourteville-sur-Héricourt",
"Ancretiéville-Saint-Victor",
"Ancretteville-sur-Mer",
"Anctoville-sur-Boscq",
"Ancy-Dornot",
"Ancy-le-Franc",
"Ancy-le-Libre",
"Ancy-sur-Moselle",
"Andelot-Blancheville",
"Andelot-en-Montagne",
"Andelot-Morval",
"Andernos-les-Bains",
"Andert-et-Condon",
"Andilly-en-Bassigny",
"Andorre-la-Vieille",
"Andouillé-Neuville",
"Andréen-de-l'Est",
"Andréenne-de-l'Est",
"Andréennes-de-l'Est",
"Andréens-de-l'Est",
"andrézien-bouthéonnais",
"Andrézien-Bouthéonnais",
"andrézienne-bouthéonnaise",
"Andrézienne-Bouthéonnaise",
"andréziennes-bouthéonnaises",
"Andréziennes-Bouthéonnaises",
"andréziens-bouthéonnais",
"Andréziens-Bouthéonnais",
"Andrézieux-Bouthéon",
"Anéran-Camors",
"ânes-zèbres",
"âne-zèbre",
"Angeac-Champagne",
"Angeac-Charente",
"Ange-Gardienois",
"Ange-Gardienoise",
"Angerville-Bailleul",
"Angerville-la-Campagne",
"Angerville-la-Martel",
"Angerville-l'Orcher",
"Anglards-de-Saint-Flour",
"Anglards-de-Salers",
"Anglars-Juillac",
"Anglars-Nozac",
"Anglars-Saint-Félix",
"Anglesqueville-la-Bras-Long",
"Anglesqueville-l'Esneval",
"Angles-sur-l'Anglin",
"Anglure-sous-Dun",
"Angluzelles-et-Courcelles",
"Angoustrine-Villeneuve-des-Escaldes",
"Angoville-au-Plain",
"Angoville-en-Saire",
"Angoville-sur-Ay",
"Anguilcourt-le-Sart",
"anguille-spaghetti",
"Angviller-lès-Bisping",
"Anhalt-Bitterfeld",
"animal-garou",
"animalier-soigneur",
"animaux-garous",
"Anizy-le-Château",
"Annaberg-Buchholz",
"Annay-la-Côte",
"Annay-sur-Serein",
"Anne-Charlotte",
"Annecy-le-Vieux",
"année-homme",
"année-lumière",
"années-homme",
"années-hommes",
"années-lumière",
"Anne-Laure",
"Anne-Marie",
"Anne-Sophie",
"Annesse-et-Beaulieu",
"Annet-sur-Marne",
"Anneville-Ambourville",
"Anneville-en-Saire",
"Annéville-la-Prairie",
"Anneville-sur-Mer",
"Anneville-sur-Scie",
"Annevoie-Rouillon",
"Annoisin-Chatelans",
"Annouville-Vilmesnil",
"ano-génital",
"ano-génitale",
"ano-génitales",
"ano-génitaux",
"Ansac-sur-Vienne",
"ansbach-triesdorfer",
"Anse-aux-Fraisois",
"Anse-aux-Fraisoise",
"Anse-Bertrand",
"ante-bois",
"anté-diluvien",
"anté-hypophyse",
"anté-hypophyses",
"ante-meridiem",
"ante-meridiems",
"ante-mortem",
"ante-mortems",
"antenne-relais",
"antennes-radar",
"antennes-relais",
"anté-pénultième",
"anté-pénultièmes",
"anté-prédécesseur",
"anté-prédécesseurs",
"Antey-Saint-André",
"Antezant-la-Chapelle",
"Antheuil-Portes",
"anthropo-gammamétrie",
"anthropo-gammamétries",
"anthropo-toponyme",
"anthropo-toponymes",
"anthropo-zoomorphe",
"anthropo-zoomorphes",
"Anthy-sur-Léman",
"Antichan-de-Frontignes",
"Anticostien-Minganien",
"Antigny-la-Ville",
"Antigua-et-Barbuda",
"antiguais-barbudien",
"Antiguais-Barbudien",
"antiguais-barbudiens",
"Antiguais-Barbudiens",
"antiguaise-barbudienne",
"Antiguaise-Barbudienne",
"antiguaises-barbudiennes",
"Antiguaises-Barbudiennes",
"antiguais-et-barbudien",
"Antiguais-et-Barbudien",
"antilope-chevreuil",
"Antogny-le-Tillac",
"Antoine-Labellois",
"Antonne-et-Trigonant",
"Antraigues-sur-Volane",
"Any-Martin-Rieux",
"Anzat-le-Luguet",
"Anzin-Saint-Aubin",
"Anzy-le-Duc",
"A-OF",
"Aouste-sur-Sye",
"Apenburg-Winterfeld",
"apico-alvéolaire",
"apico-dental",
"appartements-témoins",
"appartement-témoin",
"appel-contre-appel",
"appels-contre-appels",
"Appelterre-Eichem",
"Appenai-sous-Bellême",
"Appeville-Annebault",
"apprentie-sorcière",
"apprenties-sorcières",
"apprenti-sorcellerie",
"apprenti-sorcelleries",
"apprenti-sorcier",
"apprentis-sorciers",
"appui-bras",
"appuie-main",
"appuie-mains",
"appuie-tête",
"appuie-têtes",
"appui-livres",
"appui-main",
"appui-mains",
"appui-pied",
"appui-pieds",
"appui-pot",
"appui-pots",
"appuis-main",
"appuis-pot",
"appuis-tête",
"appui-tête",
"appui-têtes",
"Apremont-la-Forêt",
"Apremont-sur-Allier",
"aquae-sextien",
"Aquae-Sextien",
"aquae-sextienne",
"Aquae-Sextienne",
"aquae-sextiennes",
"Aquae-Sextiennes",
"aquae-sextiens",
"Aquae-Sextiens",
"aqua-tinta",
"aqua-toffana",
"aquila-alba",
"Aquitaine-Limousin-Poitou-Charentes",
"Arâches-la-Frasse",
"araignée-crabe",
"araignée-loup",
"araignées-crabes",
"araignées-loups",
"aralo-caspien",
"aralo-caspienne",
"Arandon-Passins",
"Arbedo-Castione",
"Arbérats-Sillègue",
"Arbigny-sous-Varennes",
"Arblade-le-Bas",
"Arblade-le-Haut",
"Arbonne-la-Forêt",
"Arbouet-Sussaute",
"arbre-à-la-fièvre",
"arbre-de-Moïse",
"arbres-de-Moïse",
"arbres-refuges",
"arcado-chypriote",
"arcado-chypriotes",
"arcado-cypriote",
"arcado-cypriotes",
"Arces-Dilo",
"Arcis-le-Ponsart",
"Arcis-sur-Aube",
"Arcizac-Adour",
"Arcizac-ez-Angles",
"Arcizans-Avant",
"Arcizans-Dessus",
"Arcy-Sainte-Restitue",
"Arcy-sur-Cure",
"Ardenay-sur-Mérize",
"ardennite-(As)",
"ardennite-(As)s",
"Ardeuil-et-Montfauxelles",
"Ardeuil-Montfauxelles",
"ardi-gasna",
"Arelaune-en-Seine",
"Arfeuille-Châtain",
"Argelès-Bagnères",
"Argelès-Gazost",
"Argelès-sur-Mer",
"Argens-Minervois",
"Argentat-sur-Dordogne",
"Argenteuil-sur-Armançon",
"Argentière-la-Bessée",
"argentite-β",
"argentite-βs",
"argent-métal",
"argento-analcime",
"argento-analcimes",
"Argenton-Château",
"Argenton-l'Eglise",
"Argenton-l'Église",
"Argenton-les-Vallées",
"Argenton-Notre-Dame",
"Argenton-sur-Creuse",
"argento-perrylite",
"argento-perrylites",
"Argentré-du-Plessis",
"Argent-sur-Sauldre",
"argilo-calcaire",
"argilo-calcaires",
"argilo-gréseuse",
"argilo-gréseuses",
"argilo-gréseux",
"argilo-loessique",
"argilo-loessiques",
"argilo-siliceuse",
"argilo-siliceuses",
"argilo-siliceux",
"arginine-méthyla",
"arginine-méthylai",
"arginine-méthylaient",
"arginine-méthylais",
"arginine-méthylait",
"arginine-méthylâmes",
"arginine-méthylant",
"arginine-méthylas",
"arginine-méthylasse",
"arginine-méthylassent",
"arginine-méthylasses",
"arginine-méthylassiez",
"arginine-méthylassions",
"arginine-méthylât",
"arginine-méthylâtes",
"arginine-méthyle",
"arginine-méthylé",
"arginine-méthylée",
"arginine-méthylées",
"arginine-méthylent",
"arginine-méthyler",
"arginine-méthylera",
"arginine-méthylerai",
"arginine-méthyleraient",
"arginine-méthylerais",
"arginine-méthylerait",
"arginine-méthyleras",
"arginine-méthylèrent",
"arginine-méthylerez",
"arginine-méthyleriez",
"arginine-méthylerions",
"arginine-méthylerons",
"arginine-méthyleront",
"arginine-méthyles",
"arginine-méthylés",
"arginine-méthylez",
"arginine-méthyliez",
"arginine-méthylions",
"arginine-méthylons",
"arginine-vasopressine",
"Argiusta-Moriccio",
"Argut-Dessous",
"Argut-Dessus",
"ariaco-dompierrois",
"Ariaco-Dompierrois",
"ariaco-dompierroise",
"Ariaco-Dompierroise",
"ariaco-dompierroises",
"Ariaco-Dompierroises",
"Aries-Espénan",
"aristo-bourgeoisie",
"aristo-bourgeoisies",
"aristotélico-thomiste",
"aristotélico-thomistes",
"arivey-lingeois",
"Arivey-Lingeois",
"arivey-lingeoise",
"Arivey-Lingeoise",
"arivey-lingeoises",
"Arivey-Lingeoises",
"Arles-sur-Tech",
"Arleux-en-Gohelle",
"armançon-martinois",
"Armançon-Martinois",
"armançon-martinoise",
"Armançon-Martinoise",
"armançon-martinoises",
"Armançon-Martinoises",
"Armbouts-Cappel",
"armbouts-cappellois",
"Armbouts-Cappellois",
"armbouts-cappelloise",
"Armbouts-Cappelloise",
"armbouts-cappelloises",
"Armbouts-Cappelloises",
"Armenonville-les-Gâtineaux",
"Armentières-en-Brie",
"Armentières-sur-Avre",
"Armentières-sur-Ourcq",
"Armous-et-Cau",
"Arnac-la-Poste",
"Arnac-Pompadour",
"Arnac-sur-Dourdou",
"Arnaud-Guilhem",
"arnaud-guilhémois",
"Arnaud-Guilhémois",
"arnaud-guilhémoise",
"Arnaud-Guilhémoise",
"arnaud-guilhémoises",
"Arnaud-Guilhémoises",
"Arnay-le-Duc",
"Arnay-sous-Vitteaux",
"Arnex-sur-Nyon",
"Arnex-sur-Orbe",
"Arnières-sur-Iton",
"Arnoncourt-sur-Apance",
"Arnouville-lès-Gonesse",
"Arnouville-lès-Mantes",
"Aroue-Ithorots-Olhaïby",
"Arpaillargues-et-Aureillac",
"Arpajon-sur-Cère",
"Arpheuilles-Saint-Priest",
"Arques-la-Bataille",
"Arquettes-en-Val",
"arrache-clou",
"arrache-clous",
"arrache-pied",
"arrache-sonde",
"Arraia-Maeztu",
"Arrancy-sur-Crusne",
"Arras-en-Lavedan",
"Arras-sur-Rhône",
"Arrast-Larrebieu",
"Arratzua-Ubarrundia",
"Arraute-Charritte",
"Arraye-et-Han",
"Arrayou-Lahitte",
"Arrens-Marsous",
"Arrentès-de-Corcieux",
"arrêt-buffet",
"arrêt-court",
"arrête-boeuf",
"arrête-bœuf",
"arrête-bœufs",
"arrêts-buffet",
"arrêts-courts",
"Arricau-Bordes",
"Arrien-en-Bethmale",
"Arrodets-ez-Angles",
"Arromanches-les-Bains",
"Arros-de-Nay",
"Arros-d'Oloron",
"arrow-root",
"Arsac-en-Velay",
"Ars-en-Ré",
"ars-laquenexois",
"Ars-Laquenexois",
"ars-laquenexoise",
"Ars-Laquenexoise",
"ars-laquenexoises",
"Ars-Laquenexoises",
"Ars-Laquenexy",
"Ars-les-Favets",
"Ars-sur-Formans",
"Ars-sur-Moselle",
"Arsure-Arsurette",
"Artaise-le-Vivier",
"Artalens-Souin",
"Artannes-sur-Indre",
"Artannes-sur-Thouet",
"artério-sclérose",
"artério-scléroses",
"Arthaz-Pont-Notre-Dame",
"Arthez-d'Armagnac",
"Arthez-d'Asson",
"Arthez-de-Béarn",
"Arthon-en-Retz",
"Artignosc-sur-Verdon",
"Artigues-près-Bordeaux",
"artisan-créateur",
"artisans-créateurs",
"Art-sur-Meurthe",
"art-thérapie",
"art-thérapies",
"Arzacq-Arraziguet",
"Arzenc-d'Apcher",
"Arzenc-de-Randon",
"Arzillières-Neuville",
"Asasp-Arros",
"Asbach-Bäumenheim",
"Asbach-Sickenberg",
"Aschères-le-Marché",
"a-sexualisa",
"a-sexualisai",
"a-sexualisaient",
"a-sexualisais",
"a-sexualisait",
"a-sexualisâmes",
"a-sexualisant",
"a-sexualisas",
"a-sexualisasse",
"a-sexualisassent",
"a-sexualisasses",
"a-sexualisassiez",
"a-sexualisassions",
"a-sexualisât",
"a-sexualisâtes",
"a-sexualise",
"a-sexualisé",
"a-sexualisée",
"a-sexualisées",
"a-sexualisent",
"a-sexualiser",
"a-sexualiser",
"a-sexualisera",
"a-sexualiserai",
"a-sexualiseraient",
"a-sexualiserais",
"a-sexualiserait",
"a-sexualiseras",
"a-sexualisèrent",
"a-sexualiserez",
"a-sexualiseriez",
"a-sexualiserions",
"a-sexualiserons",
"a-sexualiseront",
"a-sexualises",
"a-sexualisés",
"a-sexualisez",
"a-sexualisiez",
"a-sexualisions",
"a-sexualisons",
"Asnans-Beauvoisin",
"Asnières-en-Bessin",
"Asnières-en-Montagne",
"Asnières-en-Poitou",
"Asnières-la-Giraud",
"Asnières-lès-Dijon",
"Asnières-sous-Bois",
"Asnières-sur-Blour",
"Asnières-sur-Nouère",
"Asnières-sur-Oise",
"Asnières-sur-Saône",
"Asnières-sur-Seine",
"Asnières-sur-Vègre",
"Aspach-le-Bas",
"Aspach-le-Haut",
"Aspach-Michelbach",
"Aspin-Aure",
"Aspin-en-Lavedan",
"Aspres-lès-Corps",
"Aspres-sur-Buëch",
"Aspret-Sarrat",
"assa-foetida",
"Assais-les-Jumeaux",
"Assé-le-Bérenger",
"Assé-le-Boisne",
"Assé-le-Riboul",
"assemble-nuages",
"assiette-à-beurre",
"assis-debout",
"Assis-sur-Serre",
"assurance-chômage",
"assurance-chômages",
"assurance-emploi",
"assurances-chômage",
"assurances-vie",
"assurance-vie",
"assyro-chaldéen",
"Assyro-Chaldéen",
"Aste-Béon",
"Aston-Jonction",
"astronome-astrologue",
"astronomes-astrologues",
"astur-léonais",
"ataxie-télangiectasie",
"Athée-sur-Cher",
"Athesans-Etroitefontaine",
"Athesans-Étroitefontaine",
"Athies-sous-Laon",
"Athis-de-l'Orne",
"Athis-Mons",
"Athos-Aspis",
"attache-bossette",
"attache-bossettes",
"attaché-case",
"attaché-cases",
"attache-doudou",
"attache-doudous",
"attachés-cases",
"Attenrode-Wever",
"attentats-suicides",
"attentat-suicide",
"Attignat-Oncin",
"atto-ohm",
"atto-ohms",
"attrape-couillon",
"attrape-couillons",
"attrape-minette",
"attrape-minettes",
"attrape-minon",
"attrape-minons",
"attrape-mouche",
"attrape-mouches",
"attrape-nigaud",
"attrape-nigauds",
"attrape-rêves",
"attrape-tout",
"attrape-vilain",
"Aubenas-les-Alpes",
"Aubencheul-au-Bac",
"Aubencheul-aux-Bois",
"Aubepierre-Ozouer-le-Repos",
"Aubepierre-sur-Aube",
"Auberives-en-Royans",
"Auberives-sur-Varèze",
"Aubermesnil-aux-Erables",
"Aubermesnil-aux-Érables",
"Aubermesnil-Beaumais",
"Aubert-Gallionnais",
"Auberville-la-Campagne",
"Auberville-la-Manuel",
"Auberville-la-Renault",
"Aubeterre-sur-Dronne",
"aube-vigne",
"Aubie-et-Espessas",
"Aubigné-Briand",
"Aubigné-Racan",
"Aubigné-sur-Layon",
"Aubigny-au-Bac",
"Aubigny-aux-Kaisnes",
"Aubigny-en-Artois",
"Aubigny-en-Laonnois",
"Aubigny-en-Plaine",
"Aubigny-la-Ronce",
"Aubigny-les-Clouzeaux",
"Aubigny-les-Pothées",
"Aubigny-lès-Sombernon",
"Aubigny-sur-Badin",
"Aubigny-sur-Nère",
"Aubin-Saint-Vaast",
"Auboncourt-Vauzelles",
"Aubry-du-Hainaut",
"Aubry-en-Exmes",
"Aubry-le-Panthou",
"Aubusson-d'Auvergne",
"Auby-sur-Semois",
"Aucey-la-Plaine",
"Auchay-sur-Vendée",
"Auchy-au-Bois",
"Auchy-la-Montagne",
"Auchy-lès-Hesdin",
"Auchy-les-Mines",
"Auchy-lez-Orchies",
"au-deçà",
"au-dedans",
"au-dehors",
"au-delà",
"au-delàs",
"Aude-Line",
"Audenhove-Sainte-Marie",
"Audenhove-Saint-Géry",
"au-dessous",
"au-dessus",
"au-devant",
"audio-numérique",
"audio-numériques",
"audio-prothésiste",
"audio-prothésistes",
"audio-visuel",
"audio-visuelle",
"audio-visuelles",
"audio-visuels",
"Audouville-la-Hubert",
"Audun-le-Roman",
"Audun-le-Tiche",
"Auffreville-Brasseuil",
"Auffrique-et-Nogent",
"Auger-Saint-Vincent",
"Augers-en-Brie",
"Augerville-la-Rivière",
"Auge-Saint-Médard",
"Augy-sur-Aubois",
"Aujan-Mournède",
"aujourd'hui",
"Aulhat-Flat",
"Aulhat-Saint-Privat",
"aulnaie-frênaie",
"aulnaies-frênaies",
"Aulnay-aux-Planches",
"Aulnay-l'Aître",
"Aulnay-la-Rivière",
"Aulnay-sous-Bois",
"Aulnay-sur-Iton",
"Aulnay-sur-Marne",
"Aulnay-sur-Mauldre",
"Aulnois-en-Perthois",
"Aulnois-sous-Laon",
"Aulnois-sous-Vertuzey",
"Aulnois-sur-Seille",
"Aulnoye-Aymeries",
"Aulnoy-lez-Valenciennes",
"Aulnoy-sur-Aube",
"au-lof",
"auloi-jumeaux",
"Aulus-les-Bains",
"Aulx-lès-Cromary",
"Auménancourt-le-Petit",
"Aumeville-Lestre",
"Aumont-Aubrac",
"Aumont-en-Halatte",
"Aunac-sur-Charente",
"Aunay-en-Bazois",
"Aunay-les-Bois",
"Aunay-sous-Auneau",
"Aunay-sous-Crécy",
"Aunay-sur-Odon",
"Auneau-Bleury-Saint-Symphorien",
"Aunou-le-Faucon",
"Aunou-sur-Orne",
"Aurec-sur-Loire",
"Aurelle-Verlac",
"Auriac-de-Bourzac",
"Auriac-du-Périgord",
"Auriac-Lagast",
"Auriac-l'Eglise",
"Auriac-l'Église",
"Auriac-sur-Dropt",
"Auriac-sur-Vendinelle",
"Auribeau-sur-Siagne",
"auriculo-ventriculaire",
"auriculo-ventriculaires",
"Aurions-Idernes",
"aurum-musivum",
"Aussac-Vadalle",
"aussi-tost",
"aussi-tôt",
"Australie-Méridionale",
"Australie-Occidentale",
"australo-américain",
"austro-asiatique",
"austro-asiatiques",
"austro-hongrois",
"Austro-Hongrois",
"austro-hongroise",
"Austro-Hongroise",
"austro-hongroises",
"Austro-Hongroises",
"austro-occidental",
"austro-occidentale",
"austro-occidentales",
"austro-occidentaux",
"Autechaux-Roide",
"auteur-compositeur",
"auteure-compositrice",
"auteures-compositrices",
"auteurs-compositeurs",
"Autevielle-Saint-Martin-Bideren",
"Autheuil-Authouillet",
"Autheuil-en-Valois",
"Authieux-Ratiéville",
"Authon-du-Perche",
"Authon-Ebéon",
"Authon-Ébéon",
"Authon-la-Plaine",
"Autigny-la-Tour",
"Autigny-le-Grand",
"Autigny-le-Petit",
"autos-caravanes",
"autos-mitrailleuses",
"autos-scooters",
"autos-tamponnantes",
"autos-tamponneuses",
"au-tour",
"Autrecourt-et-Pourron",
"Autrécourt-sur-Aire",
"Autre-Église",
"autre-églisois",
"Autre-Églisois",
"Autre-Églisoise",
"autre-littérature",
"Autréville-Saint-Lambert",
"Autreville-sur-la-Renne",
"Autreville-sur-Moselle",
"Autrey-lès-Cerre",
"Autrey-lès-Gray",
"Autrey-le-Vay",
"Autriche-Hongrie",
"Autruy-sur-Juine",
"Autry-Issards",
"Autry-le-Châtel",
"Auvergne-Rhône-Alpes",
"Auvers-le-Hamon",
"Auvers-Saint-Georges",
"Auvers-sous-Montfaucon",
"Auvers-sur-Oise",
"Auvet-et-la-Chapelotte",
"Auvillars-sur-Saône",
"Auvillers-les-Forges",
"Auvilliers-en-Gâtinais",
"Aux-Aussat",
"Auxelles-Bas",
"Auxelles-Haut",
"Auxey-Duresses",
"Auxi-le-Château",
"Auxon-Dessous",
"Auxon-Dessus",
"Auzat-la-Combelle",
"Auzat-sur-Allier",
"Auzéville-en-Argonne",
"Auzeville-Tolosane",
"Auzouer-en-Touraine",
"Auzouville-Auberbosc",
"Auzouville-l'Esneval",
"Auzouville-sur-Ry",
"Auzouville-sur-Saâne",
"Availles-en-Châtellerault",
"Availles-Limouzine",
"Availles-sur-Chizé",
"Availles-sur-Seiche",
"Availles-Thouarsais",
"avale-tout",
"avale-tout-cru",
"avale-touts",
"Avanne-Aveney",
"avants-centres",
"avants-postes",
"Avaux-la-Ville",
"Ave-et-Auffe",
"ave-et-auffois",
"Ave-et-Auffois",
"Ave-et-Auffoise",
"Avenay-Val-d'Or",
"Avernas-le-Bauduin",
"Avernes-Saint-Gourgon",
"Avernes-sous-Exmes",
"averno-méditerranéen",
"averno-méditerranéenne",
"averno-méditerranéennes",
"averno-méditerranéens",
"Avéron-Bergelle",
"Avesnes-Chaussoy",
"Avesnes-en-Bray",
"Avesnes-en-Saosnois",
"Avesnes-en-Val",
"Avesnes-le-Comte",
"Avesnes-les-Aubert",
"Avesnes-lès-Bapaume",
"Avesnes-le-Sec",
"Avesnes-sur-Helpe",
"aveugle-né",
"aveugle-née",
"aveugles-nés",
"Avezac-Prat-Lahitte",
"A.-Vict.",
"Avignonet-Lauragais",
"Avignon-lès-Saint-Claude",
"Avillers-Sainte-Croix",
"Avilly-Saint-Léonard",
"avion-cargo",
"avions-cargos",
"Avirey-Lingey",
"avoir-du-poids",
"Avon-la-Pèze",
"Avon-les-Roches",
"Avrigney-Virey",
"Avrillé-les-Ponceaux",
"Avril-sur-Loire",
"Awala-Yalimapo",
"Ax-les-Thermes",
"axo-missien",
"Axo-Missien",
"axo-missienne",
"Axo-Missienne",
"axo-missiennes",
"Axo-Missiennes",
"axo-missiens",
"Axo-Missiens",
"Ayala-Aiara",
"ayant-cause",
"ayant-droit",
"ayants-cause",
"ayants-droit",
"Ayat-sur-Sioule",
"Aÿ-Champagne",
"aye-aye",
"Ayer's-Cliffois",
"ayes-ayes",
"Ayguatébia-Talau",
"Ayguemorte-les-Graves",
"Ayros-Arbouix",
"Ay-sur-Moselle",
"ayur-veda",
"Ayzac-Ost",
"Azannes-et-Soumazannes",
"Azanuy-Alins",
"Azat-Châtenet",
"Azat-le-Ris",
"Azat-le-Riz",
"Azay-le-Brûlé",
"Azay-le-Ferron",
"Azay-le-Rideau",
"Azay-sur-Cher",
"Azay-sur-Indre",
"Azay-sur-Thouet",
"Azilone-Ampaza",
"azinphos-éthyl",
"azinphos-méthyl",
"Azy-le-Vif",
"Azy-sur-Marne",
"B-52",
"Baaks-Sweijer",
"Baar-Ebenhausen",
"Baarle-Nassau",
"Baarle-Nassau-Grens",
"baa'thisa",
"baa'thisai",
"baa'thisaient",
"baa'thisais",
"baa'thisait",
"baa'thisâmes",
"baa'thisant",
"baa'thisas",
"baa'thisasse",
"baa'thisassent",
"baa'thisasses",
"baa'thisassiez",
"baa'thisassions",
"baa'thisât",
"baa'thisâtes",
"baa'thise",
"baa'thisé",
"baa'thisée",
"baa'thisées",
"baa'thisent",
"baa'thiser",
"baa'thisera",
"baa'thiserai",
"baa'thiseraient",
"baa'thiserais",
"baa'thiserait",
"baa'thiseras",
"baa'thisèrent",
"baa'thiserez",
"baa'thiseriez",
"baa'thiserions",
"baa'thiserons",
"baa'thiseront",
"baa'thises",
"baa'thisés",
"baa'thisez",
"baa'thisiez",
"baa'thisions",
"baa'thisons",
"b-a-ba",
"b.a.-ba",
"Babeau-Bouldoux",
"babil's",
"babine-witsuwit'en",
"baby-beef",
"baby-beefs",
"baby-boom",
"baby-boomer",
"baby-boomers",
"baby-boomeur",
"baby-boomeurs",
"baby-boomeuse",
"baby-boomeuses",
"baby-foot",
"baby-foots",
"baby-sitter",
"baby-sitters",
"baby-sitting",
"baby-sittings",
"bachat-long",
"bachat-longs",
"bachi-bouzouck",
"bachi-bouzoucks",
"bachi-bouzouk",
"bachi-bouzouks",
"Bachos-Binos",
"Bachte-Maria-Leerne",
"Bacouel-sur-Selle",
"Bacqueville-en-Caux",
"Badecon-le-Pin",
"Badefols-d'Ans",
"Badefols-de-Cadouin",
"Badefols-sur-Dordogne",
"Baden-Baden",
"Bade-Wurtemberg",
"Badménil-aux-Bois",
"Badonvilliers-Gérauvilliers",
"Baerle-Duc",
"Bagat-en-Quercy",
"Bâgé-la-Ville",
"Bâgé-le-Châtel",
"Bagnac-sur-Célé",
"Bagneaux-sur-Loing",
"Bagnères-de-Bigorre",
"Bagnères-de-Luchon",
"Bagneux-la-Fosse",
"Bagnoles-de-l'Orne",
"Bagnols-en-Forêt",
"Bagnols-les-Bains",
"Bagnols-sur-Cèze",
"Baguer-Morvan",
"Baguer-Pican",
"bahá'í",
"bahá'íe",
"bahá'íes",
"bahá'ís",
"Bahá'u'lláh",
"Bahus-Soubiran",
"Baie-Catherinois",
"Baie-Comelien",
"Baie-Comellien",
"Baie-Comien",
"Baie-Comois",
"Baie-des-Sablien",
"Baie-du-Febvre",
"Baie-Jolien",
"Baie-Mahault",
"baie-mahaultien",
"Baie-Mahaultien",
"baie-mahaultienne",
"Baie-Mahaultienne",
"baie-mahaultiennes",
"Baie-Mahaultiennes",
"baie-mahaultiens",
"Baie-Mahaultiens",
"Baie-Saint-Paulois",
"Baie-Trinitois",
"Baignes-Sainte-Radegonde",
"Baigneux-les-Juifs",
"Baigts-de-Béarn",
"Bailleau-Armenonville",
"Bailleau-le-Pin",
"Bailleau-l'Evêque",
"Bailleau-l'Évêque",
"baille-blé",
"Baillet-en-France",
"Bailleul-aux-Cornailles",
"Bailleul-la-Vallée",
"Bailleul-le-Soc",
"Bailleul-lès-Pernes",
"Bailleul-Neuville",
"Bailleul-Sir-Berthoult",
"Bailleul-sur-Thérain",
"Bailly-aux-Forges",
"Bailly-Carrois",
"Bailly-en-Rivière",
"Bailly-le-Franc",
"Bailly-Romainvilliers",
"Bain-de-Bretagne",
"bain-douche",
"bain-marie",
"bains-douches",
"Bains-les-Bains",
"bains-marie",
"Bains-sur-Oust",
"Bainville-aux-Miroirs",
"Bainville-aux-Saules",
"Bainville-sur-Madon",
"Bairon-le-Mont-Dieu",
"Bairon-Mont-Dieu",
"baise-en-ville",
"baise-main",
"Baisy-Thy",
"Bakkum-Noord",
"Balagny-sur-Thérain",
"Balaguier-d'Olt",
"Balaguier-sur-Rance",
"balai-brosse",
"balais-brosses",
"Balaives-et-Butz",
"Balaruc-les-Bains",
"Balaruc-le-Vieux",
"Bâle-Campagne",
"baleine-pilote",
"baleines-pilotes",
"Balesmes-sur-Marne",
"Bâle-Ville",
"Baliracq-Maumusson",
"Ballancourt-sur-Essonne",
"Ballan-Miré",
"balle-molle",
"balle-queue",
"Balleroy-sur-Drôme",
"ballon-panier",
"Ballon-Saint-Mars",
"ballon-sonde",
"ballons-panier",
"ballons-paniers",
"ballons-sondes",
"ballon-volant",
"Ballrechten-Dottingen",
"ball-trap",
"bal-musette",
"Balnot-la-Grange",
"Balnot-sur-Laignes",
"bals-musette",
"bana-bana",
"bana-banas",
"banana-split",
"banana-splits",
"Banassac-Canilhac",
"bande-annonce",
"Ban-de-Laveline",
"ban-de-lavelinois",
"Ban-de-Lavelinois",
"ban-de-lavelinoise",
"Ban-de-Lavelinoise",
"ban-de-lavelinoises",
"Ban-de-Lavelinoises",
"bandes-annonces",
"Ban-de-Sapt",
"bande-son",
"bank-note",
"bank-notes",
"Banneville-la-Campagne",
"Banneville-sur-Ajon",
"Bannost-Villegagnon",
"Banogne-Recouvrance",
"Ban-Saint-Martin",
"ban-saint-martinois",
"Ban-Saint-Martinois",
"ban-saint-martinoise",
"Ban-Saint-Martinoise",
"ban-saint-martinoises",
"Ban-Saint-Martinoises",
"Ban-sur-Meurthe",
"Ban-sur-Meurthe-Clefcy",
"Banyuls-dels-Aspres",
"Banyuls-sur-Mer",
"Baons-le-Comte",
"Bapeaume-lès-Rouen",
"Barbazan-Debat",
"Barbazan-Dessus",
"barbe-à-papa",
"Barbe-Bleue",
"barbe-de-bouc",
"barbe-de-capucin",
"barbe-de-chèvre",
"barbe-de-Jupiter",
"Barberey-Saint-Sulpice",
"barbes-de-capucin",
"barbes-de-Jupiter",
"Barbey-Seroux",
"Barbezieux-Saint-Hilaire",
"Barbirey-sur-Ouche",
"Barbonne-Fayel",
"Barcelonne-du-Gers",
"Bard-le-Régulier",
"Bard-lès-Epoisses",
"Bard-lès-Époisses",
"Bard-lès-Pesmes",
"Barenton-Bugny",
"Barenton-Cel",
"Barenton-sur-Serre",
"Barésia-sur-l'Ain",
"Bar-et-Harricourt",
"Barger-Compascuum",
"Barger-Erfscheidenveen",
"Barger-Oosterveen",
"Barger-Oosterveld",
"Bargfeld-Stegen",
"Barisey-au-Plain",
"Barisey-la-Côte",
"Barisis-aux-Bois",
"barium-adulaire",
"barium-adulaires",
"barium-anorthite",
"barium-anorthites",
"barium-phlogopite",
"barium-phlogopites",
"barium-sanidine",
"barium-sanidines",
"Bar-le-Duc",
"Bar-lès-Buzancy",
"Barletta-Andria-Trani",
"Barneville-Carteret",
"Barneville-la-Bertran",
"Barneville-sur-Seine",
"Baron-sur-Odon",
"Barou-en-Auge",
"Barrais-Bussolles",
"Barraute-Camu",
"barré-bandé",
"Barre-des-Cévennes",
"barrés-bandés",
"Barret-de-Lioure",
"Barret-le-Bas",
"Barret-le-Haut",
"Barret-sur-Méouge",
"Barriac-les-Bosquets",
"Barrow-in-Furness",
"Barry-d'Islemade",
"bars-tabacs",
"Bar-sur-Aube",
"Bar-sur-Seine",
"bar-tabac",
"bar-tabacs",
"Bartenshagen-Parkentin",
"Barvaux-Condroz",
"Barville-en-Gâtinais",
"baryton-basse",
"barytons-basses",
"baryum-orthose",
"baryum-orthoses",
"Barzy-en-Thiérache",
"Barzy-sur-Marne",
"Basadingen-Schlattingen",
"basco-béarnaise",
"basco-navarrais",
"base-ball",
"base-balls",
"base-jump",
"base-jumpeur",
"base-jumpeurs",
"base-jumpeuse",
"base-jumpeuses",
"basi-sphénoïdal",
"basket-ball",
"basket-balls",
"Baslieux-lès-Fismes",
"Baslieux-sous-Châtillon",
"baso-cellulaire",
"baso-cellulaires",
"basque-uruguayen",
"basset-hound",
"bassi-colica",
"bassi-colicas",
"Bassignac-le-Bas",
"Bassignac-le-Haut",
"Bassillac-et-Auberoche",
"Bassillon-Vauzé",
"bassins-versants",
"bassin-versant",
"Bassoles-Aulers",
"bat-à-beurre",
"bat-à-bourre",
"bateau-bus",
"bateau-citerne",
"bateau-dragon",
"bateau-école",
"bateau-feu",
"bateau-lavoir",
"bateau-logement",
"bateau-mère",
"bateau-mouche",
"bateau-phare",
"bateau-usine",
"bateau-vanne",
"bateaux-bus",
"bateaux-citernes",
"bateaux-dragons",
"bateaux-écoles",
"bateaux-feu",
"bateaux-lavoirs",
"bateaux-logements",
"bateaux-mères",
"bateaux-mouches",
"bateaux-phare",
"bateaux-usines",
"bateaux-vanne",
"bat-flanc",
"bat-flancs",
"Bathelémont-lès-Bauzemont",
"Batignolles-Monceaux",
"Batilly-en-Gâtinais",
"Batilly-en-Puisaye",
"bat-l'eau",
"bats-à-beurre",
"bats-à-bourre",
"bats-l'eau",
"battant-l'oeil",
"battant-l'œil",
"battants-l'oeil",
"battants-l'œil",
"batte-lessive",
"batte-mare",
"Battenans-les-Mines",
"Battenans-Varin",
"batte-plate",
"batte-queue",
"battes-plates",
"Batz-sur-Mer",
"Baudinard-sur-Verdon",
"Baugé-en-Anjou",
"Baulme-la-Roche",
"Baulne-en-Brie",
"Baume-les-Dames",
"Baume-les-Messieurs",
"baussery-montain",
"Baussery-Montain",
"baussery-montaine",
"Baussery-Montaine",
"baussery-montaines",
"Baussery-Montaines",
"baussery-montains",
"Baussery-Montains",
"Bayard-sur-Marne",
"Bayenghem-lès-Eperlecques",
"Bayenghem-lès-Éperlecques",
"Bayenghem-lès-Seninghem",
"Bayerfeld-Steckweiler",
"bay-ice",
"bay-ices",
"Bayon-sur-Gironde",
"Bayonville-sur-Mad",
"Bay-sur-Aube",
"Bazeilles-sur-Othain",
"Bazincourt-sur-Epte",
"Bazincourt-sur-Saulx",
"Bazoches-au-Houlme",
"Bazoches-en-Dunois",
"Bazoches-lès-Bray",
"Bazoches-les-Gallerandes",
"Bazoches-les-Hautes",
"Bazoches-sur-Guyonne",
"Bazoches-sur-Hoëne",
"Bazoches-sur-le-Betz",
"Bazoches-sur-Vesles",
"Bazoges-en-Paillers",
"Bazoges-en-Pareds",
"Bazoilles-et-Ménil",
"Bazoilles-sur-Meuse",
"Bazouges-la-Pérouse",
"Bazouges-sous-Hédé",
"Bazouges-sur-le-Loir",
"Bazus-Aure",
"Bazus-Neste",
"beach-volley",
"beach-volleys",
"beagle-harrier",
"Béard-Géovreissiat",
"Beaubec-la-Rosière",
"Beaucamps-le-Jeune",
"Beaucamps-le-Vieux",
"Beaucamps-Ligny",
"Beauchamps-sur-Huillard",
"beau-chasseur",
"Beauchery-Saint-Martin",
"Beaucourt-en-Santerre",
"Beaucourt-sur-l'Ancre",
"Beaucourt-sur-l'Hallue",
"beau-dabe",
"Beauficel-en-Lyons",
"beau-fils",
"Beaufort-Blavincourt",
"Beaufort-en-Anjou",
"Beaufort-en-Argonne",
"Beaufort-en-Santerre",
"Beaufort-en-Vallée",
"Beaufort-sur-Gervanne",
"Beaufour-Druval",
"beau-frais",
"beau-frère",
"Beaugies-sous-Bois",
"Beaujeu-Saint-Vallier-Pierrejux-et-Quitteur",
"beaujolais-villages",
"Beaulieu-en-Argonne",
"Beaulieu-les-Fontaines",
"Beaulieu-lès-Loches",
"Beaulieu-sous-Bressuire",
"Beaulieu-sous-la-Roche",
"Beaulieu-sous-Parthenay",
"Beaulieu-sur-Dordogne",
"Beaulieu-sur-Layon",
"Beaulieu-sur-Loire",
"Beaulieu-sur-Mer",
"Beaulieu-sur-Oudon",
"Beaulieu-sur-Sonnette",
"beau-livre",
"Beaulne-et-Chivy",
"Beaumerie-Saint-Martin",
"Beaumes-de-Venise",
"Beaumetz-lès-Aire",
"Beaumetz-lès-Cambrai",
"Beaumetz-lès-Loges",
"Beaumont-de-Lomagne",
"Beaumont-de-Pertuis",
"Beaumont-du-Gâtinais",
"Beaumont-du-Lac",
"Beaumont-du-Périgord",
"Beaumont-du-Ventoux",
"Beaumont-en-Argonne",
"Beaumont-en-Auge",
"Beaumont-en-Beine",
"Beaumont-en-Cambrésis",
"Beaumont-en-Diois",
"Beaumont-en-Verdunois",
"Beaumont-en-Véron",
"Beaumont-Hague",
"Beaumont-Hamel",
"Beaumont-la-Chartre",
"Beaumont-la-Ferrière",
"Beaumont-la-Ronce",
"Beaumont-le-Hareng",
"Beaumont-le-Roger",
"Beaumont-les-Autels",
"Beaumont-les-Nonains",
"Beaumont-lès-Randan",
"Beaumont-lès-Valence",
"Beaumont-Louestault",
"Beaumont-Monteux",
"Beaumont-Pied-de-Bœuf",
"Beaumont-Saint-Cyr",
"Beaumont-Sardolles",
"Beaumont-sur-Dême",
"Beaumont-sur-Grosne",
"Beaumont-sur-Lèze",
"Beaumont-sur-Oise",
"Beaumont-sur-Sarthe",
"Beaumont-sur-Vesle",
"Beaumont-sur-Vingeanne",
"Beaumont-Village",
"Beaumotte-Aubertans",
"Beaumotte-lès-Montbozon-et-Aubertans",
"Beaumotte-lès-Pin",
"Beaune-d'Allier",
"Beaune-la-Rolande",
"Beaune-les-Mines",
"Beaune-sur-Arzon",
"beau-papa",
"beau-parent",
"beau-partir",
"beau-père",
"beau-petit-fils",
"Beaupréau-en-Mauges",
"Beaurains-lès-Noyon",
"Beauregard-Baret",
"Beauregard-de-Terrasson",
"Beauregard-et-Bassac",
"Beauregard-l'Evêque",
"Beauregard-l'Évêque",
"Beauregard-Vendon",
"Beaurepaire-en-Bresse",
"Beaurepaire-sur-Sambre",
"beau-revoir",
"beau-semblant",
"Beaussais-sur-Mer",
"Beaussais-Vitré",
"Beauvais-sur-Matha",
"Beauvais-sur-Tescou",
"Beauval-en-Caux",
"Beauvoir-de-Marc",
"Beauvoir-en-Lyons",
"Beauvoir-en-Royans",
"Beauvoir-Rivière",
"Beauvoir-sur-Mer",
"Beauvoir-sur-Niort",
"Beauvoir-sur-Sarce",
"Beauvoir-Wavans",
"Beauvois-en-Cambrésis",
"Beauvois-en-Vermandois",
"beaux-arts",
"Beaux-Arts",
"beaux-dabes",
"beaux-enfants",
"beaux-esprits",
"beaux-fils",
"beaux-frères",
"beaux-oncles",
"beaux-parents",
"beaux-pères",
"beaux-petits-fils",
"Beaux-Rivageois",
"bébé-bulle",
"bébé-bus",
"bébé-éprouvette",
"bébé-médicament",
"bébé-nageur",
"bébés-bulles",
"bébés-éprouvette",
"bébés-médicament",
"bébés-nageurs",
"bêche-de-mer",
"bêches-de-mer",
"Bech-Kleinmacher",
"Bécon-les-Granits",
"Bécordel-Bécourt",
"becque-cornu",
"becques-cornus",
"becs-cornus",
"becs-courbes",
"becs-d'âne",
"becs-d'argent",
"becs-de-cane",
"becs-de-canon",
"becs-de-cigogne",
"becs-de-cire",
"becs-de-corbeau",
"becs-de-crosse",
"becs-de-cygne",
"becs-de-faucon",
"becs-de-grue",
"becs-de-hache",
"becs-de-héron",
"becs-de-lézard",
"becs-de-lièvre",
"becs-de-perroquet",
"becs-de-pigeon",
"becs-de-vautour",
"becs-d'oie",
"becs-durs",
"becs-en-ciseaux",
"becs-en-fourreau",
"becs-ouverts",
"becs-plats",
"becs-pointus",
"becs-ronds",
"becs-tranchants",
"Bedburg-Hau",
"Bédeilhac-et-Aynat",
"bedlington-terrier",
"Bédouès-Cocurès",
"Beemte-Broekland",
"Beffu-et-le-Morthomme",
"bégler-beg",
"béglier-beg",
"Bégrolles-en-Mauges",
"behā'ī",
"Béhasque-Lapiste",
"Behren-lès-Forbach",
"Behren-Lübchin",
"Beiersdorf-Freudenberg",
"Beine-Nauroy",
"Beintza-Labaien",
"Beire-le-Châtel",
"Beire-le-Fort",
"bekkō-amé",
"Belan-sur-Ource",
"Belbèze-de-Lauragais",
"Belbèze-en-Comminges",
"Belbèze-en-Lomagne",
"Belbèze-Escoulis",
"Belcastel-et-Buc",
"bel-enfant",
"bel-esprit",
"Bélesta-en-Lauragais",
"bel-étage",
"Belforêt-en-Perche",
"Belfort-du-Quercy",
"Belfort-sur-Rebenty",
"belgo-hollandais",
"Belhomert-Guéhouville",
"Belin-Béliet",
"Belle-Ansois",
"belle-à-voir",
"Bellecombe-en-Bauges",
"Bellecombe-Tarendol",
"belle-dabe",
"belle-dame",
"belle-de-jour",
"belle-de-nuit",
"belle-doche",
"belle-d'onze-heures",
"belle-d'un-jour",
"Belle-Eglise",
"Belle-Église",
"Belle-et-Houllefort",
"belle-étoile",
"belle-famille",
"belle-fille",
"belle-fleur",
"Bellegarde-du-Razès",
"Bellegarde-en-Diois",
"Bellegarde-en-Forez",
"Bellegarde-en-Marche",
"Bellegarde-Marsal",
"Bellegarde-Poussieu",
"Bellegarde-Sainte-Marie",
"Bellegarde-sur-Valserine",
"Belle-Île-en-Mer",
"Belle-Isle-en-Mer",
"Belle-Isle-en-Terre",
"belle-maman",
"belle-mère",
"Bellenod-sous-Origny",
"Bellenod-sur-Seine",
"Bellenot-sous-Pouilly",
"belle-petite-fille",
"belle-pucelle",
"Bellerive-sur-Allier",
"belles-dabes",
"belles-dames",
"Belles-Dames",
"belles-de-jour",
"belles-de-nuit",
"belles-doches",
"belles-d'un-jour",
"belles-étoiles",
"belles-familles",
"belles-filles",
"belles-fleurs",
"Belles-Forêts",
"belles-lettres",
"belles-mères",
"belle-soeur",
"belle-sœur",
"belles-pucelles",
"belles-soeurs",
"belles-sœurs",
"belles-tantes",
"belle-tante",
"Bellevaux-Ligneuville",
"Bellevigne-en-Layon",
"Belleville-en-Caux",
"Belleville-et-Châtillon-sur-Bar",
"Belleville-sur-Bar",
"Belleville-sur-Loire",
"Belleville-sur-Mer",
"Belleville-sur-Meuse",
"Belleville-sur-Vie",
"Bellevue-la-Montagne",
"Belloc-Saint-Clamens",
"Bellou-en-Houlme",
"Bellou-le-Trichard",
"Bellou-sur-Huisne",
"Belloy-en-France",
"Belloy-en-Santerre",
"Belloy-Saint-Léonard",
"Belloy-sur-Somme",
"Belmont-Bretenoux",
"Belmont-d'Azergues",
"Belmont-de-la-Loire",
"Belmont-lès-Darney",
"Belmont-Luthézieu",
"Belmont-Sainte-Foi",
"Belmont-sur-Buttant",
"Belmont-sur-Lausanne",
"Belmont-sur-Rance",
"Belmont-sur-Vair",
"Belmont-sur-Yverdon",
"Belmont-Tramonet",
"bel-oncle",
"bel-outil",
"Belrupt-en-Verdunois",
"bels-outils",
"Belt-Schutsloot",
"Belval-Bois-des-Dames",
"Belval-en-Argonne",
"Belval-et-Sury",
"Belval-sous-Châtillon",
"Belvédère-Campomoro",
"Belvès-de-Castillon",
"Belvèze-du-Razès",
"Belvianes-et-Cavirac",
"Ben-Ahin",
"ben-ahinois",
"Ben-Ahinois",
"Ben-Ahinoise",
"Beneden-Haastrecht",
"Beneden-Leeuwen",
"Benerville-sur-Mer",
"Bénesse-lès-Dax",
"Bénesse-Maremne",
"Bénévent-et-Charbillac",
"Bénévent-l'Abbaye",
"Beney-en-Woëvre",
"Bengy-sur-Craon",
"Beni-Khiran",
"Béning-lès-Saint-Avold",
"béni-non-non",
"béni-oui-oui",
"Bénivay-Ollon",
"benne-kangourou",
"Benque-Dessous-et-Dessus",
"Benqué-Molère",
"bensulfuron-méthyle",
"Bentayou-Sérée",
"bény-bocain",
"Bény-Bocain",
"bény-bocaine",
"Bény-Bocaine",
"bény-bocaines",
"Bény-Bocaines",
"bény-bocains",
"Bény-Bocains",
"Bény-sur-Mer",
"benzoylprop-éthyl",
"bêque-bois",
"bèque-fleur",
"bèque-fleurs",
"Berbérust-Lias",
"Bercenay-en-Othe",
"Bercenay-le-Hayer",
"Berchem-Sainte-Agathe",
"Berchem-Saint-Laurent",
"Berchères-les-Pierres",
"Berchères-Saint-Germain",
"Berchères-sur-Vesgre",
"Berd'huis",
"berd'huisien",
"Berd'huisien",
"berd'huisienne",
"Berd'huisienne",
"berd'huisiennes",
"Berd'huisiennes",
"berd'huisiens",
"Berd'huisiens",
"Berendrecht-Zandvliet-Lillo",
"Bérengeville-la-Campagne",
"Bergères-lès-Vertus",
"Bergères-sous-Montmirail",
"Berg-op-Zoom",
"Bergouey-Viellenave",
"Berg-sur-Moselle",
"Bergues-sur-Sambre",
"Bérig-Vintrange",
"Berkel-Enschot",
"Berkholz-Meyenburg",
"Berlencourt-le-Cauroy",
"Berles-au-Bois",
"Berles-Monchel",
"Berlin-Est",
"Berlin-Ouest",
"Bernac-Debat",
"Bernac-Dessus",
"Bernadets-Debat",
"Bernadets-Dessus",
"bernard-l'ermite",
"bernard-l'hermite",
"Bernay-en-Champagne",
"Bernay-en-Ponthieu",
"Bernay-Saint-Martin",
"Bernay-Vilbert",
"Berne-Mittelland",
"Bernes-sur-Oise",
"Berneuil-en-Bray",
"Berneuil-sur-Aisne",
"Berneval-le-Grand",
"bernico-montois",
"Bernico-Montois",
"bernico-montoise",
"Bernico-Montoise",
"bernico-montoises",
"Bernico-Montoises",
"Bernières-d'Ailly",
"Bernières-le-Patry",
"Bernières-sur-Mer",
"Bernières-sur-Seine",
"Bernkastel-Kues",
"Bernkastel-Wittlich",
"Bernos-Beaulac",
"Bernuy-Zapardiel",
"Berny-en-Santerre",
"Berny-Rivière",
"Berny-sur-Noye",
"Bérou-la-Mulotière",
"Berre-des-Alpes",
"Berre-les-Alpes",
"Berre-l'Etang",
"Berre-l'Étang",
"Berrias-et-Casteljau",
"Berrogain-Laruns",
"Berry-au-Bac",
"Berry-Bouy",
"Bersac-sur-Rivalier",
"Bersillies-l'Abbaye",
"Bertaucourt-Epourdon",
"Berteaucourt-les-Dames",
"Berteaucourt-lès-Thennes",
"Bertreville-Saint-Ouen",
"Bertric-Burée",
"Bertsdorf-Hörnitz",
"Berville-en-Roumois",
"Berville-la-Campagne",
"Berviller-en-Moselle",
"Berville-sur-Mer",
"Berville-sur-Seine",
"Berzé-la-Ville",
"Berzé-le-Châtel",
"Berzy-le-Sec",
"Besny-et-Loizy",
"Bessais-le-Fromental",
"Bessay-sur-Allier",
"Bessède-de-Sault",
"Besse-et-Saint-Anastaise",
"Bessé-sur-Braye",
"Besse-sur-Issole",
"Bessey-en-Chaume",
"Bessey-la-Cour",
"Bessey-lès-Cîteaux",
"Bessines-sur-Gartempe",
"Bessy-sur-Cure",
"béta-cyfluthrine",
"béta-gal",
"Betbezer-d'Armagnac",
"Betcave-Aguin",
"Béthancourt-en-Valois",
"Béthancourt-en-Vaux",
"Béthemont-la-Forêt",
"Béthencourt-sur-Mer",
"Béthencourt-sur-Somme",
"Béthisy-Saint-Martin",
"Béthisy-Saint-Pierre",
"Beton-Bazoches",
"Betoncourt-lès-Brotte",
"Betoncourt-les-Ménétriers",
"Betoncourt-Saint-Pancras",
"Betoncourt-sur-Mance",
"Betpouey-Barèges",
"Bettancourt-la-Ferrée",
"Bettancourt-la-Longue",
"Bettange-sur-Mess",
"Bettegney-Saint-Brice",
"bette-marine",
"Bettencourt-Rivière",
"Bettencourt-Saint-Ouen",
"bettes-marines",
"Betting-lès-Saint-Avold",
"Betton-Bettonet",
"Bettoncourt-le-Haut",
"Betz-le-Château",
"Beulotte-Saint-Laurent",
"beun'aise",
"Beura-Cardezza",
"Beurey-Bauguay",
"Beurey-sur-Saulx",
"beurre-frais",
"Beuvron-en-Auge",
"Beuvry-la-Forêt",
"Beuvry-Nord",
"Beuzec-Cap-Sizun",
"Beuzec-Conq",
"Beuzeville-au-Plain",
"Beuzeville-la-Bastille",
"Beuzeville-la-Grenier",
"Beuzeville-la-Guérard",
"Beveland-Nord",
"Béville-le-Comte",
"Bexhill-on-Sea",
"Beychac-et-Caillau",
"Beynac-et-Cazenac",
"Beyne-Heusay",
"Beyrède-Jumet",
"Beyren-lès-Sierck",
"Beyrie-en-Béarn",
"Beyrie-sur-Joyeuse",
"Bey-sur-Seille",
"Bezange-la-Grande",
"Bezange-la-Petite",
"Bézaudun-les-Alpes",
"Bézaudun-sur-Bîne",
"Bez-et-Esparon",
"Bezins-Garraux",
"Bézues-Bajon",
"Bézu-la-Forêt",
"Bézu-le-Guéry",
"Bézu-Saint-Eloi",
"Bézu-Saint-Éloi",
"Bézu-Saint-Germain",
"B-frame",
"Biache-Saint-Vaast",
"Bians-les-Usiers",
"Biars-sur-Cère",
"biche-cochon",
"Bichelsee-Balterswil",
"Bidania-Goiatz",
"Bief-des-Maisons",
"Bief-du-Fourg",
"Biefvillers-lès-Bapaume",
"Biel-Benken",
"Biencourt-sur-Orge",
"Bienne-lez-Happart",
"biens-fonds",
"Bienville-la-Petite",
"Bienvillers-au-Bois",
"bière-pong",
"Bierre-lès-Semur",
"Bierry-les-Belles-Fontaines",
"Biesme-sous-Thuin",
"Biest-Houtakker",
"Bietigheim-Bissingen",
"Biéville-Beuville",
"Biéville-en-Auge",
"Biéville-Quétiéville",
"Biéville-sur-Orne",
"Big-bang",
"big-endian",
"Bignicourt-sur-Marne",
"Bignicourt-sur-Saulx",
"bil-ka",
"bil-kas",
"Billens-Hennens",
"Billigheim-Ingenheim",
"Billy-Berclau",
"Billy-Chevannes",
"Billy-le-Grand",
"Billy-lès-Chanceaux",
"Billy-Montigny",
"Billy-sous-les-Côtes",
"Billy-sous-Mangiennes",
"Billy-sur-Aisne",
"Billy-sur-Oisy",
"Billy-sur-Ourcq",
"bin-bin",
"bin-bins",
"binge-watcha",
"binge-watchai",
"binge-watchaient",
"binge-watchais",
"binge-watchait",
"binge-watchâmes",
"binge-watchant",
"binge-watchas",
"binge-watchasse",
"binge-watchassent",
"binge-watchasses",
"binge-watchassiez",
"binge-watchassions",
"binge-watchât",
"binge-watchâtes",
"binge-watche",
"binge-watché",
"binge-watchée",
"binge-watchées",
"binge-watchent",
"binge-watcher",
"binge-watchera",
"binge-watcherai",
"binge-watcheraient",
"binge-watcherais",
"binge-watcherait",
"binge-watcheras",
"binge-watchèrent",
"binge-watcherez",
"binge-watcheriez",
"binge-watcherions",
"binge-watcherons",
"binge-watcheront",
"binge-watches",
"binge-watchés",
"binge-watchez",
"binge-watchiez",
"binge-watchions",
"binge-watchons",
"Binic-Étables-sur-Mer",
"Binnen-Moerdijk",
"bin's",
"Binson-et-Orquigny",
"Bioley-Magnoux",
"Bioley-Orjulaz",
"Bionville-sur-Nied",
"Birac-sur-Trec",
"Birken-Honigsessen",
"Bischtroff-sur-Sarre",
"Bissao-Guinéen",
"bissau-guinéen",
"Bissau-Guinéen",
"Bissau-Guinéenne",
"Bissau-Guinéennes",
"Bissey-la-Côte",
"Bissey-la-Pierre",
"Bissey-sous-Cruchaud",
"Bissy-la-Mâconnaise",
"Bissy-sous-Uxelles",
"Bissy-sur-Fley",
"Bisten-en-Lorraine",
"bistro-brasserie",
"bistro-brasseries",
"bit-el-mal",
"Bithaine-et-le-Val",
"Bitschwiller-lès-Thann",
"Bitterfeld-Wolfen",
"bitter-pit",
"Biurrun-Olcoz",
"Biville-la-Baignarde",
"Biville-la-Rivière",
"Biville-sur-Mer",
"Bize-Minervois",
"bla-bla",
"bla-bla-bla",
"black-bass",
"black-blanc-beur",
"black-bottom",
"black-bottoms",
"Black-Lakien",
"black-out",
"black-outa",
"black-outai",
"black-outaient",
"black-outais",
"black-outait",
"black-outâmes",
"black-outant",
"black-outas",
"black-outasse",
"black-outassent",
"black-outasses",
"black-outassiez",
"black-outassions",
"black-outât",
"black-outâtes",
"black-oute",
"black-outé",
"black-outée",
"black-outées",
"black-outent",
"black-outer",
"black-outera",
"black-outerai",
"black-outeraient",
"black-outerais",
"black-outerait",
"black-outeras",
"black-outèrent",
"black-outerez",
"black-outeriez",
"black-outerions",
"black-outerons",
"black-outeront",
"black-outes",
"black-outés",
"black-outez",
"black-outiez",
"black-outions",
"black-outons",
"black-outs",
"black-rot",
"Blagny-sur-Vingeanne",
"Blaincourt-lès-Précy",
"Blaincourt-sur-Aube",
"Blainville-Crevon",
"Blainville-sur-l'Eau",
"Blainville-sur-Mer",
"Blainville-sur-Orne",
"Blaise-sous-Arzillières",
"Blaise-sous-Hauteville",
"Blaison-Gohier",
"Blaison-Saint-Sulpice",
"Blaisy-Bas",
"Blaisy-Haut",
"blanche-coiffe",
"Blanche-Eglise",
"Blanche-Église",
"Blanchefosse-et-Bay",
"Blanche-Neige",
"blanche-queue",
"blanche-raie",
"blanches-coiffes",
"blancs-becs",
"blancs-bocs",
"blancs-bois",
"blancs-de-baleine",
"blancs-d'Espagne",
"blancs-en-bourre",
"blancs-estocs",
"blancs-étocs",
"blancs-mangers",
"blancs-manteaux",
"blancs-raisins",
"blancs-seings",
"blancs-signés",
"Blandouet-Saint-Jean",
"Blangerval-Blangermont",
"Blangy-le-Château",
"Blangy-sous-Poix",
"Blangy-sur-Bresle",
"Blangy-sur-Ternoise",
"Blangy-Tronville",
"Blankenfelde-Mahlow",
"Blanquefort-sur-Briolance",
"Blanzac-lès-Matha",
"Blanzac-Porcheresse",
"Blanzaguet-Saint-Cybard",
"Blanzay-sur-Boutonne",
"Blanzy-la-Salonnaise",
"Blanzy-lès-Fismes",
"Blaydon-on-Tyne",
"Blaye-et-Sainte-Luce",
"Blaye-les-Mines",
"Bleigny-le-Carreau",
"Blénod-lès-Pont-à-Mousson",
"Blénod-lès-Toul",
"bleu-bite",
"bleu-manteau",
"bleu-merle",
"Bleury-Saint-Symphorien",
"bleus-manteaux",
"Bleyen-Genschmar",
"Blies-Ebersing",
"Blies-Ébersing",
"blies-ebersingeois",
"Blies-Ebersingeois",
"blies-ébersingeois",
"Blies-Ébersingeois",
"blies-ebersingeoise",
"Blies-Ebersingeoise",
"blies-ébersingeoise",
"Blies-Ébersingeoise",
"blies-ebersingeoises",
"Blies-Ebersingeoises",
"blies-ébersingeoises",
"Blies-Ébersingeoises",
"Blies-Guersviller",
"Bligny-en-Othe",
"Bligny-lès-Beaune",
"Bligny-le-Sec",
"Bligny-sous-Beaune",
"Bligny-sur-Ouche",
"bling-bling",
"bling-blings",
"Blis-et-Born",
"blis-et-bornois",
"Blis-et-Bornois",
"blis-et-bornoise",
"Blis-et-Bornoise",
"blis-et-bornoises",
"Blis-et-Bornoises",
"bloc-cylindres",
"bloc-eau",
"bloc-film",
"bloc-films",
"block-système",
"bloc-moteur",
"bloc-moteurs",
"bloc-note",
"bloc-notes",
"blocs-eau",
"blocs-films",
"blocs-notes",
"Blois-sur-Seille",
"Blonville-sur-Mer",
"Blosseville-Bonsecours",
"Blot-l'Eglise",
"Blot-l'Église",
"Blousson-Sérian",
"blue-jean",
"blue-jeans",
"blue-lias",
"blu-ray",
"boat-people",
"bobby-soxer",
"bobby-soxers",
"Bobenheim-Roxheim",
"Bobo-Dioulasso",
"Bodeghem-Saint-Martin",
"Bodegraven-Reeuwijk",
"Bodenrode-Westhausen",
"Bodman-Ludwigshafen",
"body-building",
"Boeil-Bezing",
"Boën-sur-Lignon",
"Boëssé-le-Sec",
"boeuf-carotte",
"bœuf-carotte",
"bœuf-carottes",
"bœuf-garou",
"Bœurs-en-Othe",
"Boevange-sur-Attert",
"Bogis-Bossey",
"Bogny-lès-Murtin",
"Bogny-sur-Meuse",
"Bohain-en-Vermandois",
"Bohas-Meyriat-Rignat",
"Böhl-Iggelheim",
"Boigny-sur-Bionne",
"Boinville-en-Mantois",
"Boinville-en-Woëvre",
"Boinville-le-Gaillard",
"Boiry-Becquerelle",
"Boiry-Notre-Dame",
"Boiry-Sainte-Rictrude",
"Boiry-Saint-Martin",
"Boisleux-au-Mont",
"Boisleux-Saint-Marc",
"Boissei-la-Lande",
"Boisse-Penchot",
"Boisset-et-Gaujac",
"Boisset-lès-Montrond",
"Boisset-les-Prévanches",
"Boisset-Saint-Priest",
"Boissey-le-Châtel",
"Boissise-la-Bertrand",
"Boissise-le-Roi",
"Boissy-aux-Cailles",
"Boissy-en-Drouais",
"Boissy-Fresnoy",
"Boissy-l'Aillerie",
"Boissy-Lamberville",
"Boissy-la-Rivière",
"Boissy-le-Bois",
"Boissy-le-Châtel",
"Boissy-le-Cutté",
"Boissy-le-Repos",
"Boissy-le-Sec",
"Boissy-lès-Perche",
"boissy-maugien",
"Boissy-Maugien",
"boissy-maugienne",
"boissy-maugiennes",
"boissy-maugiens",
"Boissy-Maugis",
"Boissy-Mauvoisin",
"Boissy-Saint-Léger",
"Boissy-sans-Avoir",
"Boissy-sous-Saint-Yon",
"Boissy-sur-Damville",
"Boisville-la-Saint-Père",
"boîtes-à-musique",
"boîtes-à-musiques",
"boit-sans-soif",
"Bokholt-Hanredder",
"bolivo-paraguayen",
"Bollendorf-Pont",
"bombardiers-torpilleurs",
"bombardier-torpilleur",
"Bonac-Irazein",
"bon-air",
"bon-bec",
"Bonchamp-lès-Laval",
"bon-chrétien",
"Boncourt-le-Bois",
"Boncourt-sur-Meuse",
"bon-creux",
"bon-encontrais",
"Bon-Encontrais",
"bon-encontraise",
"Bon-Encontraise",
"bon-encontraises",
"Bon-Encontraises",
"Bon-Encontre",
"bon-fieux",
"bon-fils",
"bon-henri",
"bonheur-du-jour",
"Bonlieu-sur-Roubion",
"bon-mot",
"Bonnac-la-Côte",
"bonne-dame",
"bonne-encontre",
"bonne-ente",
"bonne-ententiste",
"bonne-ententistes",
"bonne-femme",
"bonne-grâce",
"bonne-main",
"bonne-maman",
"bonnes-dames",
"bonnes-entes",
"bonnes-femmes",
"bonnes-grâces",
"bonnes-mamans",
"bonnes-vilaines",
"bonnes-voglies",
"bonnet-chinois",
"bonnet-de-prêtre",
"bonnet-rouge",
"bonnets-chinois",
"bonnets-de-prêtres",
"bonnets-verts",
"bonnet-vert",
"Bonneuil-en-France",
"Bonneuil-en-Valois",
"Bonneuil-les-Eaux",
"Bonneuil-Matours",
"Bonneuil-sur-Marne",
"Bonneval-en-Diois",
"Bonneval-sur-Arc",
"Bonnevaux-le-Prieuré",
"Bonnevent-et-Velloreille-lès-Bonnevent",
"Bonnevent-Velloreille",
"bonne-vilaine",
"Bonneville-Aptot",
"Bonneville-et-Saint-Avit-de-Fumadières",
"Bonneville-la-Louvet",
"Bonneville-sur-Touques",
"bonne-voglie",
"Bonnières-sur-Seine",
"Bonningues-lès-Ardres",
"Bonningues-lès-Calais",
"Bonny-sur-Loire",
"bon-ouvrier",
"bon-ouvriers",
"bon-papa",
"bon-plein",
"Bonrepos-Riquet",
"Bonrepos-sur-Aussonnelle",
"bons-chrétiens",
"Bon-Secourois",
"Bon-Secours",
"Bons-en-Chablais",
"bons-mots",
"bons-papas",
"Bons-Tassilly",
"bon-tour",
"Bonvillers-Mont",
"boogie-woogie",
"boogie-woogies",
"Boô-Silhen",
"Bootle-cum-Linacre",
"Bora-Bora",
"Boran-sur-Oise",
"Borcq-sur-Airvault",
"Bordeaux-en-Gâtinais",
"Bordeaux-Saint-Clair",
"Börde-Hakel",
"Bordel's",
"borde-plats",
"Bordères-et-Lamensans",
"Bordères-Louron",
"Bordères-sur-l'Echez",
"Bordères-sur-l'Échez",
"border-terrier",
"Bordes-de-Rivière",
"Bordes-Uchentein",
"bord-opposé",
"Bord-Saint-Georges",
"bore-out",
"bore-outs",
"Boresse-et-Martron",
"Bor-et-Bar",
"Borgdorf-Seedorf",
"Börgerende-Rethwisch",
"Borger-Odoorn",
"Bormes-les-Mimosas",
"Born-de-Champs",
"borne-couteau",
"borne-fontaine",
"borne-fusible",
"borne-fusibles",
"bornes-couteaux",
"bornes-fontaines",
"Bors-de-Baignes",
"Bors-de-Montmoreau",
"Borstel-Hohenraden",
"Bort-les-Orgues",
"Bort-l'Etang",
"Bort-l'Étang",
"Bosc-Bénard-Commin",
"Bosc-Bénard-Crescy",
"Bosc-Bérenger",
"Bosc-Bordel",
"Bosc-Edeline",
"Bosc-Édeline",
"bosc-guérardais",
"Bosc-Guérardais",
"bosc-guérardaise",
"Bosc-Guérardaise",
"bosc-guérardaises",
"Bosc-Guérardaises",
"Bosc-Guérard-Saint-Adrien",
"Bosc-Hyons",
"Bosc-le-Hard",
"Bosc-Mesnil",
"Bosc-Renoult-en-Ouche",
"Bosc-Renoult-en-Roumois",
"bosc-renoulthien",
"Bosc-Renoulthien",
"bosc-renoulthienne",
"Bosc-Renoulthienne",
"bosc-renoulthiennes",
"Bosc-Renoulthiennes",
"bosc-renoulthiens",
"Bosc-Renoulthiens",
"Bosc-Roger-sur-Buchy",
"Bosguérard-de-Marcouville",
"Bösleben-Wüllersleben",
"Bosmie-l'Aiguille",
"Bosmont-sur-Serre",
"Bosmoreau-les-Mines",
"Bosnie-et-Herzégovine",
"Bosnie-Herzégovine",
"bosno-serbe",
"bosno-serbes",
"Bossay-sur-Claise",
"Bosseval-et-Briancourt",
"Bossus-lès-Rumigny",
"Bossut-Gottechain",
"botte-chaussettes",
"bottom-up",
"Botz-en-Mauges",
"Boubers-lès-Hesmond",
"Boubers-sur-Canche",
"Bouchamps-lès-Craon",
"Bouchavesnes-Bergen",
"bouche-à-bouche",
"bouche-en-flûte",
"bouche-nez",
"bouche-pora",
"bouche-porai",
"bouche-poraient",
"bouche-porais",
"bouche-porait",
"bouche-porâmes",
"bouche-porant",
"bouche-poras",
"bouche-porasse",
"bouche-porassent",
"bouche-porasses",
"bouche-porassiez",
"bouche-porassions",
"bouche-porât",
"bouche-porâtes",
"bouche-pore",
"bouche-poré",
"bouche-porée",
"bouche-porées",
"bouche-porent",
"bouche-porer",
"bouche-porera",
"bouche-porerai",
"bouche-poreraient",
"bouche-porerais",
"bouche-porerait",
"bouche-poreras",
"bouche-porèrent",
"bouche-porerez",
"bouche-poreriez",
"bouche-porerions",
"bouche-porerons",
"bouche-poreront",
"bouche-pores",
"bouche-porés",
"bouche-porez",
"bouche-poriez",
"bouche-porions",
"bouche-porons",
"Bouches-du-Rhône",
"bouche-trou",
"bouche-trous",
"Bouchy-Saint-Genest",
"Boucieu-le-Roi",
"Boucle-Saint-Blaise",
"Boucle-Saint-Denis",
"Boucoiran-et-Nozières",
"Bouconville-sur-Madt",
"Bouconville-Vauclair",
"Bouconville-Vauclerc",
"Boudy-de-Beauregard",
"Boueilh-Boueilho-Lasque",
"bouffe-curé",
"bouffe-curés",
"bouffe-galette",
"Bougé-Chambalud",
"Bouges-le-Château",
"Bougy-lez-Neuville",
"Bougy-Villars",
"Bouhans-et-Feurg",
"Bouhans-lès-Lure",
"Bouhans-lès-Montbozon",
"boui-boui",
"bouig-bouig",
"Bouilh-Devant",
"Bouilh-Péreuilh",
"Bouillancourt-en-Séry",
"Bouillancourt-la-Bataille",
"Bouillé-Courdault",
"Bouillé-Loretz",
"Bouillé-Ménard",
"Bouillé-Saint-Paul",
"bouillon-blanc",
"Bouilly-en-Gâtinais",
"Bouin-Plumoison",
"bouis-bouis",
"Boujan-sur-Libron",
"Boulay-les-Barres",
"Boulay-les-Ifs",
"boulay-morinois",
"Boulay-Morinois",
"boulay-morinoise",
"Boulay-Morinoise",
"boulay-morinoises",
"Boulay-Morinoises",
"Boulay-Moselle",
"Boule-d'Amont",
"boule-dogue",
"boules-dogues",
"Boulieu-lès-Annonay",
"Boullay-les-Troux",
"Boulogne-Billancourt",
"Boulogne-la-Grasse",
"Boulogne-sur-Gesse",
"Boulogne-sur-Helpe",
"Boulogne-sur-Mer",
"Boult-aux-Bois",
"Boult-sur-Suippe",
"boum-boum",
"Bouray-sur-Juine",
"Bourbach-le-Bas",
"Bourbach-le-Haut",
"Bourbon-Lancy",
"Bourbon-l'Archambault",
"Bourbonne-les-Bains",
"Bourbon-Vendée",
"Bourbourg-Campagne",
"Bourcefranc-le-Chapus",
"Bourdons-sur-Rognon",
"Bouret-sur-Canche",
"bourgeois-bohème",
"bourgeois-bohèmes",
"bourgeoise-bohème",
"bourgeoises-bohèmes",
"Bourget-en-Huile",
"Bourgneuf-en-Mauges",
"Bourgneuf-en-Retz",
"Bourgneuf-Val-d'Or",
"Bourgogne-Franche-Comté",
"Bourgogne-Fresne",
"Bourgoin-Jallieu",
"Bourgtheroulde-Infreville",
"bourgue-épine",
"bourgues-épines",
"Bourguignon-lès-Conflans",
"Bourguignon-lès-la-Charité",
"Bourguignon-lès-Morey",
"Bourguignon-sous-Coucy",
"Bourguignon-sous-Montbavin",
"Bournainville-Faverolles",
"Bourneville-Sainte-Croix",
"Bournoncle-Saint-Pierre",
"Bouroum-Bouroum",
"bourre-chrétien",
"bourre-de-Marseille",
"bourre-goule",
"bourre-goules",
"bourre-noix",
"bourre-pif",
"bourre-pifs",
"bourres-de-Marseille",
"Bourriot-Bergonce",
"Bourron-Marlotte",
"bourse-à-berger",
"bourse-à-pasteur",
"Bourseigne-Neuve",
"Bourseigne-Vieille",
"bourses-à-berger",
"bourses-à-pasteur",
"Boury-en-Vexin",
"Bousignies-sur-Roc",
"Boussac-Bourg",
"Boussières-en-Cambrésis",
"Boussières-sur-Sambre",
"Boussu-en-Fagne",
"Boussu-lez-Walcourt",
"Boussy-Saint-Antoine",
"bout-avant",
"bout-d'aile",
"bout-d'argent",
"bout-dehors",
"bout-de-l'an",
"Bout-de-l'Îlien",
"bout-de-manche",
"bout-de-quièvre",
"Bout-du-Pont-de-Larn",
"bout-du-pont-de-l'arnais",
"Bout-du-Pont-de-l'Arnais",
"bout-du-pont-de-l'arnaise",
"Bout-du-Pont-de-l'Arnaise",
"bout-du-pont-de-l'arnaises",
"Bout-du-Pont-de-l'Arnaises",
"boute-à-port",
"boute-charge",
"boute-dehors",
"boute-de-lof",
"boute-en-courroie",
"boute-en-train",
"boute-feu",
"boute-hache",
"boute-hors",
"Bouteilles-Saint-Sébastien",
"boute-joie",
"boute-lof",
"Boutenac-Touvent",
"boutes-à-port",
"boute-selle",
"boute-selles",
"boute-tout-cuire",
"Boutiers-Saint-Trojan",
"Boutigny-Prouais",
"Boutigny-sur-Essonne",
"bouton-d'or",
"bouton-poussoir",
"bouton-pression",
"boutons-d'or",
"boutons-pression",
"bout-rimé",
"bout-saigneux",
"bouts-avant",
"bouts-d'aile",
"bouts-d'argent",
"bouts-dehors",
"bouts-de-l'an",
"bouts-de-manche",
"bouts-de-quièvre",
"bouts-rimés",
"bouts-saigneux",
"Bouvaincourt-sur-Bresle",
"Bouvesse-Quirieu",
"Bouvignes-sur-Meuse",
"Bouvigny-Boyeffles",
"Bouvincourt-en-Vermandois",
"Bouxières-aux-Bois",
"Bouxières-aux-Chênes",
"Bouxières-aux-Dames",
"Bouxières-sous-Froidmont",
"Boux-sous-Salmaise",
"Bouy-Luxembourg",
"Bouy-sur-Orvin",
"Bouze-lès-Beaune",
"Bouzon-Gellenave",
"Bouzonville-aux-Bois",
"Bouzonville-en-Beauce",
"Bouzy-la-Forêt",
"Bovée-sur-Barboure",
"Boven-Haastrecht",
"Boven-Hardinxveld",
"Boven-Leeuwen",
"Bovisio-Masciago",
"bow-string",
"bow-strings",
"bow-window",
"bow-windows",
"box-calf",
"boxer-short",
"boxer-shorts",
"box-office",
"box-offices",
"Boyeux-Saint-Jérôme",
"boy-scout",
"boy-scouts",
"Brabant-du-Nord",
"Brabant-en-Argonne",
"Brabant-le-Roi",
"Brabant-Septentrional",
"Brabant-sur-Meuse",
"Brabant-Wallon",
"bracelet-montre",
"bracelets-montres",
"brachio-céphalique",
"brachio-céphaliques",
"brachio-radial",
"Bragelogne-Beauvoir",
"Bragny-en-Charollais",
"Bragny-sur-Saône",
"Brailly-Cornehotte",
"Braine-l'Alleud",
"Braine-le-Château",
"Braine-le-Comte",
"Brains-sur-Gée",
"Brains-sur-les-Marches",
"Brain-sur-Allonnes",
"Brain-sur-l'Authion",
"Brain-sur-Longuenée",
"Brain-sur-Vilaine",
"Brainville-sur-Meuse",
"Braisnes-sur-Aronde",
"branches-ursines",
"branche-ursine",
"Brancourt-en-Laonnois",
"Brancourt-le-Grand",
"brancs-ursines",
"branc-ursine",
"branc-ursines",
"Brandebourg-sur-la-Havel",
"Brande-Hörnerkirchen",
"branle-bas",
"branle-gai",
"branle-long",
"branle-queue",
"branles-bas",
"branles-gais",
"branles-longs",
"Branoux-les-Taillades",
"branque-ursine",
"Branville-Hague",
"Bras-d'Asse",
"bras-d'assien",
"Bras-d'Assien",
"bras-d'assienne",
"Bras-d'Assienne",
"bras-d'assiennes",
"Bras-d'Assiennes",
"bras-d'assiens",
"Bras-d'Assiens",
"brash-ice",
"brash-ices",
"Bras-Panon",
"Brassac-les-Mines",
"brasse-camarade",
"brasse-camarades",
"Bras-sur-Meuse",
"Braud-et-Saint-Louis",
"Braunau-am-Inn",
"Braux-le-Châtel",
"Braux-Sainte-Cohière",
"Braux-Saint-Remy",
"Bray-Dunes",
"bray-dunois",
"Bray-Dunois",
"bray-dunoise",
"Bray-Dunoise",
"bray-dunoises",
"Bray-Dunoises",
"Braye-en-Laonnois",
"Braye-en-Thiérache",
"Bray-en-Val",
"Braye-sous-Faye",
"Braye-sur-Maulne",
"Bray-et-Lû",
"Bray-la-Campagne",
"Bray-lès-Mareuil",
"Bray-Saint-Aignan",
"Bray-Saint-Christophe",
"Bray-sur-Seine",
"Bray-sur-Somme",
"Brazey-en-Morvan",
"Brazey-en-Plaine",
"brazza-congolais",
"Brazza-Congolais",
"Bréal-sous-Montfort",
"Bréal-sous-Vitré",
"Bréau-et-Salagosse",
"brèche-dent",
"brèche-dents",
"Brécy-Brières",
"brécy-brièrois",
"Brécy-Brièrois",
"brécy-brièroise",
"Brécy-Brièroise",
"brécy-brièroises",
"Brécy-Brièroises",
"bredi-breda",
"Brégnier-Cordon",
"Bréhain-la-Ville",
"Bréhan-Loudéac",
"Breil-sur-Roya",
"Breistroff-la-Grande",
"Breitenbach-Haut-Rhin",
"brelic-breloque",
"brelique-breloque",
"Brémontier-Merval",
"Brem-sur-Mer",
"Brémur-et-Vaurois",
"Bresse-sur-Grosne",
"Bressey-sur-Tille",
"Bretagne-d'Armagnac",
"Bretagne-de-Marsan",
"Bretigney-Notre-Dame",
"Brétignolles-le-Moulin",
"Bretignolles-sur-Mer",
"Bretigny-sur-Morrens",
"Brétigny-sur-Orge",
"Bretnig-Hauswalde",
"Brette-les-Pins",
"Bretteville-du-Grand-Caux",
"Bretteville-le-Rabet",
"Bretteville-l'Orgueilleuse",
"Bretteville-Saint-Laurent",
"Bretteville-sur-Ay",
"Bretteville-sur-Dives",
"Bretteville-sur-Laize",
"Bretteville-sur-Odon",
"Breuil-Barret",
"breuil-bernardin",
"Breuil-Bernardin",
"breuil-bernardine",
"Breuil-Bernardine",
"breuil-bernardines",
"Breuil-Bernardines",
"breuil-bernardins",
"Breuil-Bernardins",
"Breuil-Bois-Robert",
"Breuil-Chaussée",
"Breuil-la-Réorte",
"Breuil-le-Sec",
"breuil-le-secquois",
"Breuil-le-Secquois",
"breuil-le-secquoise",
"Breuil-le-Secquoise",
"breuil-le-secquoises",
"Breuil-le-Secquoises",
"Breuil-le-Vert",
"Breuil-Magné",
"Breuil-sur-Marne",
"Breukelen-Nijenrode",
"Breukelen-Sint-Pieters",
"Breurey-lès-Faverney",
"Breuvannes-en-Bassigny",
"Breuvery-sur-Coole",
"Breux-Jouy",
"Breux-sur-Avre",
"Bréville-les-Monts",
"Bréville-sur-Mer",
"Bréxent-Enocq",
"Bréxent-Énocq",
"Brey-et-Maison-du-Bois",
"Briancourt-et-Montimont",
"Briarres-sur-Essonne",
"bric-à-brac",
"brick-goélette",
"Bricquebec-en-Cotentin",
"Bricqueville-la-Blouette",
"Bricqueville-sur-Mer",
"Brides-les-Bains",
"Brie-Comte-Robert",
"Brié-et-Angonnes",
"Briel-sur-Barse",
"Brienne-la-Vieille",
"Brienne-le-Château",
"Brienne-sur-Aisne",
"Brienon-sur-Armançon",
"Brières-et-Crécy",
"Brières-les-Scellés",
"Brieskow-Finkenheerd",
"Brie-sous-Archiac",
"Brie-sous-Barbezieux",
"Brie-sous-Chalais",
"Brie-sous-Matha",
"Brie-sous-Mortagne",
"Brieuil-sur-Chizé",
"Brieulles-sur-Bar",
"Brieulles-sur-Meuse",
"brigadier-chef",
"brigadiers-chefs",
"Brig-Glis",
"Brignac-la-Plaine",
"Brignano-Frascata",
"Brignogan-Plages",
"Brigue-Glis",
"Brigueil-le-Chantre",
"Briis-sous-Forges",
"brillat-savarin",
"brillet-pontin",
"Brillet-Pontin",
"brillet-pontine",
"Brillet-Pontine",
"brillet-pontines",
"Brillet-Pontines",
"brillet-pontins",
"Brillet-Pontins",
"Brillon-en-Barrois",
"brin-d'amour",
"brin-d'estoc",
"Brinon-sur-Beuvron",
"Brinon-sur-Sauldre",
"brins-d'amour",
"brins-d'estoc",
"Brin-sur-Seille",
"Brion-près-Thouet",
"Brion-sur-Ource",
"Briosne-lès-Sables",
"Brioux-sur-Boutonne",
"Briquemesnil-Floxicourt",
"bris-d'huis",
"brise-bise",
"brise-bises",
"brise-burnes",
"brise-cou",
"brise-cous",
"brise-fer",
"brise-fers",
"brise-flots",
"brise-glace",
"brise-glaces",
"brise-image",
"brise-images",
"brise-lame",
"brise-lames",
"brise-lunette",
"brise-mariage",
"brise-motte",
"brise-mottes",
"brise-mur",
"brise-murs",
"brise-os",
"brise-pierre",
"brise-pierres",
"brise-raison",
"brise-raisons",
"brise-roche",
"brise-roches",
"brise-scellé",
"brise-scellés",
"brise-soleil",
"brise-tout",
"brise-vent",
"brise-vents",
"Brisgau-Haute-Forêt-Noire",
"Brison-Saint-Innocent",
"Brissac-Quincé",
"Brissago-Valtravaglia",
"Brissay-Choigny",
"Brissy-Hamégicourt",
"Britanno-Colombien",
"Britanno-Colombienne",
"Britanno-Colombiennes",
"Britanno-Colombiens",
"Brive-la-Gaillarde",
"Brives-Charensac",
"Brives-sur-Charente",
"Brixey-aux-Chanoines",
"Brocourt-en-Argonne",
"Brohl-Lützing",
"Bromont-Lamothe",
"bromophos-éthyl",
"broncho-pneumonie",
"broncho-pneumonies",
"broncho-pulmonaire",
"broncho-pulmonaires",
"Broons-sur-Vilaine",
"Brot-Dessous",
"Brot-Plamboz",
"Brotte-lès-Luxeuil",
"Brotte-lès-Ray",
"brou-brou",
"broue-pub",
"broue-pubs",
"brouille-blanche",
"brouille-blanches",
"Brousse-le-Château",
"Brousses-et-Villaret",
"Broussey-en-Blois",
"Broussey-Raulecourt",
"Broussy-le-Grand",
"Broussy-le-Petit",
"Brou-sur-Chantereine",
"broute-minou",
"broute-minous",
"Broût-Vernet",
"broût-vernetois",
"Broût-Vernetois",
"broût-vernetoise",
"Broût-Vernetoise",
"broût-vernetoises",
"Broût-Vernetoises",
"Brouzet-lès-Alès",
"Brouzet-lès-Quissac",
"Brovello-Carpugnino",
"brown-nosers",
"brown-out",
"Broye-Aubigney-Montseugny",
"Broye-les-Loups-et-Verfontaine",
"Broye-lès-Pesmes-Aubigney-Montseugny",
"Broye-Vully",
"Bruay-la-Buissière",
"Bruay-sur-l'Escaut",
"Bruchhausen-Vilsen",
"Bruchmühlbach-Miesau",
"Bruchweiler-Bärenbach",
"Brücken-Hackpfüffel",
"Bruc-sur-Aff",
"Brue-Auriac",
"Brueil-en-Vexin",
"Bruère-Allichamps",
"bruesme-d'auffe",
"bruesmes-d'auffe",
"Bruges-Capbis-Mifaget",
"Brugny-Vaudancourt",
"Bruille-lez-Marchiennes",
"Bruille-Saint-Amand",
"brûle-amorce",
"brûle-bout",
"brule-gueule",
"brûle-gueule",
"brule-gueules",
"brûle-gueules",
"brule-maison",
"brûle-maison",
"brule-maisons",
"brûle-maisons",
"brule-parfum",
"brûle-parfum",
"brule-parfums",
"brûle-parfums",
"brûle-pourpoint",
"brûle-queue",
"brûle-tout",
"Brûly-de-Pesche",
"brûly-de-peschois",
"Brûly-de-Peschois",
"Brûly-de-Peschoise",
"Brunstatt-Didenheim",
"brun-suisse",
"Brunvillers-la-Motte",
"brute-bonne",
"brut-ingénu",
"bruts-ingénus",
"Bruttig-Fankel",
"Bruxelles-ville",
"Bruyères-et-Montbérault",
"Bruyères-le-Châtel",
"Bruyères-sur-Fère",
"Bruyères-sur-Oise",
"Bry-sur-Marne",
"B-spline",
"B-splines",
"Buais-Les-Monts",
"buccin-marin",
"buccins-marins",
"bucco-dentaire",
"bucco-dentaires",
"bucco-génital",
"bucco-génitale",
"bucco-génitales",
"bucco-génitaux",
"bucco-labial",
"bucco-pharyngé",
"bucco-pharyngée",
"bucco-pharyngées",
"bucco-pharyngés",
"Bucey-en-Othe",
"Bucey-lès-Gy",
"Bucey-lès-Traves",
"buck-béan",
"buck-béans",
"Bucy-le-Long",
"Bucy-le-Roi",
"Bucy-lès-Cerny",
"Bucy-lès-Pierrepont",
"Bucy-Saint-Liphard",
"Budel-Dorplein",
"Budel-Schoot",
"Bueil-en-Touraine",
"buenos-airien",
"Buenos-Airien",
"Buenos-Ayres",
"buen-retiro",
"Buhl-Lorraine",
"Buigny-l'Abbé",
"Buigny-lès-Gamaches",
"Buigny-Saint-Maclou",
"Buire-au-Bois",
"Buire-Courcelles",
"Buire-le-Sec",
"Buire-sur-l'Ancre",
"Buis-les-Baronnies",
"buis-prévenchais",
"Buis-Prévenchais",
"buis-prévenchaise",
"Buis-Prévenchaise",
"buis-prévenchaises",
"Buis-Prévenchaises",
"buisson-ardent",
"buissons-ardents",
"Buis-sur-Damville",
"Bulat-Pestivien",
"bull-dogs",
"bull-mastiff",
"bull-terrier",
"bull-terriers",
"Bully-les-Mines",
"bungee-jumping",
"bungy-jumping",
"Buno-Bonnevaux",
"bureau-chef",
"Bure-les-Templiers",
"Bures-en-Bray",
"Bures-les-Monts",
"Bures-sur-Dives",
"Bures-sur-Yvette",
"Burey-en-Vaux",
"Burey-la-Côte",
"Burg-Reuland",
"burg-reulandais",
"Burg-Reulandais",
"Burg-Reulandaise",
"Burkina-be",
"Burkina-bes",
"Burkina-Faso",
"Burkina-fassien",
"Burnhaupt-le-Bas",
"Burnhaupt-le-Haut",
"burn-out",
"burn-outa",
"burn-outai",
"burn-outaient",
"burn-outais",
"burn-outait",
"burn-outâmes",
"burn-outant",
"burn-outas",
"burn-outasse",
"burn-outassent",
"burn-outasses",
"burn-outassiez",
"burn-outassions",
"burn-outât",
"burn-outâtes",
"burn-oute",
"burn-outé",
"burn-outée",
"burn-outées",
"burn-outent",
"burn-outer",
"burn-outera",
"burn-outerai",
"burn-outeraient",
"burn-outerais",
"burn-outerait",
"burn-outeras",
"burn-outèrent",
"burn-outerez",
"burn-outeriez",
"burn-outerions",
"burn-outerons",
"burn-outeront",
"burn-outes",
"burn-outés",
"burn-outez",
"burn-outiez",
"burn-outions",
"burn-outons",
"burn-outs",
"Burosse-Mendousse",
"Burthecourt-aux-Chênes",
"Bus-la-Mésière",
"Bus-lès-Artois",
"Bussac-Forêt",
"Bussac-sur-Charente",
"Bus-Saint-Rémy",
"Busserotte-et-Montenaille",
"Bussière-Badil",
"Bussière-Boffy",
"Bussière-Dunoise",
"Bussière-Galant",
"Bussière-Nouvelle",
"Bussière-Poitevine",
"Bussière-Saint-Georges",
"Bussières-et-Pruns",
"Bussunarits-Sarrasquette",
"Bussus-Bussuel",
"Bussy-Albieux",
"Bussy-aux-Bois",
"Bussy-Chardonney",
"Bussy-en-Othe",
"Bussy-la-Côte",
"Bussy-la-Pesle",
"Bussy-le-Château",
"Bussy-le-Grand",
"Bussy-le-Repos",
"Bussy-lès-Daours",
"Bussy-lès-Poix",
"Bussy-Lettrée",
"Bussy-Saint-Georges",
"Bussy-Saint-Martin",
"Bussy-sur-Moudon",
"buste-reliquaire",
"bustes-reliquaires",
"Bustince-Iriberry",
"Butot-en-Caux",
"Butot-Vénesville",
"Butry-sur-Oise",
"but-sur-balles",
"Butte-Montmartre",
"butter-oil",
"Buttes-Chaumont",
"Buxières-d'Aillac",
"Buxières-lès-Clefmont",
"Buxières-lès-Froncles",
"Buxières-les-Mines",
"Buxières-lès-Villiers",
"Buxières-sous-les-Côtes",
"Buxières-sous-Montaigut",
"Buxières-sur-Arce",
"Buzet-sur-Baïse",
"Buzet-sur-Tarn",
"Buzy-Darmont",
"BVD-MD",
"Byans-sur-Doubs",
"bye-bye",
"Byhleguhre-Byhlen",
"by-passa",
"by-passai",
"by-passaient",
"by-passais",
"by-passait",
"by-passâmes",
"by-passant",
"by-passas",
"by-passasse",
"by-passassent",
"by-passasses",
"by-passassiez",
"by-passassions",
"by-passât",
"by-passâtes",
"by-passe",
"by-passé",
"by-passée",
"by-passées",
"by-passent",
"by-passer",
"by-passera",
"by-passerai",
"by-passeraient",
"by-passerais",
"by-passerait",
"by-passeras",
"by-passèrent",
"by-passerez",
"by-passeriez",
"by-passerions",
"by-passerons",
"by-passeront",
"by-passes",
"by-passés",
"by-passez",
"by-passiez",
"by-passions",
"by-passons",
"C-4",
"Cabanac-Cazaux",
"Cabanac-et-Villagrains",
"Cabanac-Séguenville",
"cabane-roulotte",
"cabanes-roulottes",
"Cabas-Loumassès",
"câblo-opérateur",
"câblo-opérateurs",
"Cabrières-d'Aigues",
"Cabrières-d'Avignon",
"cacasse-à-cul-nu",
"cacasses-à-cul-nu",
"c-à-d",
"c.-à-d.",
"Cadegliano-Viconago",
"Cadeilhan-Trachère",
"Cadillac-en-Fronsadais",
"cadrage-débordement",
"Cadzand-Bad",
"caf'conc",
"café-au-lait",
"café-bar",
"café-bistro",
"café-calva",
"café-comptoir",
"café-concert",
"café-crème",
"café-filtre",
"cafés-bars",
"cafés-concerts",
"cafés-crèmes",
"cafés-filtre",
"cafés-théâtres",
"café-théâtre",
"cages-théâtres",
"cage-théâtre",
"Cagnac-les-Mines",
"Cagnes-sur-Mer",
"cague-braille",
"cague-brailles",
"cahin-caha",
"Cahuzac-sur-Adour",
"Cahuzac-sur-Vère",
"cail-cédra",
"cail-cédras",
"cail-cédrin",
"cail-cédrins",
"caillé-blanc",
"caille-lait",
"caille-laits",
"caillés-blancs",
"cailleu-tassart",
"caillot-rosat",
"caillots-rosats",
"Caillouël-Crépigny",
"Caillouet-Orgeville",
"Cailloux-sur-Fontaines",
"Cailly-sur-Eure",
"caïque-bazar",
"caïques-bazars",
"caisse-outre",
"caisse-palette",
"caisses-outres",
"caisses-palettes",
"cake-walk",
"cake-walks",
"Calasca-Castiglione",
"Calatafimi-Segesta",
"calcite-rhodochrosite",
"calcites-rhodochrosites",
"calcium-autunite",
"calcium-autunites",
"calcium-pyromorphite",
"calcium-pyromorphites",
"calcium-rhodochrosite",
"calcium-rhodochrosites",
"cale-bas",
"caleçon-combinaison",
"caleçons-combinaisons",
"cale-dos",
"cale-hauban",
"cale-haubans",
"cale-pied",
"cale-pieds",
"Calleville-les-Deux-Eglises",
"Calleville-les-Deux-Églises",
"call-girl",
"call-girls",
"Calmels-et-le-Viala",
"calo-moulinotin",
"Calo-Moulinotin",
"calo-moulinotine",
"Calo-Moulinotine",
"calo-moulinotines",
"Calo-Moulinotines",
"calo-moulinotins",
"Calo-Moulinotins",
"Calonne-Ricouart",
"Calonne-sur-la-Lys",
"Calp.-Flac.",
"Caluire-et-Cuire",
"Calumet-Pointois",
"Calumet-Pontois",
"Calviac-en-Périgord",
"Camaret-sur-Aigues",
"Camaret-sur-Mer",
"Cambes-en-Plaine",
"Camblain-Châtelain",
"Camblain-l'Abbé",
"Camblanes-et-Meynac",
"Cambo-les-Bains",
"Cambon-et-Salvergues",
"Cambon-lès-Lavaur",
"Cambounet-sur-le-Sor",
"Cambron-Casteau",
"Cambronne-lès-Clermont",
"Cambronne-lès-Ribécourt",
"Cambron-Saint-Vincent",
"came-cruse",
"caméra-lucida",
"caméra-piéton",
"caméra-piétons",
"Camiac-et-Saint-Denis",
"camion-bélier",
"camion-citerne",
"camion-cuisine",
"camion-cuisines",
"camion-poubelle",
"camions-béliers",
"camions-bennes",
"camions-citernes",
"camions-poubelles",
"Camou-Cihigue",
"Camou-Mixe-Suhast",
"Campagnac-lès-Quercy",
"Campagna-de-Sault",
"Campagne-d'Armagnac",
"Campagne-lès-Boulonnais",
"Campagne-lès-Guines",
"Campagne-lès-Hesdin",
"Campagne-lès-Wardrecques",
"Campagne-sur-Arize",
"Campagne-sur-Aude",
"Campandré-Valcongrain",
"campanulo-infundibiliforme",
"campanulo-infundibiliformes",
"Camp-Auriol",
"Camp-Dumy",
"Campestre-et-Luc",
"Campet-et-Lamolère",
"Campezo-Kanpezu",
"Camphin-en-Carembault",
"Camphin-en-Pévèle",
"Campiglione-Fenile",
"Campigneulles-les-Grandes",
"Campigneulles-les-Petites",
"Campillos-Paravientos",
"Campillos-Sierra",
"camping-car",
"camping-cars",
"camping-gaz",
"Camping-Gaz",
"Camplong-d'Aude",
"Camp-Mégier",
"Camp-Méjan",
"campo-haltien",
"Campo-Haltien",
"campo-haltienne",
"campo-haltiennes",
"campo-haltiens",
"campo-laïcien",
"Campo-Laïcien",
"campo-laïcienne",
"Campo-Laïcienne",
"campo-laïciennes",
"Campo-Laïciennes",
"campo-laïciens",
"Campo-Laïciens",
"Camp-Public",
"Camp-Réal",
"Camps-en-Amiénois",
"Camps-la-Source",
"Camps-Saint-Mathurin-Léobazel",
"Camps-sur-l'Agly",
"Camps-sur-l'Isle",
"camps-volants",
"camp-volant",
"Canada-Uni",
"canadien-français",
"Canale-di-Verde",
"canapé-lit",
"canapés-lits",
"Canaules-et-Argentières",
"candau-casteidois",
"Candau-Casteidois",
"candau-casteidoise",
"Candau-Casteidoise",
"candau-casteidoises",
"Candau-Casteidoises",
"Candes-Saint-Martin",
"Candé-sur-Beuvron",
"Canenx-et-Réaut",
"Canet-de-Salars",
"Canet-en-Roussillon",
"Caniac-du-Causse",
"cani-joering",
"cani-rando",
"canne-épée",
"Cannes-Ecluse",
"Cannes-Écluse",
"cannes-épées",
"Cannes-et-Clairan",
"cannib's",
"Canny-sur-Matz",
"Canny-sur-Thérain",
"canoë-kayak",
"canoë-kayaks",
"canon-revolver",
"canons-revolvers",
"Cantaing-sur-Escaut",
"Cante-Greil",
"Cante-Grel",
"Cante-Grillet",
"Cantenay-Epinard",
"Cantenay-Épinard",
"Cante-Perdris",
"C.-Antip.",
"Cantonnier-de-l'Est",
"Canville-la-Rocque",
"Canville-les-Deux-Eglises",
"Canville-les-Deux-Églises",
"Cany-Barville",
"Caorches-Saint-Nicolas",
"Caouënnec-Lanvézéac",
"Capaccio-Paestum",
"Capdenac-Gare",
"Capelle-Fermont",
"capelle-filismontin",
"Capelle-Filismontin",
"capelle-filismontine",
"Capelle-Filismontine",
"capelle-filismontines",
"Capelle-Filismontines",
"capelle-filismontins",
"Capelle-Filismontins",
"Capelle-les-Grands",
"Capelle-lès-Hesdin",
"capélo-hugonais",
"Capélo-Hugonais",
"capélo-hugonaise",
"Capélo-Hugonaise",
"capélo-hugonaises",
"Capélo-Hugonaises",
"Capesterre-Belle-Eau",
"Capesterre-de-Marie-Galante",
"capi-aga",
"capi-agas",
"capigi-bassi",
"capigi-bassis",
"Capitale-Nationale",
"capital-risque",
"capital-risques",
"capital-risqueur",
"capital-risqueurs",
"capitan-pacha",
"capitan-pachas",
"capitaux-risqueurs",
"caporal-chef",
"caporaux-chefs",
"Capoulet-et-Junac",
"Cappelle-Brouck",
"Cappelle-en-Pévèle",
"Cappelle-la-Grande",
"capsule-congé",
"capsules-congés",
"capuchon-de-moine",
"caput-mortuum",
"caque-denier",
"carbo-azotine",
"carbonate-apatite",
"carbonate-apatites",
"Carbon-Blanc",
"carbone-14",
"carbones-14",
"Carbonia-Iglesias",
"Carcarès-Sainte-Croix",
"Carcen-Ponson",
"carcere-duro",
"Carcheto-Brustico",
"cardio-chirurgien",
"cardio-chirurgienne",
"cardio-chirurgiennes",
"cardio-chirurgiens",
"cardio-kickboxing",
"cardio-kickboxings",
"cardio-thoracique",
"cardio-thoraciques",
"cardio-training",
"cardio-vasculaire",
"cardio-vasculaires",
"Cardo-Torgia",
"carême-prenant",
"carfentrazone-éthyle",
"car-ferries",
"car-ferry",
"car-ferrys",
"cargo-dortoir",
"cargos-dortoirs",
"Carhaix-Plouguer",
"Carignan-de-Bordeaux",
"car-jacking",
"Carla-Bayle",
"Carla-de-Roquefort",
"Carla-le-Comte",
"Carlencas-et-Levas",
"Carmzow-Wallmow",
"Carnac-Rouffiac",
"Carnoux-en-Provence",
"caro-percyais",
"Caro-Percyais",
"caro-percyaise",
"Caro-Percyaise",
"caro-percyaises",
"Caro-Percyaises",
"carré-bossu",
"carrée-bossue",
"carrées-bossues",
"carrés-bossus",
"Carresse-Cassaber",
"Carrières-sous-Poissy",
"Carrières-sur-Seine",
"Carry-le-Rouet",
"Carsac-Aillac",
"Carsac-de-Gurson",
"Carsac-de-Villefranche",
"carte-cadeau",
"carte-fille",
"carte-index",
"carte-lettre",
"carte-maximum",
"carte-mère",
"cartes-cadeaux",
"cartes-filles",
"cartes-lettres",
"cartes-maximum",
"cartes-mères",
"carte-soleil",
"cartes-vues",
"carte-vue",
"Cartigny-l'Epinay",
"Cartigny-l'Épinay",
"carton-index",
"carton-pâte",
"carton-pierre",
"cartons-pâte",
"Carville-la-Folletière",
"Carville-Pot-de-Fer",
"Cascastel-des-Corbières",
"Case-Pilote",
"Cases-de-Pène",
"cash-back",
"cash-flow",
"cash-flows",
"cas-limite",
"cas-limites",
"casque-de-Jupiter",
"Cassagnabère-Tournas",
"Cassagnes-Bégonhès",
"casse-aiguille",
"casse-bélier",
"casse-béliers",
"casse-bonbon",
"casse-bonbons",
"casse-bouteille",
"casse-bras",
"casse-burnes",
"casse-claouis",
"casse-coeur",
"casse-cœur",
"casse-coeurs",
"casse-cœurs",
"casse-cou",
"casse-couille",
"casse-couilles",
"casse-cous",
"casse-croute",
"casse-croûte",
"casse-croutes",
"casse-croûtes",
"casse-cul",
"casse-culs",
"casse-dalle",
"casse-dalles",
"casse-fer",
"casse-fil",
"casse-fils",
"casse-graine",
"casse-graines",
"casse-gueule",
"casse-gueules",
"casse-langue",
"casse-langues",
"casse-lunette",
"casse-lunettes",
"casse-mariages",
"casse-motte",
"casse-museau",
"casse-museaux",
"casse-noisette",
"casse-noisettes",
"casse-noix",
"casse-nole",
"casse-noyaux",
"casse-olives",
"casse-patte",
"casse-pattes",
"casse-péter",
"casse-pied",
"casse-pieds",
"casse-pierre",
"casse-pierres",
"casse-pipe",
"casse-pipes",
"casse-poitrine",
"casse-pot",
"casse-tête",
"casse-têtes",
"casse-vessie",
"cassi-ascher",
"cassi-aschers",
"Castaignos-Souslens",
"Castanet-le-Haut",
"Castanet-Tolosan",
"Casteide-Cami",
"Casteide-Candau",
"Casteide-Doat",
"castel-ambillouçois",
"Castel-Ambillouçois",
"castel-ambillouçoise",
"Castel-Ambillouçoise",
"castel-ambillouçoises",
"Castel-Ambillouçoises",
"Castelbello-Ciardes",
"castel-chalonnais",
"Castel-Chalonnais",
"castel-chalonnaise",
"Castel-Chalonnaise",
"castel-chalonnaises",
"Castel-Chalonnaises",
"Castell'Alfero",
"Castellare-di-Casinca",
"Castellare-di-Mercurio",
"Castell'Arquato",
"Castell'Azzara",
"Castellet-lès-Sausses",
"castel-lévézien",
"Castel-Lévézien",
"castel-lévézienne",
"Castel-Lévézienne",
"castel-lévéziennes",
"Castel-Lévéziennes",
"castel-lévéziens",
"Castel-Lévéziens",
"Castello-di-Rostino",
"Castell'Umberto",
"Castelmoron-d'Albret",
"Castelmoron-sur-Lot",
"Castelnau-Barbarens",
"Castelnau-Chalosse",
"Castelnau-d'Anglès",
"Castelnau-d'Arbieu",
"Castelnau-d'Aude",
"Castelnau-d'Auzan",
"Castelnaud-de-Gratecambe",
"Castelnau-de-Brassac",
"Castelnau-de-Guers",
"Castelnau-de-Lévis",
"Castelnau-de-Mandailles",
"Castelnau-de-Médoc",
"Castelnau-de-Montmiral",
"Castelnau-d'Estrétefonds",
"Castelnaud-la-Chapelle",
"Castelnau-Durban",
"castelnau-durbannais",
"Castelnau-Durbannais",
"castelnau-durbannaise",
"Castelnau-Durbannaise",
"castelnau-durbannaises",
"Castelnau-Durbannaises",
"Castelnau-le-Lez",
"Castelnau-Magnoac",
"Castelnau-Montratier",
"Castelnau-Montratier-Sainte-Alauzie",
"Castelnau-Pégayrols",
"Castelnau-Picampeau",
"Castelnau-Rivière-Basse",
"Castelnau-sur-Gupie",
"Castelnau-sur-l'Auvignon",
"Castelnau-Tursan",
"Castelnau-Valence",
"castel-pontin",
"Castel-Pontin",
"castel-pontine",
"Castel-Pontine",
"castel-pontines",
"Castel-Pontines",
"castel-pontins",
"Castel-Pontins",
"Castel-Sarrazin",
"Castels-et-Bézenac",
"castel-symphorinois",
"Castel-Symphorinois",
"castel-symphorinoise",
"Castel-Symphorinoise",
"castel-symphorinoises",
"Castel-Symphorinoises",
"Castéra-Bouzet",
"Castéra-Lanusse",
"Castéra-Lectourois",
"Castéra-Lou",
"Castéra-Loubix",
"Castéra-Verduzan",
"Castéra-Vignoles",
"Castet-Arrouy",
"castet-arrouyais",
"Castet-Arrouyais",
"castet-arrouyaise",
"Castet-Arrouyaise",
"castet-arrouyaises",
"Castet-Arrouyaises",
"Castetnau-Camblong",
"Castets-en-Dorthe",
"Castex-d'Armagnac",
"Casties-Labrande",
"castillano-aragonais",
"Castille-et-León",
"Castillejo-Sierra",
"Castillo-Albaráñez",
"Castillon-Debats",
"Castillon-de-Castets",
"Castillon-de-Larboust",
"Castillon-de-Saint-Martory",
"Castillon-du-Gard",
"Castillon-en-Auge",
"Castillon-en-Couserans",
"Castillon-et-Capitourlan",
"Castillon-la-Bataille",
"Castillon-Massas",
"Castillon-Savès",
"Casti-Wergenstein",
"Castres-Gironde",
"Castrillo-Tejeriego",
"Castrop-Rauxel",
"Castro-Urdiales",
"catalan-valencien-baléare",
"catalase-positive",
"cat-boat",
"Catillon-Fumechon",
"Catillon-sur-Sambre",
"cato-cathartique",
"cato-cathartiques",
"Caubios-Loos",
"Caubon-Saint-Sauveur",
"Cauchy-à-la-Tour",
"Caudebec-en-Caux",
"Caudebec-lès-Elbeuf",
"Caudiès-de-Conflent",
"Caudiès-de-Fenouillèdes",
"Caumont-l'Eventé",
"Caumont-l'Éventé",
"Caumont-sur-Aure",
"Caumont-sur-Durance",
"Caumont-sur-Garonne",
"Caumont-sur-Orne",
"Caunes-Minervois",
"Caunettes-en-Val",
"Caunette-sur-Lauquet",
"Caupenne-d'Armagnac",
"Cauroy-lès-Hermonville",
"Cause-de-Clérans",
"Caussade-Rivière",
"Causse-Bégon",
"Causse-de-la-Selle",
"Causse-et-Diège",
"Causses-et-Veyran",
"Cauverville-en-Roumois",
"Cauville-sur-Mer",
"Caux-et-Sauzens",
"ça-va-ça-vient",
"Cavaglio-Spoccia",
"Cavalaire-sur-Mer",
"Cavallino-Treporti",
"ça-voir",
"ça-voirs",
"Cavron-Saint-Martin",
"Cayeux-en-Santerre",
"Cayeux-sur-Mer",
"Cayre-four",
"Cazals-des-Baylès",
"Cazarilh-Laspènes",
"Cazaril-Laspènes",
"Cazaril-Tambourès",
"Cazaux-d'Anglès",
"Cazaux-Debat",
"Cazaux-Fréchet-Anéran-Camors",
"Cazaux-Layrisse",
"Cazaux-Savès",
"Cazaux-Villecomtal",
"Cazeaux-de-Larboust",
"Cazenave-Serres-et-Allens",
"Cazeneuve-Montaut",
"Cazères-sur-l'Adour",
"Cazes-Mondenard",
"Cazouls-d'Hérault",
"Cazouls-lès-Béziers",
"C-blanc",
"C-blancs",
"c-commanda",
"c-commandai",
"c-commandaient",
"c-commandais",
"c-commandait",
"c-commandâmes",
"c-commandant",
"c-commandas",
"c-commandasse",
"c-commandassent",
"c-commandasses",
"c-commandassiez",
"c-commandassions",
"c-commandât",
"c-commandâtes",
"c-commande",
"c-commandé",
"c-commandée",
"c-commandées",
"c-commandent",
"c-commander",
"c-commandera",
"c-commanderai",
"c-commanderaient",
"c-commanderais",
"c-commanderait",
"c-commanderas",
"c-commandèrent",
"c-commanderez",
"c-commanderiez",
"c-commanderions",
"c-commanderons",
"c-commanderont",
"c-commandes",
"c-commandés",
"c-commandez",
"c-commandiez",
"c-commandions",
"c-commandons",
"CD-R",
"CD-ROM",
"CD-RW",
"CD-WORM",
"Céaux-d'Allègre",
"Ceaux-en-Couhé",
"Ceaux-en-Loudun",
"cédez-le-passage",
"Ceilhes-et-Rocozels",
"cejourd'hui",
"céleri-rave",
"cèleri-rave",
"céléri-rave",
"cèleri-raves",
"céleris-raves",
"Céleste-Empire",
"celle-ci",
"celle-là",
"Celle-Lévescault",
"celles-ci",
"Celles-en-Bassigny",
"celles-là",
"Celles-lès-Condé",
"Celles-sur-Aisne",
"Celles-sur-Belle",
"Celles-sur-Durolle",
"Celles-sur-Ource",
"Celles-sur-Plaine",
"Cellier-du-Luc",
"celto-nordique",
"celto-nordiques",
"celui-ci",
"celui-là",
"Cély-en-Bière",
"Cénac-et-Saint-Julien",
"Cenne-Monestiés",
"Cenon-sur-Vienne",
"cent-cinquante-cinq",
"cent-cinquante-cinquièmes",
"cent-garde",
"cent-gardes",
"cent-lances",
"cent-mille",
"centre-bourg",
"centre-droit",
"Centre-du-Québec",
"Centre-Est",
"centre-gauche",
"Centre-Mauricien",
"Centre-Nord",
"Centre-Ouest",
"centres-bourgs",
"Centre-Sud",
"centres-villes",
"centre-tir",
"centre-ville",
"Centro-Américain",
"Centro-Américaine",
"Centro-Américains",
"cent-suisse",
"cent-suisses",
"céphalo-pharyngien",
"céphalo-pharyngienne",
"céphalo-pharyngiennes",
"céphalo-pharyngiens",
"céphalo-rachidien",
"Cérans-Foulletourte",
"Cercy-la-Tour",
"cérébro-lésion",
"cérébro-lésions",
"cérébro-rachidien",
"cérébro-rachidienne",
"cérébro-rachidiennes",
"cérébro-rachidiens",
"cérébro-spinal",
"cérébro-spinale",
"cérébro-spinales",
"cérébro-spinaux",
"Céré-la-Ronde",
"Cerexhe-Heuseux",
"cerfs-veaux",
"cerfs-volants",
"cerfs-volistes",
"cerf-veau",
"cerf-volant",
"cerf-voliste",
"Cerisy-Belle-Etoile",
"Cerisy-Belle-Étoile",
"Cerisy-Buleux",
"Cerisy-Gailly",
"Cerisy-la-Forêt",
"Cerisy-la-Salle",
"Cernay-en-Dormois",
"Cernay-la-Ville",
"Cernay-l'Eglise",
"Cernay-l'Église",
"Cernay-lès-Reims",
"Cernoy-en-Berry",
"Cerny-en-Laonnois",
"Cerny-lès-Bucy",
"Céroux-Mousty",
"Cerre-lès-Noroy",
"certificat-cadeau",
"césaro-papisme",
"césaro-papismes",
"césaro-papiste",
"césaro-papistes",
"Césarville-Dossainville",
"césium-analcime",
"césium-analcimes",
"Cesny-aux-Vignes",
"Cesny-Bois-Halbout",
"cesoird'hui",
"Cessenon-sur-Orb",
"Cessey-sur-Tille",
"cessez-le-feu",
"cession-bail",
"Cesson-Sévigné",
"Cessoy-en-Montois",
"Cessy-les-Bois",
"c'est-à-dire",
"cesta-punta",
"Cette-Eygun",
"ceux-ci",
"ceux-là",
"chabada-bada",
"cha'ban",
"chabazite-Ca",
"chabazite-Cas",
"chabazite-Na",
"chabazite-Nas",
"cha-cha",
"cha-cha-cha",
"cha-chas",
"Chagny-lès-Omont",
"Chaillac-sur-Vienne",
"Chaillé-les-Marais",
"Chail-les-Bains",
"Chaillé-sous-les-Ormeaux",
"Chailly-en-Bière",
"Chailly-en-Brie",
"Chailly-en-Gâtinais",
"Chailly-lès-Ennery",
"Chailly-sur-Armançon",
"Chailly-sur-Montreux",
"Chainaz-les-Frasses",
"Chaînée-des-Coupis",
"Chaintrix-Bierges",
"Chaise-Dieu-du-Theil",
"Chalain-d'Uzore",
"Chalain-le-Comtal",
"Chalandry-Elaire",
"Chalautre-la-Grande",
"Chalautre-la-Petite",
"Chalautre-la-Reposte",
"Châlette-sur-Loing",
"Chalette-sur-Voire",
"Chalivoy-Milon",
"Challain-la-Potherie",
"Challand-Saint-Anselme",
"Challand-Saint-Victor",
"Challes-la-Montagne",
"Challes-les-Eaux",
"Chalmazel-Jeansagnière",
"Chalonnes-sous-le-Lude",
"Chalonnes-sur-Loire",
"Châlon's",
"Châlons-du-Maine",
"Chalons-en-Champagne",
"Châlons-en-Champagne",
"Châlons-sur-Marne",
"Châlons-sur-Vesle",
"Chalon-sur-Saône",
"Chalo-Saint-Mars",
"Chalou-Moulineux",
"Chamalières-sur-Loire",
"Chamarandes-Choignes",
"Chambaron-sur-Morge",
"Chambéry-le-Vieux",
"Chambley-Bussières",
"chambolle-musigny",
"Chambolle-Musigny",
"Chambon-la-Forêt",
"Chambon-le-Château",
"Chambon-Sainte-Croix",
"Chambon-sur-Cisse",
"Chambon-sur-Dolore",
"Chambon-sur-Lac",
"Chambon-sur-Voueize",
"Chambornay-lès-Bellevaux",
"Chambornay-lès-Pin",
"Chambost-Allières",
"Chambost-Longessaigne",
"chamboule-tout",
"Chambourg-sur-Indre",
"Chambray-lès-Tours",
"chamito-sémitique",
"chamito-sémitiques",
"Chamonix-Mont-Blanc",
"Chamoux-sur-Gelon",
"Champagnac-de-Belair",
"Champagnac-la-Noaille",
"Champagnac-la-Prune",
"Champagnac-la-Rivière",
"Champagnac-le-Vieux",
"Champagnat-le-Jeune",
"Champagne-Ardenne",
"Champagne-au-Mont-d'Or",
"Champagne-de-Blanzac",
"Champagne-en-Valromey",
"Champagne-et-Fontaine",
"Champagné-le-Sec",
"Champagné-les-Marais",
"Champagne-Mouton",
"Champagné-Saint-Hilaire",
"Champagne-sur-Loue",
"Champagne-sur-Oise",
"Champagne-sur-Seine",
"Champagne-sur-Vingeanne",
"Champagne-Vigny",
"Champagny-en-Vanoise",
"Champagny-sous-Uxelles",
"Champaubert-aux-Bois",
"Champdeniers-Saint-Denis",
"Champdor-Corcelles",
"Champeau-en-Morvan",
"Champeaux-et-la-Chapelle-Pommier",
"Champeaux-sur-Sarthe",
"Champey-sur-Moselle",
"Champigneul-Champagne",
"Champigneulles-en-Bassigny",
"Champigneul-sur-Vence",
"Champignol-lez-Mondeville",
"Champigny-en-Beauce",
"Champigny-en-Rochereau",
"Champigny-la-Futelaye",
"Champigny-le-Sec",
"Champigny-lès-Langres",
"Champigny-sous-Varennes",
"Champigny-sur-Aube",
"Champigny-sur-Marne",
"Champigny-sur-Veude",
"Champigny-sur-Yonne",
"Champlat-et-Boujacourt",
"Champlitte-la-Ville",
"Champniers-et-Reilhac",
"Champrond-en-Gâtine",
"Champrond-en-Perchet",
"champs-clos",
"Champs-Élysées",
"Champs-Romain",
"Champs-sur-Marne",
"Champs-sur-Tarentaine-Marchal",
"Champs-sur-Yonne",
"Champs-zé",
"Champteussé-sur-Baconne",
"Champtocé-sur-Loire",
"Champvans-les-Baume",
"Champvans-les-Moulins",
"Chanac-les-Mines",
"Chanat-la-Mouteyre",
"Chanceaux-près-Loches",
"Chanceaux-sur-Choisille",
"Chang-Haï",
"Changis-sur-Marne",
"changxing'ien",
"Changxing'ien",
"Channay-sur-Lathan",
"Chanos-Curson",
"chanos-cursonnais",
"Chanos-Cursonnais",
"chanos-cursonnaise",
"Chanos-Cursonnaise",
"chanos-cursonnaises",
"Chanos-Cursonnaises",
"Chanoz-Châtenay",
"Chante-Clair",
"Chanteloup-en-Brie",
"Chanteloup-les-Bois",
"Chanteloup-les-Vignes",
"Chantemerle-les-Blés",
"Chantemerle-lès-Grignan",
"Chantemerle-sur-la-Soie",
"Chantenay-Saint-Imbert",
"Chantenay-Villedieu",
"chantilly-tiffany",
"Chapdes-Beaufort",
"chape-chuta",
"chape-chutai",
"chape-chutaient",
"chape-chutais",
"chape-chutait",
"chape-chutâmes",
"chape-chutant",
"chape-chutas",
"chape-chutasse",
"chape-chutassent",
"chape-chutasses",
"chape-chutassiez",
"chape-chutassions",
"chape-chutât",
"chape-chutâtes",
"chape-chute",
"chape-chuté",
"chape-chutent",
"chape-chuter",
"chape-chutera",
"chape-chuterai",
"chape-chuteraient",
"chape-chuterais",
"chape-chuterait",
"chape-chuteras",
"chape-chutèrent",
"chape-chuterez",
"chape-chuteriez",
"chape-chuterions",
"chape-chuterons",
"chape-chuteront",
"chape-chutes",
"chape-chutez",
"chape-chutiez",
"chape-chutions",
"chape-chutons",
"chapelloise-fortinienne",
"Chapelloise-Fortinienne",
"chapelloises-fortiniennes",
"Chapelloises-Fortiniennes",
"chapellois-fortinien",
"Chapellois-Fortinien",
"chapellois-fortiniens",
"Chapellois-Fortiniens",
"Chapon-Seraing",
"chapon-sérésien",
"Chapon-Sérésien",
"Chapon-Sérésienne",
"char-à-bancs",
"charbon-de-pierre",
"charbon-de-terre",
"Charbonnières-les-Bains",
"Charbonnières-les-Sapins",
"Charbonnières-les-Varennes",
"Charbonnières-les-Vieilles",
"Charbonnier-les-Mines",
"charbons-de-pierre",
"charbons-de-terre",
"Charcé-Saint-Ellier-sur-Aubance",
"chardon-Marie",
"chardon-Roland",
"chardons-Marie",
"Chareil-Cintrat",
"Charency-Vezin",
"Charente-Inférieure",
"Charente-Maritime",
"Charenton-du-Cher",
"Charenton-le-Pont",
"Charette-Varennes",
"chargeuse-pelleteuse",
"Chargey-lès-Gray",
"Chargey-lès-Port",
"Charles-Quint",
"Charleville-Mézières",
"Charleville-sous-Bois",
"Charlevoisien-de-l'Est",
"Charly-Oradour",
"Charly-sur-Marne",
"charme-houblon",
"Charmes-en-l'Angle",
"charmes-houblons",
"Charmes-la-Côte",
"Charmes-la-Grande",
"Charmes-Saint-Valbert",
"Charmes-sur-l'Herbasse",
"Charmes-sur-Rhône",
"Charmois-devant-Bruyères",
"Charmois-l'Orgueilleux",
"Charmont-en-Beauce",
"Charmontois-l'Abbé",
"Charmont-sous-Barbuise",
"Charmont-sur-Marne",
"Charnay-lès-Chalon",
"Charnay-lès-Mâcon",
"Charnoz-sur-Ain",
"Charny-le-Bachot",
"Charny-sur-Meuse",
"Charrey-sur-Saône",
"Charrey-sur-Seine",
"Charritte-de-Bas",
"chars-à-bancs",
"charte-partie",
"Chartres-de-Bretagne",
"Chartrier-Ferrière",
"Charvieu-Chavagneux",
"Chasné-sur-Illet",
"Chassagne-Montrachet",
"Chassagne-Saint-Denis",
"chasse-avant",
"chasse-bondieu",
"chasse-bondieux",
"chasse-carrée",
"chasse-carrées",
"chasse-chien",
"chasse-chiens",
"chasse-clou",
"chasse-clous",
"chasse-cœur",
"chasse-coquin",
"chasse-cousin",
"chasse-cousins",
"chasse-crapaud",
"chassé-croisé",
"chasse-derrière",
"chasse-derrières",
"chasse-diable",
"chasse-diables",
"chasse-ennui",
"chasse-fièvre",
"chasse-fleurée",
"chasse-fleurées",
"chasse-goupille",
"chasse-goupilles",
"chasse-gueux",
"chasse-marée",
"chasse-marées",
"chasse-morte",
"chasse-mouche",
"chasse-mouches",
"chasse-mulet",
"chasse-mulets",
"chasse-neige",
"chasse-neiges",
"Chasseneuil-du-Poitou",
"Chasseneuil-sur-Bonnieure",
"chasse-noix",
"chasse-partie",
"chasse-parties",
"chasse-pierre",
"chasse-pierres",
"chasse-poignée",
"chasse-pointe",
"chasse-pointes",
"chasse-pommeau",
"chasse-punaise",
"chasse-rivet",
"chasse-rivets",
"chasse-rondelle",
"chasse-roue",
"chasse-roues",
"chassés-croisés",
"chasses-parties",
"Chasse-sur-Rhône",
"chasse-taupe",
"chasseur-bombardier",
"chasseur-cueilleur",
"chasseurs-bombardiers",
"chasseurs-cueilleurs",
"Chassey-Beaupré",
"Chassey-le-Camp",
"Chassey-lès-Montbozon",
"Chassey-lès-Scey",
"chassez-déchassez",
"chassez-huit",
"Chassigny-sous-Dun",
"châssis-support",
"châssis-supports",
"Chastel-Arnaud",
"Chastellux-sur-Cure",
"Chastel-Nouvel",
"Chastel-sur-Murat",
"Chastenay-le-Bas",
"Chastenay-le-Haut",
"Chastre-Villeroux-Blanmont",
"châtaigne-d'eau",
"châtaigne-de-mer",
"châtaignes-d'eau",
"châtaignes-de-mer",
"Châteauneuf-Calcernier",
"Châteauneuf-de-Bordette",
"Châteauneuf-de-Chabre",
"Châteauneuf-de-Contes",
"Châteauneuf-de-Gadagne",
"Châteauneuf-de-Galaure",
"Châteauneuf-d'Entraunes",
"Châteauneuf-de-Randon",
"Châteauneuf-de-Vernoux",
"Châteauneuf-d'Ille-et-Vilaine",
"Châteauneuf-d'Isère",
"Châteauneuf-d'Oze",
"Châteauneuf-du-Faou",
"châteauneuf-du-pape",
"Châteauneuf-du-Pape",
"Châteauneuf-du-Rhône",
"Châteauneuf-en-Thymerais",
"Châteauneuf-Grasse",
"Châteauneuf-la-Forêt",
"Châteauneuf-le-Rouge",
"Châteauneuf-les-Bains",
"Châteauneuf-les-Martigues",
"Châteauneuf-lès-Moustiers",
"Châteauneuf-Miravail",
"Châteauneuf-sur-Charente",
"Châteauneuf-sur-Cher",
"Châteauneuf-sur-Isère",
"Châteauneuf-sur-Loire",
"Châteauneuf-sur-Sarthe",
"Châteauneuf-Val-de-Bargis",
"Châteauneuf-Val-Saint-Donat",
"Châteauneuf-Villevieille",
"Châteauroux-les-Alpes",
"Châteauvieux-les-Fossés",
"châteaux-forts",
"Châtelaillon-Plage",
"Châtel-Censoir",
"Chatel-Chéhéry",
"Châtel-de-Joux",
"Châtel-de-Neuvre",
"Châtel-en-Trièves",
"Châtel-Gérard",
"Châtel-Guyon",
"Châtel-Montagne",
"Châtel-Moron",
"Châtelraould-Saint-Louvent",
"Châtel-Saint-Denis",
"Châtel-Saint-Germain",
"Châtel-sur-Montsalvens",
"Châtel-sur-Moselle",
"Châtelus-le-Marcheix",
"Châtelus-Malvaleix",
"Châtenay-en-France",
"Chatenay-Mâcheron",
"Châtenay-Malabry",
"Châtenay-sur-Seine",
"Chatenay-Vaudin",
"Châtenois-les-Forges",
"Châtenoy-en-Bresse",
"Châtenoy-le-Royal",
"Châtillon-Coligny",
"Châtillon-en-Bazois",
"Châtillon-en-Diois",
"Châtillon-en-Dunois",
"Châtillon-en-Michaille",
"Châtillon-en-Vendelais",
"Châtillon-Guyotte",
"Châtillon-la-Borde",
"Châtillon-la-Palud",
"Châtillon-le-Duc",
"Châtillon-le-Roi",
"Châtillon-lès-Sons",
"Châtillon-Saint-Jean",
"Châtillon-sous-les-Côtes",
"Châtillon-sous-Maîche",
"Châtillon-sur-Bar",
"Châtillon-sur-Broué",
"Châtillon-sur-Chalaronne",
"Châtillon-sur-Cher",
"Châtillon-sur-Cluses",
"Châtillon-sur-Colmont",
"Châtillon-sur-Indre",
"Châtillon-sur-Lison",
"Châtillon-sur-Loire",
"Châtillon-sur-Marne",
"Châtillon-sur-Morin",
"Châtillon-sur-Oise",
"Châtillon-sur-Saône",
"Châtillon-sur-Seiche",
"Châtillon-sur-Seine",
"Châtillon-sur-Thouet",
"Chatonrupt-Sommermont",
"Châtres-la-Forêt",
"Châtres-sur-Cher",
"Chatuzange-le-Goubet",
"chauche-branche",
"chauche-branches",
"chauche-poule",
"Chauconin-Neufmontiers",
"Chaudefonds-sur-Layon",
"Chaudenay-la-Ville",
"Chaudenay-le-Château",
"Chaudeney-sur-Moselle",
"Chaudière-Appalaches",
"Chaudon-Norante",
"Chaudron-en-Mauges",
"chauffe-assiette",
"chauffe-assiettes",
"chauffe-bain",
"chauffe-bains",
"chauffe-biberon",
"chauffe-biberons",
"chauffe-bloc",
"chauffe-blocs",
"chauffe-chemise",
"chauffe-cire",
"chauffe-double",
"chauffe-eau",
"chauffe-eaux",
"chauffe-la-couche",
"chauffe-linge",
"chauffe-linges",
"chauffe-lit",
"chauffe-lits",
"chauffe-moteur",
"chauffe-pied",
"chauffe-pieds",
"chauffe-plat",
"chauffe-plats",
"chauffes-doubles",
"Chauffour-lès-Bailly",
"Chauffour-lès-Etréchy",
"Chauffour-lès-Étréchy",
"Chauffour-sur-Vell",
"Chaufour-lès-Bonnières",
"Chaufour-Notre-Dame",
"Chaume-et-Courchamp",
"Chaume-lès-Baigneux",
"Chaumes-en-Brie",
"Chaumes-en-Retz",
"Chaumont-d'Anjou",
"Chaumont-devant-Damvillers",
"Chaumont-en-Vexin",
"Chaumont-Gistoux",
"Chaumont-la-Ville",
"Chaumont-le-Bois",
"Chaumont-le-Bourg",
"Chaumont-Porcien",
"Chaumont-Saint-Quentin",
"Chaumont-sur-Aire",
"Chaumont-sur-Loire",
"Chaumont-sur-Tharonne",
"Chaumoux-Marcilly",
"Chaussée-Notre-Dame-Louvignies",
"chausse-pied",
"chausse-pieds",
"chausse-trape",
"chausse-trapes",
"chausse-trappe",
"chausse-trappes",
"Chaussoy-Epagny",
"Chauvac-Laux-Montaux",
"Chauvency-le-Château",
"Chauvency-Saint-Hubert",
"chauve-souriceau",
"chauve-souricelle",
"chauve-souricière",
"chauve-souricières",
"chauve-souris",
"chauve-souris-garou",
"chauves-souriceaux",
"chauves-souricelles",
"chauves-souris",
"chauves-souris-garous",
"Chauvigny-du-Perche",
"Chauvincourt-Provemont",
"Chauvirey-le-Châtel",
"Chauvirey-le-Vieil",
"chaux-azote",
"chaux-azotes",
"Chaux-Champagny",
"Chaux-de-Fonnier",
"Chaux-des-Crotenay",
"Chaux-des-Prés",
"Chaux-la-Lotière",
"Chaux-lès-Clerval",
"Chaux-lès-Passavant",
"Chaux-lès-Port",
"Chaux-Neuve",
"Chavagnes-en-Paillers",
"Chavagnes-les-Redoux",
"Chavagneux-Montbertand",
"Chavaniac-Lafayette",
"Chavannes-de-Bogis",
"Chavannes-des-Bois",
"Chavannes-le-Chêne",
"Chavannes-les-Grands",
"Chavannes-le-Veyron",
"Chavannes-près-Renens",
"Chavannes-sur-l'Etang",
"Chavannes-sur-l'Étang",
"Chavannes-sur-Moudon",
"Chavannes-sur-Reyssouze",
"Chavannes-sur-Suran",
"Chavigny-Bailleul",
"Chavot-Courcourt",
"Chazay-d'Azergues",
"Chazé-Henry",
"Chazelles-sur-Albe",
"Chazelles-sur-Lavieu",
"Chazelles-sur-Lyon",
"Chazé-sur-Argos",
"Chazey-Bons",
"Chazey-sur-Ain",
"check-up",
"check-ups",
"cheese-cake",
"cheese-cakes",
"chef-boutonnais",
"Chef-Boutonnais",
"chef-boutonnaise",
"Chef-Boutonnaise",
"chef-boutonnaises",
"Chef-Boutonnaises",
"Chef-Boutonne",
"chef-d'oeuvre",
"chef-d'œuvre",
"Chef-du-Pont",
"Cheffreville-Tonnencourt",
"Chef-Haut",
"chef-lieu",
"chef-mets",
"chef-mois",
"chefs-d'oeuvre",
"chefs-d'œuvre",
"chefs-lieux",
"Cheignieu-la-Balme",
"Cheilly-lès-Maranges",
"Chein-Dessus",
"Cheix-en-Retz",
"Chelle-Debat",
"Chelle-Spou",
"Chémeré-le-Roi",
"Chémery-Chéhéry",
"Chémery-les-Deux",
"Chémery-sur-Bar",
"Chemillé-en-Anjou",
"Chemillé-Melay",
"Chemillé-sur-Dême",
"Chemillé-sur-Indrois",
"Chemilly-les-Raves",
"Chemilly-près-Seignelay",
"Chemilly-sur-Serein",
"Chemilly-sur-Yonne",
"Chemin-d'Aisey",
"Chemiré-en-Charnie",
"Chemiré-le-Gaudin",
"Chemiré-sur-Sarthe",
"Chenac-Saint-Seurin-d'Uzet",
"Chenailler-Mascheix",
"Chenay-le-Châtel",
"Chêne-Arnoult",
"Chêne-Bernard",
"Chêne-Bougeries",
"Chêne-Bourg",
"Chêne-Carré",
"Chenecey-Buillon",
"Chêne-Chenu",
"Chêne-Dolley",
"Chêne-en-Semine",
"chêne-gomme",
"Chênehutte-Trèves-Cunault",
"chêne-liège",
"chêne-marin",
"Chêne-Pâquier",
"chêne-pommier",
"Chêne-Sec",
"chênes-gommes",
"chênes-lièges",
"chênes-marins",
"Chenevrey-et-Morogne",
"Chenillé-Champteussé",
"Chenillé-Changé",
"Chennery-et-Landreville",
"Chennevières-lès-Louvres",
"Chennevières-sur-Marne",
"Chens-sur-Léman",
"Cheppes-la-Prairie",
"chèque-cadeau",
"chèque-repas",
"chèque-restaurant",
"chèques-cadeaux",
"chèques-repas",
"chèques-restaurants",
"chèques-vacances",
"chèque-vacances",
"Cherbourg-en-Cotentin",
"Cherbourg-Octeville",
"cherche-fiche",
"cherche-merde",
"cherche-midi",
"cherche-pointe",
"Chérencé-le-Héron",
"Chérencé-le-Roussel",
"Chermizy-Ailles",
"Cherveix-Cubas",
"Cherves-Châtelars",
"Cherves-Richemont",
"Chéry-Chartreuve",
"chéry-chartreuvois",
"Chéry-Chartreuvois",
"chéry-chartreuvoise",
"Chéry-Chartreuvoise",
"chéry-chartreuvoises",
"Chéry-Chartreuvoises",
"Chéry-lès-Pouilly",
"Chéry-lès-Rozoy",
"Chesalles-sur-Moudon",
"Cheseaux-Noréaz",
"Cheseaux-sur-Lausanne",
"Chesne-Arnoul",
"Chesne-Carré",
"Chesne-Dolley",
"Chesnois-Auboncourt",
"Chessy-les-Prés",
"Chester-le-Street",
"Chevagny-les-Chevrières",
"Chevagny-sur-Guye",
"Chevaigné-du-Maine",
"Cheval-Blanc",
"cheval-fondu",
"cheval-garou",
"cheval-heure",
"cheval-jupon",
"cheval-vapeur",
"Chevannes-Changy",
"chevau-léger",
"chevau-légers",
"chevaux-léger",
"chevaux-légers",
"chevaux-vapeur",
"cheveu-de-Marie-Madeleine",
"cheveux-de-Marie-Madeleine",
"Chevigney-lès-Vercel",
"Chevigney-sur-l'Ognon",
"Chevigny-en-Valière",
"Chevigny-Saint-Sauveur",
"Chevillon-sur-Huillard",
"Chevilly-Larue",
"Cheviré-le-Rouge",
"chèvre-choutiste",
"chèvre-choutistes",
"chèvre-feuille",
"chèvre-pied",
"chèvre-pieds",
"chèvres-feuilles",
"Chevresis-Monceau",
"Chevry-Cossigny",
"Chevry-en-Sereine",
"Chevry-sous-le-Bignon",
"chewing-gum",
"chewing-gums",
"Cheylard-l'Evêque",
"Cheylard-l'Évêque",
"Chezal-Benoît",
"Chézery-Forens",
"chez-moi",
"chez-soi",
"chez-sois",
"Chézy-en-Orxois",
"Chézy-sur-Marne",
"Chibougamo-Chapien",
"chiche-face",
"chiche-kebab",
"chiche-kébab",
"chiches-faces",
"chiches-kebabs",
"chie-en-lit",
"chie-en-lits",
"chien-assis",
"chien-cerf",
"chien-chaud",
"chien-chauds",
"chien-de-mer",
"chien-garou",
"chien-loup",
"chienne-louve",
"chiennes-louves",
"chien-nid",
"chien-rat",
"chiens-assis",
"chiens-cerf",
"chiens-de-mer",
"chiens-garous",
"chiens-loups",
"chiens-nids",
"chiens-rats",
"chiffres-clés",
"chiffres-taxes",
"chiffre-taxe",
"Chigny-les-Roses",
"Chilleurs-aux-Bois",
"Chilly-le-Vignoble",
"Chilly-Mazarin",
"Chilly-sur-Salins",
"china-paya",
"Chiopris-Viscone",
"chiotte-kès",
"chiottes-kès",
"Chirac-Bellevue",
"Chirat-l'Eglise",
"Chirat-l'Église",
"Chiré-en-Montreuil",
"chirurgien-dentiste",
"chirurgiens-dentistes",
"Chiry-Ourscamp",
"Chiry-Ourscamps",
"Chissay-en-Touraine",
"Chissey-en-Morvan",
"Chissey-lès-Mâcon",
"Chissey-sur-Loue",
"Chitry-les-Mines",
"Chivres-en-Laonnois",
"Chivres-Val",
"Chivy-lès-Etouvelles",
"Chivy-lès-Étouvelles",
"ch'kâra",
"ch'kâras",
"ch.-l.",
"chloro-IPC",
"chlorpyriphos-éthyl",
"chlorpyriphos-méthyl",
"ch'ni",
"choano-organismes",
"choche-pierre",
"choche-poule",
"Choilley-Dardenay",
"Choisy-au-Bac",
"Choisy-en-Brie",
"Choisy-la-Victoire",
"Choisy-le-Roi",
"Choloy-Ménillot",
"Chonas-l'Amballan",
"Chonville-Malaumont",
"Choqueuse-les-Bénards",
"Chorey-les-Beaune",
"choux-choux",
"choux-fleurs",
"choux-navets",
"choux-palmistes",
"choux-raves",
"Chouzé-sur-Loire",
"Chouzy-sur-Cisse",
"chow-chow",
"chow-chows",
"chrétiens-démocrates",
"christe-marine",
"christes-marines",
"chrom-brugnatellite",
"chrom-brugnatellites",
"chrome-clinozoïsite",
"chrome-clinozoïsites",
"chrome-fluorite",
"chrome-fluorites",
"chrome-pistazite",
"chrome-pistazites",
"chrome-trémolite",
"chrome-trémolites",
"chrome-zoïsite",
"chrome-zoïsites",
"chrono-localisation",
"chrono-localisations",
"ch't'aime",
"ch'ti",
"ch'tiisa",
"ch'tiisai",
"ch'tiisaient",
"ch'tiisais",
"ch'tiisait",
"ch'tiisâmes",
"ch'tiisant",
"ch'tiisas",
"ch'tiisasse",
"ch'tiisassent",
"ch'tiisasses",
"ch'tiisassiez",
"ch'tiisassions",
"ch'tiisât",
"ch'tiisâtes",
"ch'tiise",
"ch'tiisé",
"ch'tiisée",
"ch'tiisées",
"ch'tiisent",
"ch'tiiser",
"ch'tiisera",
"ch'tiiserai",
"ch'tiiseraient",
"ch'tiiserais",
"ch'tiiserait",
"ch'tiiseras",
"ch'tiisèrent",
"ch'tiiserez",
"ch'tiiseriez",
"ch'tiiserions",
"ch'tiiserons",
"ch'tiiseront",
"ch'tiises",
"ch'tiisés",
"ch'tiisez",
"ch'tiisiez",
"ch'tiisions",
"ch'tiisons",
"ch'timi",
"ch'tis",
"Chuffilly-Roche",
"chuteur-op",
"chuteurs-ops",
"cia-cia",
"ci-après",
"ci-attaché",
"ci-contre",
"ci-delez",
"ci-dessous",
"ci-dessus",
"ci-devant",
"Cier-de-Luchon",
"Cier-de-Rivière",
"Cierges-sous-Montfaucon",
"Cierp-Gaud",
"ci-gisent",
"ci-git",
"ci-gît",
"ci-haut",
"ci-hauts",
"ci-incluse",
"ci-incluses",
"ci-joint",
"ci-jointe",
"ci-jointes",
"ci-joints",
"ciné-club",
"ciné-clubs",
"cinéma-dinatoire",
"cinéma-dinatoires",
"ciné-parc",
"cinq-cents",
"cinq-dix-quinze",
"cinq-huitième",
"cinq-marsien",
"Cinq-Marsien",
"cinq-marsienne",
"Cinq-Marsienne",
"cinq-marsiennes",
"Cinq-Marsiennes",
"cinq-marsiens",
"Cinq-Marsiens",
"Cinq-Mars-la-Pile",
"cinq-mâts",
"cinq-quatre-un",
"cinq-six",
"cinquante-cinq",
"cinquante-cinquante",
"cinquante-deux",
"cinquante-et-un",
"cinquante-et-une",
"cinquante-et-unième",
"cinquante-et-unièmes",
"cinquante-huit",
"cinquante-neuf",
"cinquante-quatre",
"cinquante-sept",
"cinquante-six",
"cinquante-trois",
"ci-plus-bas",
"ci-plus-haut",
"circolo-mezzo",
"circonscriptions-clés",
"Circourt-sur-Mouzon",
"circum-aural",
"circum-continental",
"Ciré-d'Aunis",
"cire-pompe",
"cire-pompes",
"Cires-lès-Mello",
"Cirey-lès-Mareilles",
"Cirey-lès-Pontailler",
"Cirey-sur-Blaise",
"Cirey-sur-Vezouze",
"Cirfontaines-en-Azois",
"Cirfontaines-en-Ornois",
"cirque-ménagerie",
"cirques-ménageries",
"cirques-théâtres",
"cirque-théâtre",
"Ciry-le-Noble",
"Ciry-Salsogne",
"Cisai-Saint-Aubin",
"cis-gangétique",
"cis-gangétiques",
"Cissac-Médoc",
"Cisternes-la-Forêt",
"cis-verbénol",
"cité-dortoir",
"cité-État",
"cités-dortoirs",
"cités-États",
"citizen-band",
"citron-pays",
"citrons-pays",
"Civrac-de-Blaye",
"Civrac-de-Dordogne",
"Civrac-en-Médoc",
"Civrac-sur-Dordogne",
"Civray-de-Touraine",
"Civray-sur-Cher",
"Civray-sur-Esves",
"Civrieux-d'Azergues",
"Civry-en-Montagne",
"Civry-la-Forêt",
"Civry-sur-Serein",
"Cizay-la-Madeleine",
"clac-clac",
"clac-clacs",
"Clacton-on-Sea",
"Clacy-et-Thierret",
"Clairefontaine-en-Yvelines",
"Clairvaux-d'Aveyron",
"Clairvaux-les-Lacs",
"Clairy-Saulchoix",
"claque-merde",
"claque-oreille",
"claque-oreilles",
"claque-patin",
"claque-patins",
"Clarafond-Arcine",
"Clausthal-Zellerfeld",
"Clavans-en-Haut-Oisans",
"clavi-cylindre",
"clavi-harpe",
"Claville-Motteville",
"clavi-lyre",
"Clavy-Warby",
"Claye-Souilly",
"Cléden-Cap-Sizun",
"Cléden-Poher",
"clématites-viornes",
"clématite-viorne",
"Clémence-d'Ambel",
"Cléon-d'Andran",
"Cléré-du-Bois",
"Cléré-les-Pins",
"Cléré-sur-Layon",
"Clérey-la-Côte",
"Clérey-sur-Brenon",
"clérico-nationaliste",
"clérico-nationalistes",
"Clermont-Créans",
"Clermont-de-Beauregard",
"Clermont-Dessous",
"Clermont-Dessus",
"Clermont-d'Excideuil",
"Clermont-en-Argonne",
"Clermont-Ferrand",
"Clermont-le-Fort",
"Clermont-les-Fermes",
"Clermont-l'Hérault",
"Clermont-Pouyguillès",
"Clermont-Savès",
"Clermont-Soubiran",
"Clermont-sous-Huy",
"Clermont-sur-Lauquet",
"Cléry-en-Vexin",
"Cléry-Grand",
"Cléry-le-Grand",
"Cléry-le-Petit",
"Cléry-Petit",
"Cléry-Saint-André",
"Cléry-sur-Somme",
"clic-clac",
"Clichy-sous-Bois",
"client-cible",
"client-cibles",
"client-serveur",
"cligne-musette",
"climato-sceptique",
"climato-sceptiques",
"Clinchamps-sur-Orne",
"clin-foc",
"clin-focs",
"cloche-pied",
"cloche-pieds",
"cloche-plaque",
"clodinafop-propargyl",
"Clohars-Carnoët",
"Clohars-Fouesnant",
"Clonas-sur-Varèze",
"clopin-clopant",
"cloquintocet-mexyl",
"Clos-Fontaine",
"clos-fontainois",
"Clos-Fontainois",
"clos-fontainoise",
"Clos-Fontainoise",
"clos-fontainoises",
"Clos-Fontainoises",
"clos-masure",
"clos-masures",
"clos-vougeot",
"clos-vougeots",
"Cloyes-les-Trois-Rivières",
"Cloyes-sur-le-Loir",
"Cloyes-sur-Marne",
"club-house",
"clubs-houses",
"Cluj-Napoca",
"Clun's",
"Clussais-la-Pommeraie",
"Clux-Villeneuve",
"Cluze-et-Pâquier",
"Coat-Méal",
"coat-méalien",
"Coat-Méalien",
"coat-méalienne",
"Coat-Méalienne",
"coat-méaliennes",
"Coat-Méaliennes",
"coat-méaliens",
"Coat-Méaliens",
"cobalt-gris",
"cobalt-mica",
"cobalt-ochre",
"cobalto-épsomite",
"cobalto-épsomites",
"cobalto-sphaérosidérite",
"cobalto-sphaérosidérites",
"cobalts-gris",
"cobalts-micas",
"cobalts-ochres",
"Cochem-Zell",
"cochon-garou",
"cochons-garous",
"coco-de-mer",
"coco-fesses",
"cocotte-minute",
"Cocquio-Trevisago",
"codes-barres",
"codes-clés",
"cœur-de-Jeannette",
"coeur-de-pigeon",
"cœur-de-pigeon",
"coeurs-de-pigeon",
"coeurs-de-pigeons",
"cœurs-de-pigeons",
"Cœuvres-et-Valsery",
"coffre-fort",
"coffres-forts",
"Cognac-la-Forêt",
"Cognac-le-Froid",
"Cognat-Lyonne",
"Cognin-les-Gorges",
"Cognocoli-Monticchi",
"Coiffy-le-Bas",
"Coiffy-le-Haut",
"coin-coin",
"coin-coins",
"Coin-lès-Cuvry",
"Coin-sur-Seille",
"Coise-Saint-Jean-Pied-Gauthier",
"Coizard-Joches",
"Colayrac-Saint-Cirq",
"colin-maillard",
"colin-tampon",
"colis-route",
"colis-routes",
"Collandres-Quincarnon",
"collant-pipette",
"collant-pipettes",
"collé-serré",
"collés-serrés",
"collet-monté",
"Colleville-Montgomery",
"Colleville-sur-Mer",
"Colleville-sur-Orne",
"Collex-Bossy",
"Colligis-Crandelain",
"Colligny-Maizery",
"Colline-Beaumont",
"colloid-calcite",
"colloid-calcites",
"Collombey-Muraz",
"Collonge-Bellerive",
"Collonge-en-Charollais",
"Collonge-la-Madeleine",
"Collonges-au-Mont-d'Or",
"Collonges-la-Rouge",
"Collonges-lès-Bévy",
"Collonges-lès-Premières",
"Collonges-sous-Salève",
"Colmar-Berg",
"Colmesnil-Manneville",
"Colmier-le-Bas",
"Colmier-le-Haut",
"col-nu",
"Colombé-la-Fosse",
"Colombe-lès-Bithaine",
"Colombé-le-Sec",
"Colombe-lès-Vesoul",
"Colombey-les-Belles",
"Colombey-lès-Choiseul",
"Colombey-les-Deux-Eglises",
"Colombey-les-Deux-Églises",
"Colombie-Anglaise",
"Colombie-Britannique",
"Colombier-Châtelot",
"Colombier-en-Brionnais",
"Colombières-sur-Orb",
"Colombier-Fontaine",
"Colombier-le-Cardinal",
"Colombier-le-Jeune",
"Colombier-le-Vieux",
"Colombier-Saugnieu",
"Colombiers-du-Plessis",
"Colombiers-sur-Seulles",
"Colomby-Anguerny",
"Colomby-sur-Thaon",
"Colonard-Corubert",
"Colpach-Bas",
"Colpach-Haut",
"Colroy-la-Grande",
"Colroy-la-Roche",
"cols-nus",
"cols-verts",
"col-vert",
"col-verts",
"colville-okanagan",
"Comberanche-et-Epeluche",
"Comberanche-et-Épeluche",
"combi-short",
"combi-shorts",
"Comblain-au-Pont",
"Comblain-Fairon",
"comble-lacune",
"comble-lacunes",
"Combles-en-Barrois",
"Combres-sous-les-Côtes",
"Combs-la-Ville",
"com'com",
"come-back",
"comédie-ballet",
"comédies-ballets",
"Comezzano-Cizzago",
"Comines-Warneton",
"Comin-Yanga",
"Commelle-Vernay",
"commissaire-priseur",
"commissaires-priseurs",
"commis-voyageur",
"commis-voyageurs",
"Communailles-en-Montagne",
"compère-loriot",
"compères-loriot",
"compositeur-typographe",
"compositeur-typographes",
"Comps-la-Grand-Ville",
"Comps-sur-Artuby",
"comptes-rendus",
"concavo-concave",
"concavo-convexe",
"Conches-en-Ouche",
"Conches-sur-Gondoire",
"Conchez-de-Béarn",
"Conchil-le-Temple",
"Conchy-les-Pots",
"Conchy-sur-Canche",
"Concœur-et-Corboin",
"Concourson-sur-Layon",
"Condat-en-Combraille",
"Condat-lès-Montboissier",
"Condat-sur-Ganaveix",
"Condat-sur-Trincou",
"Condat-sur-Vézère",
"Condat-sur-Vienne",
"Condé-en-Brie",
"Condé-en-Normandie",
"Condé-Folie",
"Condeixa-a-Nova",
"Condé-lès-Autry",
"Condé-lès-Herpy",
"Condé-lès-Vouziers",
"Condé-Northen",
"Condé-Sainte-Libiaire",
"Condé-sur-Aisne",
"Condé-sur-Huisne",
"Condé-sur-Ifs",
"Condé-sur-Iton",
"Condé-sur-l'Escaut",
"Condé-sur-Marne",
"Condé-sur-Noireau",
"Condé-sur-Risle",
"Condé-sur-Sarthe",
"Condé-sur-Seulles",
"Condé-sur-Suippe",
"Condé-sur-Vesgre",
"Condé-sur-Vire",
"Condom-d'Aubrac",
"conférences-débats",
"Conflans-en-Jarnisy",
"Conflans-Sainte-Honorine",
"Conflans-sur-Anille",
"Conflans-sur-Lanterne",
"Conflans-sur-Loing",
"Conflans-sur-Seine",
"Confolent-Port-Dieu",
"conforte-main",
"Confort-Meilars",
"Congerville-Thionville",
"Congé-sur-Orne",
"Congis-sur-Thérouanne",
"Congo-Brazzaville",
"congo-kinois",
"Congo-Kinshasa",
"Congo-Léo",
"Congo-Léopoldville",
"congolo-kinois",
"congolo-kinoise",
"congolo-kinoises",
"Conie-Molitard",
"Conilhac-Corbières",
"Conilhac-de-la-Montagne",
"Connantray-Vaurefroy",
"Conne-de-la-Barde",
"Conne-de-Labarde",
"Conques-en-Rouergue",
"Conques-sur-Orbiel",
"conseil-général",
"Cons-la-Grandville",
"Consolation-Maisonnettes",
"Cons-Sainte-Colombe",
"Contamine-Sarzin",
"Contamine-sur-Arve",
"Conteville-en-Ternois",
"Conteville-lès-Boulogne",
"contra-latéral",
"contrat-cadre",
"contrats-cadres",
"Contres-en-Vairais",
"contrôle-commande",
"Contz-les-Bains",
"convexo-concave",
"copiable-collable",
"copiables-collables",
"copia-colla",
"copiage-collage",
"copiages-collages",
"copiai-collai",
"copiaient-collaient",
"copiais-collais",
"copiait-collait",
"copiâmes-collâmes",
"copiant-collant",
"copias-collas",
"copiasse-collasse",
"copiassent-collassent",
"copiasses-collasses",
"copiassiez-collassiez",
"copiassions-collassions",
"copiât-collât",
"copiâtes-collâtes",
"copie-colle",
"copié-collé",
"copié-collés",
"copiée-collée",
"copiées-collées",
"copie-lettres",
"copient-collent",
"copiera-collera",
"copierai-collerai",
"copieraient-colleraient",
"copierais-collerais",
"copierait-collerait",
"copieras-colleras",
"copier-coller",
"copier-collers",
"copièrent-collèrent",
"copierez-collerez",
"copieriez-colleriez",
"copierions-collerions",
"copierons-collerons",
"copieront-colleront",
"copies-colles",
"copiés-collés",
"copiez-collez",
"copiez-colliez",
"copions-collions",
"copions-collons",
"coq-à-l'âne",
"coq-de-roche",
"coq-héron",
"coqs-de-roche",
"coq-souris",
"coquel'œil",
"coquel'œils",
"coral-rag",
"corbeau-pêcheur",
"corbeaux-pêcheurs",
"Corbeil-Cerf",
"Corbeil-Essonnes",
"corbeil-essonnois",
"Corbeil-Essonnois",
"corbeil-essonnoise",
"Corbeil-Essonnoise",
"corbeil-essonnoises",
"Corbeil-Essonnoises",
"Corbère-Abères",
"Corbère-les-Cabanes",
"Corcelle-Mieslot",
"Corcelles-Cormondrèche",
"Corcelles-en-Beaujolais",
"Corcelles-Ferrières",
"Corcelles-le-Jorat",
"Corcelles-les-Arts",
"Corcelles-lès-Cîteaux",
"Corcelles-les-Monts",
"Corcelles-près-Concise",
"Corcelles-près-Payerne",
"Corcelles-sur-Chavornay",
"Corcoué-sur-Logne",
"Cordes-sur-Ciel",
"Cordes-Tolosannes",
"cordons-bleus",
"Corée-du-Nord",
"Corée-du-Sud",
"Corgnac-sur-l'Isle",
"Cormaranche-en-Bugey",
"Corme-Ecluse",
"Corme-Écluse",
"Cormeilles-en-Parisis",
"Cormeilles-en-Vexin",
"Cormelles-le-Royal",
"Corme-Royal",
"Cormoranche-sur-Saône",
"Cormot-le-Grand",
"Cormot-Vauchignon",
"corned-beef",
"corned-beefs",
"Corneilla-de-Conflent",
"Corneilla-del-Vercol",
"Corneilla-la-Rivière",
"Corneville-la-Fouquetière",
"Corneville-sur-Risle",
"corn-flake",
"corn-flakes",
"Cornillé-les-Caves",
"Cornillon-Confoux",
"Cornillon-en-Trièves",
"Cornillon-sur-l'Oule",
"Corny-la-Ville",
"Corny-Machéroménil",
"Corny-sur-Moselle",
"Corpataux-Magnedens",
"Corpoyer-la-Chapelle",
"corps-mort",
"corps-morts",
"Corps-Nuds",
"Corral-Rubio",
"Corrençon-en-Vercors",
"Corroy-le-Château",
"Corroy-le-Grand",
"Corse-du-Sud",
"Corsier-sur-Vevey",
"cortico-cortical",
"cortico-corticale",
"cortico-corticales",
"cortico-corticaux",
"Cortil-Noirmont",
"cortil-noirmontois",
"Cortil-Noirmontois",
"Cortil-Noirmontoise",
"Cortil-Wodon",
"Corvol-d'Embernard",
"Corvol-l'Orgueilleux",
"Coslédaà-Lube-Boast",
"Cosne-Cours-sur-Loire",
"Cosne-d'Allier",
"Cosnes-et-Romain",
"Cossé-d'Anjou",
"Cossé-en-Champagne",
"Cossé-le-Vivien",
"costard-cravate",
"costards-cravates",
"Costa-Rica",
"Costa-Ricain",
"costa-ricien",
"Costa-Ricien",
"costa-ricienne",
"Costa-Ricienne",
"costa-riciennes",
"Costa-Riciennes",
"costa-riciens",
"Costa-Riciens",
"costo-claviculaire",
"costo-sternal",
"costo-thoracique",
"costo-vertébral",
"costo-vertébrale",
"costo-vertébrales",
"costo-vertébraux",
"cosy-corner",
"cosy-corners",
"Coteau-Landais",
"Coteau-Libre",
"Coteaux-du-Lizon",
"Côtes-d'Armor",
"côtes-de-toul",
"Côtes-du-Nord",
"côtes-du-rhône",
"côtes-du-Rhône",
"côtes-du-rhônes",
"Coti-Chiavari",
"coton-poudre",
"coton-poudres",
"cotons-poudres",
"cotons-tiges",
"coton-tige",
"cotte-hardie",
"cottes-hardies",
"couble-soiffière",
"couche-culotte",
"couche-point",
"couche-points",
"couches-culottes",
"Couches-les-Mines",
"couche-tard",
"couche-tôt",
"couci-couça",
"couci-couci",
"Coucy-la-Ville",
"Coucy-le-Château",
"Coucy-le-Château-Auffrique",
"Coucy-lès-Eppes",
"Coucy-les-Saints",
"coude-à-coude",
"cou-de-jatte",
"Coudekerque-Branche",
"Coudekerque-sur-le-Rhin",
"Coudekerque-Village",
"cou-de-pied",
"coude-pied",
"Coudeville-sur-Mer",
"Coudray-au-Perche",
"Coudray-Rabut",
"Couesmes-Vaucé",
"Couffy-sur-Sarsonne",
"Couilly-Pont-aux-Dames",
"cou-jaune",
"Coulanges-la-Vineuse",
"Coulanges-lès-Nevers",
"Coulanges-sur-Yonne",
"Coulans-sur-Gée",
"Coulans-sur-Lizon",
"coule-sang",
"Coulmier-le-Sec",
"Coulombs-en-Valois",
"Coulommes-et-Marqueny",
"Coulommes-la-Montagne",
"Coulommes-lès-Attigny",
"Coulommiers-la-Tour",
"Coulonges-Cohan",
"Coulonges-les-Sablons",
"Coulonges-sur-l'Autize",
"Coulonges-sur-Sarthe",
"Coulonges-Thouarsais",
"Coulonge-sur-Charente",
"Couloumé-Mondebat",
"Coulounieix-Chamiers",
"Coulouvray-Boisbenâtre",
"cou-nu",
"coupé-cabriolet",
"coupé-collé",
"coupé-décalé",
"coupé-lit",
"Coupelle-Neuve",
"Coupelle-Vieille",
"couper-coller",
"coupés-cabriolets",
"coupés-collés",
"coupés-décalés",
"coupés-lits",
"coupon-réponse",
"coupons-réponses",
"coups-de-poing",
"courant-jet",
"courants-jets",
"Courcelles-au-Bois",
"Courcelles-Chaussy",
"Courcelles-de-Touraine",
"Courcelles-en-Barrois",
"Courcelles-en-Bassée",
"Courcelles-en-Montagne",
"Courcelles-Epayelles",
"Courcelles-Frémoy",
"Courcelles-la-Forêt",
"Courcelles-le-Comte",
"Courcelles-lès-Châtillon",
"Courcelles-lès-Gisors",
"Courcelles-lès-Lens",
"Courcelles-lès-Montbard",
"Courcelles-lès-Montbéliard",
"Courcelles-lès-Semur",
"Courcelles-Sapicourt",
"Courcelles-sous-Châtenois",
"Courcelles-sous-Moyencourt",
"Courcelles-sous-Thoix",
"Courcelles-sur-Aire",
"Courcelles-sur-Aujon",
"Courcelles-sur-Blaise",
"Courcelles-sur-Nied",
"Courcelles-sur-Seine",
"Courcelles-sur-Vesle",
"Courcelles-sur-Vesles",
"Courcelles-sur-Viosne",
"Courcelles-sur-Voire",
"Courcelles-Val-d'Esnoms",
"Cour-Cheverny",
"Courcy-aux-Loges",
"Courdimanche-sur-Essonne",
"Cour-et-Buis",
"coure-vite",
"Cour-l'Evêque",
"Cour-l'Évêque",
"Courlon-sur-Yonne",
"cour-masure",
"Cournon-d'Auvergne",
"Cour-Saint-Maurice",
"Coursan-en-Othe",
"Cours-de-Monségur",
"Cours-de-Pile",
"cours-de-pilois",
"Cours-de-Pilois",
"cours-de-piloise",
"Cours-de-Piloise",
"cours-de-piloises",
"Cours-de-Piloises",
"course-poursuite",
"courses-poursuites",
"Courseulles-sur-Mer",
"Cours-la-Ville",
"Cours-les-Bains",
"Cours-les-Barres",
"Courson-les-Carrières",
"Courson-Monteloup",
"Cour-sur-Heure",
"Cour-sur-Loire",
"courte-botte",
"courte-épée",
"courte-épine",
"courte-épines",
"courte-graisse",
"courte-lettre",
"Courtemont-Varennes",
"courte-pointe",
"courte-pointier",
"courte-queue",
"courtes-bottes",
"courtes-épées",
"courtes-lettres",
"Courtesoult-et-Gatey",
"courtes-pattes",
"courtes-pointes",
"courtes-queues",
"Courtetain-et-Salans",
"Courtine-le-Trucq",
"Courtois-sur-Yonne",
"Courtonne-la-Meurdrac",
"Courtonne-les-Deux-Eglises",
"Courtonne-les-Deux-Églises",
"Courtrai-Dutsel",
"Courtrizy-et-Fussigny",
"courts-bandages",
"courts-boutons",
"courts-circuits",
"courts-côtés",
"courts-cureaux",
"courts-jus",
"courts-métrages",
"courts-tours",
"Courville-sur-Eure",
"Cousances-au-Bois",
"Cousances-les-Forges",
"Cousances-lès-Triconville",
"cous-cous",
"cous-de-jatte",
"cous-de-pied",
"cous-jaunes",
"Coussac-Bonneval",
"Coussay-les-Bois",
"cout'donc",
"couteau-de-chasse",
"couteau-scie",
"couteaux-de-chasse",
"couteaux-scie",
"Couthures-sur-Garonne",
"Couture-d'Argenson",
"Couture-Saint-Germain",
"Couture-sur-Loir",
"couvre-casque",
"couvre-casques",
"couvre-chaussure",
"couvre-chaussures",
"couvre-chef",
"couvre-chefs",
"couvre-clef",
"couvre-clefs",
"couvre-face",
"couvre-faces",
"couvre-feu",
"couvre-feux",
"couvre-giberne",
"couvre-gibernes",
"couvre-joint",
"couvre-joints",
"couvre-lit",
"couvre-lits",
"couvre-livre",
"couvre-livres",
"couvre-lumière",
"couvre-lumières",
"couvre-manche",
"couvre-manches",
"couvre-nuque",
"couvre-nuques",
"couvre-objet",
"couvre-objets",
"couvre-orteil",
"couvre-orteils",
"couvre-pied",
"couvre-pieds",
"couvre-plat",
"couvre-plats",
"couvre-shako",
"couvre-shakos",
"couvre-sol",
"couvre-sols",
"couvreur-zingueur",
"Couvron-et-Aumencourt",
"Coux-et-Bigaroque",
"Couze-et-Saint-Front",
"Couzon-au-Mont-d'Or",
"Couzon-sur-Coulange",
"cover-girl",
"cover-girls",
"cow-boy",
"cow-boys",
"coxa-retrorsa",
"coxo-fémoral",
"Coye-la-Forêt",
"c'que",
"c'qui",
"crabe-araignée",
"crabes-araignées",
"crac-crac",
"crachouillot-thérapeute",
"craignant-Dieu",
"Crandelain-et-Malval",
"cran-gevrien",
"Cran-Gevrien",
"cran-gevrienne",
"Cran-Gevrienne",
"cran-gevriennes",
"Cran-Gevriennes",
"cran-gevriens",
"Cran-Gevriens",
"Cran-Gevrier",
"cranio-facial",
"Crannes-en-Champagne",
"Crans-près-Céligny",
"Cranves-Sales",
"cranves-salien",
"Cranves-Salien",
"cranves-saliène",
"Cranves-Saliène",
"cranves-saliènes",
"Cranves-Saliènes",
"cranves-saliens",
"Cranves-Saliens",
"crapaud-buffle",
"crapauds-buffles",
"crapet-soleil",
"Craponne-sur-Arzon",
"Cras-Avernas",
"Cras-sur-Reyssouze",
"Crasville-la-Mallet",
"Crasville-la-Rocquefort",
"Cravant-les-Côteaux",
"crayon-feutre",
"crayons-feutre",
"crayons-feutres",
"crayon-souris",
"créateur-typographe",
"Crécey-sur-Tille",
"Crêches-sur-Saône",
"Crécy-au-Mont",
"Crécy-Couvé",
"Crécy-en-Ponthieu",
"Crécy-la-Chapelle",
"Crécy-sur-Serre",
"crédit-bail",
"crédits-bail",
"crédits-bails",
"crédits-baux",
"crédits-temps",
"crédit-temps",
"Crégy-lès-Meaux",
"Crempigny-Bonneguête",
"Creney-près-Troyes",
"Crennes-sur-Fraubée",
"Créon-d'Armagnac",
"Crépieux-la-Pape",
"Crépy-en-Laonnois",
"Crépy-en-Valois",
"Crespy-le-Neuf",
"Cressac-Saint-Genis",
"Cressin-Rochefort",
"Cressy-Omencourt",
"Cressy-sur-Somme",
"Crest-Voland",
"crest-volantain",
"Crest-Volantain",
"crest-volantaine",
"Crest-Volantaine",
"crest-volantaines",
"Crest-Volantaines",
"crest-volantains",
"Crest-Volantains",
"Cré-sur-Loir",
"crête-de-coq",
"crête-marine",
"crêtes-de-coq",
"crêtes-marines",
"Creutzwald-la-Croix",
"Creuzier-le-Neuf",
"Creuzier-le-Vieux",
"Crevans-et-la-Chapelle-lès-Granges",
"Crevant-Laveine",
"crève-chassis",
"crève-chien",
"crève-chiens",
"crève-coeur",
"crève-cœur",
"Crèvecoeur-en-Auge",
"Crèvecœur-en-Auge",
"Crèvecœur-en-Brie",
"Crèvecœur-le-Grand",
"Crèvecœur-le-Petit",
"crève-coeurs",
"crève-cœurs",
"Crèvecoeur-sur-l'Escaut",
"Crèvecœur-sur-l'Escaut",
"crève-la-dalle",
"crève-la-faim",
"crevette-mante",
"crevettes-mantes",
"crève-vessie",
"crève-vessies",
"Creys-Mépieu",
"Creyssensac-et-Pissot",
"Crézançay-sur-Cher",
"Crézancy-en-Sancerre",
"cric-crac",
"crico-trachéal",
"crico-trachéale",
"crico-trachéales",
"crico-trachéaux",
"Cricqueville-en-Auge",
"Cricqueville-en-Bessin",
"cri-cri",
"cri-cris",
"Criel-sur-Mer",
"Crillon-le-Brave",
"Criquebeuf-en-Caux",
"Criquebeuf-la-Campagne",
"Criquebeuf-sur-Seine",
"Criquetot-le-Mauconduit",
"Criquetot-l'Esneval",
"Criquetot-sur-Longueville",
"Criquetot-sur-Ouville",
"Crissay-sur-Manse",
"cristallo-électrique",
"cristallo-électriques",
"criste-marine",
"Criteuil-la-Magdeleine",
"croad-langshan",
"croc-en-jambe",
"crocs-en-jambe",
"croiseur-école",
"croiseurs-écoles",
"Croissy-Beaubourg",
"Croissy-sur-Celle",
"Croissy-sur-Seine",
"Croisy-sur-Andelle",
"Croisy-sur-Eure",
"croix-caluois",
"Croix-Caluois",
"croix-caluoise",
"Croix-Caluoise",
"croix-caluoises",
"Croix-Caluoises",
"Croix-Caluyau",
"Croix-Chapeau",
"croix-de-feu",
"croix-de-Malte",
"Croix-de-Vie",
"Croix-en-Ternois",
"Croix-Fonsomme",
"Croix-Fonsommes",
"Croix-lez-Rouveroy",
"Croix-Mare",
"Croix-Moligneaux",
"croix-pile",
"Croix-Rousse",
"croix-roussien",
"Croix-roussien",
"croix-roussienne",
"Croix-roussienne",
"croix-roussiennes",
"Croix-roussiennes",
"croix-roussiens",
"Croix-roussiens",
"Croix-Valmer",
"Croizet-sur-Gand",
"Cro-Magnon",
"Cro-Magnons",
"cromlec'h",
"cromlec'hs",
"croque-abeilles",
"croque-au-sel",
"croque-en-bouche",
"croque-lardon",
"croque-lardons",
"croque-madame",
"croque-madames",
"croque-mademoiselle",
"croque-mademoiselles",
"croque-messieurs",
"croque-mitaine",
"croque-mitaines",
"croque-monsieur",
"croque-monsieurs",
"croque-mort",
"croque-morts",
"croque-moutons",
"croque-noisette",
"croque-noisettes",
"croque-noix",
"croque-note",
"Cros-de-Géorand",
"Cros-de-Montvert",
"Cros-de-Ronesque",
"Crosey-le-Grand",
"Crosey-le-Petit",
"crossing-over",
"Crosville-la-Vieille",
"Crosville-sur-Douve",
"Crosville-sur-Scie",
"crotte-du-diable",
"crotte-du-Diable",
"crottes-du-diable",
"crottes-du-Diable",
"Crottes-en-Pithiverais",
"Crouttes-sur-Marne",
"Crouy-en-Thelle",
"Crouy-Saint-Pierre",
"Crouy-sur-Cosson",
"Crouy-sur-Ourcq",
"Crouzet-Migette",
"crown-glass",
"Crozes-Hermitage",
"Crozon-sur-Vauvre",
"Crucey-Villages",
"cruci-capétien",
"Cruci-Capétien",
"cruci-capétienne",
"Cruci-Capétienne",
"cruci-capétiennes",
"Cruci-Capétiennes",
"cruci-capétiens",
"Cruci-Capétiens",
"cruci-falgardien",
"Cruci-Falgardien",
"cruci-falgardienne",
"Cruci-Falgardienne",
"cruci-falgardiennes",
"Cruci-Falgardiennes",
"cruci-falgardiens",
"Cruci-Falgardiens",
"crud-ammoniac",
"Cruquius-Oost",
"Cruviers-Lascours",
"Crux-la-Ville",
"Cruzilles-lès-Mépillat",
"Cruzy-le-Châtel",
"crypto-communiste",
"crypto-luthérien",
"crypto-luthérienne",
"crypto-luthériennes",
"crypto-luthériens",
"crypto-monnaie",
"crypto-monnaies",
"c'te",
"Cubières-sur-Cinoble",
"cubito-carpien",
"cubito-carpienne",
"cubito-carpiennes",
"cubito-carpiens",
"Cubjac-Auvézère-Val-d'Ans",
"cubo-prismatique",
"cubo-prismatiques",
"Cubry-lès-Faverney",
"Cubry-lès-Soing",
"Cubzac-les-Ponts",
"cucu-la-praline",
"cucul-la-praline",
"cueille-essaim",
"cueille-fruits",
"cueilleur-égreneur",
"cueilleurs-égreneurs",
"cueilleuse-égreneuse",
"cueilleuse-épanouilleuse",
"cueilleuses-égreneuses",
"cueilleuses-épanouilleuses",
"Cuges-les-Bains",
"Cuges-les-Pins",
"Cugliate-Fabiasco",
"Cugny-lès-Crouttes",
"cui-cui",
"Cuigy-en-Bray",
"çui-là",
"cuir-laine",
"Cuiry-Housse",
"cuiry-houssien",
"Cuiry-Houssien",
"cuiry-houssienne",
"Cuiry-Houssienne",
"cuiry-houssiennes",
"Cuiry-Houssiennes",
"cuiry-houssiens",
"Cuiry-Houssiens",
"Cuiry-lès-Chaudardes",
"Cuiry-lès-Iviers",
"Cuise-la-Motte",
"cuisse-de-nymphe",
"cuisse-madame",
"cuisse-madames",
"Cuissy-et-Geny",
"Cuisy-en-Almont",
"cuit-poires",
"cuit-pommes",
"cuit-vapeur",
"cuit-vapeurs",
"Cujavie-Poméranie",
"cul-bas",
"cul-bénit",
"cul-blanc",
"cul-brun",
"cul-cul",
"culcul-la-praline",
"cul-culs",
"cul-de-basse-fosse",
"cul-de-bouteille",
"cul-de-chien",
"cul-de-four",
"cul-de-jatte",
"cul-de-lampe",
"cul-de-plomb",
"cul-de-porc",
"cul-de-poule",
"cul-de-sac",
"cul-des-sartois",
"Cul-des-Sartois",
"Cul-des-Sartoise",
"Cul-des-Sarts",
"cul-doré",
"Culey-le-Patry",
"culit-api",
"Culles-les-Roches",
"cul-levé",
"cul-rouge",
"cul-rousselet",
"culs-bénits",
"culs-blancs",
"culs-de-basse-fosse",
"culs-de-bouteille",
"culs-de-chien",
"culs-de-four",
"culs-de-jatte",
"culs-de-lampe",
"culs-de-plomb",
"culs-de-poule",
"culs-de-sac",
"culs-levés",
"culs-rouges",
"culs-terreux",
"cul-terreux",
"cultivateurs-tasseurs",
"cultivateur-tasseur",
"culturo-scientifique",
"culturo-scientifiques",
"Cumières-le-Mort-Homme",
"cumulo-nimbus",
"Cuncy-lès-Varzy",
"cunéo-scaphoïdien",
"cupro-allophane",
"cupro-allophanes",
"cupro-aluminium",
"cupro-aluminiums",
"cupro-ammoniacal",
"cupro-elbaïte",
"cupro-elbaïtes",
"cupro-fraipontite",
"cupro-fraipontites",
"cupro-nickel",
"cupro-nickels",
"Cuq-Toulza",
"Curçay-sur-Dive",
"Curciat-Dongalon",
"Curcy-sur-Orne",
"cure-dent",
"cure-dents",
"cure-feu",
"cure-feux",
"Cureghem-lez-Bruxelles",
"cure-langue",
"cure-langues",
"cure-môle",
"cure-ongle",
"cure-ongles",
"cure-oreille",
"cure-oreilles",
"cure-pied",
"cure-pieds",
"cure-pipe",
"cure-pipes",
"Curis-au-Mont-d'Or",
"Cursolo-Orasso",
"Curtil-Saint-Seine",
"Curtil-sous-Buffières",
"Curtil-sous-Burnand",
"Curtil-Vergy",
"curti-marignacais",
"Curti-Marignacais",
"curti-marignacaise",
"Curti-Marignacaise",
"curti-marignacaises",
"Curti-Marignacaises",
"Curzay-sur-Vonne",
"Cuse-et-Adrisans",
"Cussac-Fort-Médoc",
"Cussac-sur-Loire",
"Cussey-les-Forges",
"Cussey-sur-Lison",
"Cussey-sur-l'Ognon",
"Cussy-en-Morvan",
"Cussy-la-Colonne",
"Cussy-le-Châtel",
"Cussy-les-Forges",
"custodi-nos",
"Cuttoli-Corticchiato",
"Cuverville-sur-Yères",
"Cuxac-Cabardès",
"Cuxac-d'Aude",
"Cuyk-Sainte-Agathe",
"Cuy-Saint-Fiacre",
"cycle-car",
"cycle-cars",
"cyclo-bus",
"cyclo-cross",
"cyclo-draisine",
"cyclo-draisines",
"cyclo-nomade",
"cyclo-nomades",
"cyclo-octyl-diméthylurée",
"cyclo-pousse",
"cyclo-pousses",
"cyhalofop-butyl",
"cylindro-conique",
"Cys-la-Commune",
"cyth's",
"cyto-architectonie",
"cyto-architectonies",
"cyto-architectonique",
"cyto-architectoniques",
"Dagny-Lambercy",
"Dahme-Forêt-de-Spree",
"Dain-en-Saulnois",
"Dainville-Bertheléville",
"dalai-lama",
"dalaï-lama",
"dalai-lamas",
"dalaï-lamas",
"Dalberg-Wendelstorf",
"Dallgow-Döberitz",
"Damas-aux-Bois",
"Damas-et-Bettegney",
"Dambach-la-Ville",
"Dambenoît-lès-Colombe",
"dame-aubert",
"dame-d'onze-heures",
"dame-jeanne",
"Dame-Marie",
"Dame-Marie-les-Bois",
"dame-pipi",
"dame-ronde",
"dames-d'onze-heures",
"dames-jeannes",
"dames-pipi",
"dames-rondes",
"Dammarie-en-Puisaye",
"Dammarie-les-Lys",
"Dammarie-sur-Loing",
"Dammarie-sur-Saulx",
"Dammartin-en-Goële",
"Dammartin-en-Serve",
"Dammartin-les-Templiers",
"Dammartin-Marpain",
"Dammartin-sur-Meuse",
"Dammartin-sur-Tigeaux",
"d-amphétamine",
"Dampierre-au-Temple",
"Dampierre-en-Bray",
"Dampierre-en-Bresse",
"Dampierre-en-Burly",
"Dampierre-en-Crot",
"Dampierre-en-Graçay",
"Dampierre-en-Montagne",
"Dampierre-en-Yvelines",
"Dampierre-et-Flée",
"Dampierre-le-Château",
"Dampierre-les-Bois",
"Dampierre-lès-Conflans",
"Dampierre-Saint-Nicolas",
"Dampierre-sous-Bouhy",
"Dampierre-sous-Brou",
"Dampierre-sur-Aube",
"Dampierre-sur-Auve",
"Dampierre-sur-Avre",
"Dampierre-sur-Blévy",
"Dampierre-sur-Boutonne",
"Dampierre-sur-le-Doubs",
"Dampierre-sur-Linotte",
"Dampierre-sur-Loire",
"Dampierre-sur-Moivre",
"Dampierre-sur-Salon",
"Dampvalley-lès-Colombe",
"Dampvalley-Saint-Pancras",
"Dancourt-Popincourt",
"Dangé-Saint-Romain",
"Danne-et-Quatre-Vents",
"Dannemarie-sur-Crète",
"Dannstadt-Schauernheim",
"danse-poteau",
"Danube-Ries",
"Danvou-la-Ferrière",
"Dão-Lafões",
"dare-dare",
"dar-et-dar",
"Darmstadt-Dieburg",
"Darney-aux-Chênes",
"datte-de-mer",
"Daubeuf-la-Campagne",
"Daubeuf-près-Vatteville",
"Daubeuf-Serville",
"Daumazan-sur-Arize",
"Dauzat-sur-Vodable",
"D-Day",
"dead-line",
"dead-lines",
"débat-spectacle",
"Débats-Rivière-d'Orpra",
"débauche-embauche",
"déca-ampère",
"déca-ampères",
"de-ci",
"Décines-Charpieu",
"découd-vite",
"découpe-neige",
"découpes-neige",
"décrochez-moi-ça",
"Dégrad-Edmond",
"Dégrad-Samson",
"déjà-vu",
"de-là",
"Delap-Uliga-Darrit",
"Delley-Portalban",
"Delouze-Rosières",
"Demange-aux-Eaux",
"déméton-méthyl",
"Demitz-Thumitz",
"démocrate-chrétien",
"démocrate-chrétienne",
"démocrates-chrétiennes",
"démocrates-chrétiens",
"démonte-pneu",
"démonte-pneus",
"dena'ina",
"dena'inas",
"Deneuille-lès-Chantelle",
"Deneuille-les-Mines",
"Dénezé-sous-Doué",
"Dénezé-sous-le-Lude",
"Dennweiler-Frohnbach",
"dent-de-cheval",
"dent-de-chien",
"dent-de-lion",
"dent-de-loup",
"dent-de-rat",
"dento-facial",
"dents-de-cheval",
"dents-de-chien",
"dents-de-lion",
"dépose-minute",
"dépôts-ventes",
"dépôt-vente",
"député-maire",
"députés-maires",
"dermato-allergologue",
"dermato-allergologues",
"dernière-née",
"dernier-né",
"dernier-nés",
"derniers-nés",
"des-agreable",
"des-agreables",
"déséthyl-terbuméton",
"dès-méshui",
"Dessau-Rosslau",
"dessinateur-typographe",
"dessous-de-bouteille",
"dessous-de-bras",
"dessous-de-plat",
"dessous-de-table",
"dessous-de-tables",
"dessus-de-lit",
"dessus-de-plat",
"dessus-de-porte",
"dessus-de-tête",
"Détain-et-Bruant",
"Deuil-la-Barre",
"Deux-Acren",
"deux-cents",
"deux-cent-vingt-et-un",
"Deux-Chaises",
"deux-chaisois",
"Deux-Chaisois",
"deux-chaisoise",
"Deux-Chaisoise",
"deux-chaisoises",
"Deux-Chaisoises",
"deux-chevaux",
"deux-dents",
"Deux-Evailles",
"Deux-Évailles",
"Deux-Jumeaux",
"deux-mâts",
"deux-mille",
"Deux-Montagnais",
"Deuxnouds-aux-Bois",
"Deuxnouds-devant-Beauzée",
"deux-peccable",
"deux-peccables",
"deux-pièces",
"deux-points",
"deux-ponts",
"Deux-Ponts",
"deux-quatre",
"Deux-Rivières",
"deux-roues",
"Deux-Sèvres",
"deux-temps",
"Deux-Verges",
"Déville-lès-Rouen",
"devrai-gondragnier",
"Devrai-Gondragnier",
"devrai-gondragnière",
"Devrai-Gondragnière",
"devrai-gondragnières",
"Devrai-Gondragnières",
"devrai-gondragniers",
"Devrai-Gondragniers",
"dextro-volubile",
"Dezize-lès-Maranges",
"D-glucuronate",
"D-glucuronates",
"D-glycéraldéhyde",
"di-1-p-menthène",
"diam's",
"Diane-Capelle",
"diastéréo-isomère",
"diastéréo-isomères",
"dichloro-diphényl-dichloroéthane",
"dichlorprop-p",
"diclofop-méthyl",
"Dieffenbach-au-Val",
"Dieffenbach-lès-Woerth",
"Dieffenbach-lès-Wœrth",
"Diekhusen-Fahrstedt",
"Diennes-Aubigny",
"Diensdorf-Radlow",
"Dieppe-sous-Douaumont",
"Diera-Zehren",
"Dierrey-Saint-Julien",
"Dierrey-Saint-Pierre",
"diesel-électrique",
"diésel-électrique",
"diesels-électriques",
"diésels-électriques",
"diéthyl-diphényl-dichloroéthane",
"Dietzenrode-Vatterode",
"Dieue-sur-Meuse",
"Diffembach-lès-Hellimer",
"Digne-les-Bains",
"digue-digue",
"dihydro-oxycodéinone",
"dik-dik",
"dik-diks",
"dikégulac-sodium",
"Dilsen-Stokkem",
"diméthénamide-P",
"diméthyl-dixanthogène",
"DIN-31635",
"dîner-spectacle",
"dîners-spectacles",
"Dingolfing-Landau",
"Dingy-en-Vuache",
"Dingy-Saint-Clair",
"dining-room",
"dining-rooms",
"Dinsheim-sur-Bruche",
"Dio-et-Valquières",
"diola-kasa",
"Dion-Valmont",
"diony-sapinois",
"Diony-Sapinois",
"diony-sapinoise",
"Diony-Sapinoise",
"diony-sapinoises",
"Diony-Sapinoises",
"diptéro-sodomie",
"diptéro-sodomies",
"disc-jockey",
"disc-jockeys",
"Dissay-sous-Courcillon",
"Dissen-Striesow",
"Dissé-sous-Ballon",
"Dissé-sous-le-Lude",
"distance-temps",
"Dittelsheim-Heßloch",
"Divatte-sur-Loire",
"divergi-nervé",
"Dives-sur-Mer",
"Divitz-Spoldershagen",
"Divonne-les-Bains",
"dix-cors",
"dix-en-dix",
"dix-heura",
"dix-heurai",
"dix-heuraient",
"dix-heurais",
"dix-heurait",
"dix-heurâmes",
"dix-heurant",
"dix-heuras",
"dix-heurasse",
"dix-heurassent",
"dix-heurasses",
"dix-heurassiez",
"dix-heurassions",
"dix-heurât",
"dix-heurâtes",
"dix-heure",
"dix-heuré",
"dix-heurent",
"dix-heurer",
"dix-heurera",
"dix-heurerai",
"dix-heureraient",
"dix-heurerais",
"dix-heurerait",
"dix-heureras",
"dix-heurèrent",
"dix-heurerez",
"dix-heureriez",
"dix-heurerions",
"dix-heurerons",
"dix-heureront",
"dix-heures",
"dix-heurez",
"dix-heuriez",
"dix-heurions",
"dix-heurons",
"dix-huit",
"dix-huitième",
"dix-huitièmement",
"dix-huitièmes",
"dix-huitiémisme",
"dix-huitiémismes",
"dix-huitiémiste",
"dix-huitièmiste",
"dix-huitiémistes",
"dix-huitièmistes",
"dix-mille",
"dix-millième",
"dix-millièmes",
"dix-millionième",
"dix-millionièmes",
"dix-neuf",
"dix-neuvième",
"dix-neuvièmement",
"dix-neuvièmes",
"dix-neuviémisme",
"dix-neuviémismes",
"dix-neuviémiste",
"dix-neuvièmiste",
"dix-neuviémistes",
"dix-neuvièmistes",
"dix-roues",
"dix-sept",
"dix-septième",
"dix-septièmement",
"dix-septièmes",
"dix-septiémisme",
"dix-septiémismes",
"dix-septiémiste",
"dix-septièmiste",
"dix-septiémistes",
"dix-septièmistes",
"Dizy-le-Gros",
"djoumada-l-oula",
"djoumada-t-tania",
"DMTA-P",
"doati-casteidois",
"Doati-Casteidois",
"doati-casteidoise",
"Doati-Casteidoise",
"doati-casteidoises",
"Doati-Casteidoises",
"Dobbin-Linstow",
"Doberlug-Kirchhain",
"Doberschau-Gaußig",
"docu-fiction",
"docu-fictions",
"documentaire-choc",
"documentaires-chocs",
"dodémorphe-acétate",
"Dœuil-sur-le-Mignon",
"dog-cart",
"dog-carts",
"Dohm-Lammersdorf",
"doigt-de-gant",
"doigts-de-gant",
"Dol-de-Bretagne",
"Dolus-d'Oléron",
"Dolus-le-Sec",
"Domart-en-Ponthieu",
"Domart-sur-la-Luce",
"Dombasle-devant-Darney",
"Dombasle-en-Argonne",
"Dombasle-en-Xaintois",
"Dombasle-sur-Meurthe",
"Dombrot-le-Sec",
"Dombrot-sur-Vair",
"Domburg-Binnen",
"Domburg-Buiten",
"Domecy-sur-Cure",
"Domecy-sur-le-Vault",
"Domèvre-en-Haye",
"Domèvre-sous-Montfort",
"Domèvre-sur-Avière",
"Domèvre-sur-Durbion",
"Domèvre-sur-Vezouze",
"Domezain-Berraute",
"Domfront-en-Champagne",
"Domfront-en-Poiraie",
"Domléger-Longvillers",
"Dom-le-Mesnil",
"dommage-intérêt",
"dommages-intérêts",
"Dommarie-Eulmont",
"Dommartin-aux-Bois",
"Dommartin-Dampierre",
"Dommartin-la-Chapelle",
"Dommartin-la-Chaussée",
"Dommartin-la-Montagne",
"Dommartin-le-Coq",
"Dommartin-le-Franc",
"Dommartin-le-Saint-Père",
"Dommartin-lès-Cuiseaux",
"Dommartin-lès-Remiremont",
"Dommartin-lès-Toul",
"Dommartin-lès-Vallois",
"Dommartin-Lettrée",
"Dommartin-sous-Amance",
"Dommartin-sous-Hans",
"Dommartin-sur-Vraine",
"Dommartin-Varimont",
"Dommary-Baroncourt",
"Domnom-lès-Dieuze",
"Domnon-lès-Dieuze",
"Dompierre-aux-Bois",
"Dompierre-Becquincourt",
"Dompierre-du-Chemin",
"Dompierre-en-Morvan",
"Dompierre-les-Eglises",
"Dompierre-les-Églises",
"Dompierre-les-Ormes",
"Dompierre-les-Tilleuls",
"Dompierre-sous-Sanvignes",
"Dompierre-sur-Authie",
"Dompierre-sur-Besbre",
"Dompierre-sur-Chalaronne",
"Dompierre-sur-Charente",
"Dompierre-sur-Helpe",
"Dompierre-sur-Héry",
"Dompierre-sur-Mer",
"Dompierre-sur-Mont",
"Dompierre-sur-Nièvre",
"Dompierre-sur-Veyle",
"Dompierre-sur-Yon",
"Domptail-en-l'Air",
"dompte-venin",
"dompte-venins",
"Domremy-aux-Bois",
"Domremy-la-Canne",
"Domremy-Landéville",
"Domrémy-la-Pucelle",
"DOM-ROM",
"DOM-TOM",
"dom-tomien",
"dom-tomienne",
"dom-tomiennes",
"dom-tomiens",
"donation-partage",
"donations-partages",
"Donchery-sur-Meuse",
"Doncourt-aux-Templiers",
"Doncourt-lès-Conflans",
"Doncourt-lès-Longuyon",
"Doncourt-sur-Meuse",
"Dongen-Vaart",
"don-juanisme",
"don-juanismes",
"donnant-donnant",
"donne-jour",
"Donnemain-Saint-Mamès",
"Donnemarie-Dontilly",
"don-quichottisme",
"don-quichottismes",
"Donville-les-Bains",
"Donzy-le-National",
"Donzy-le-Pertuis",
"doom-death",
"Dore-l'Eglise",
"Dore-l'Église",
"Dörfles-Esbach",
"Dornburg-Camburg",
"Dorn-Dürkheim",
"dorso-vélaire",
"dorso-vélaires",
"dos-d'âne",
"Dossenheim-Kochersberg",
"Dossenheim-sur-Zinsel",
"doubet-talibautier",
"Doubet-Talibautier",
"doubet-talibautière",
"Doubet-Talibautière",
"doubet-talibautières",
"Doubet-Talibautières",
"doubet-talibautiers",
"Doubet-Talibautiers",
"doubles-aubiers",
"doubles-bécassines",
"doubles-bouches",
"doubles-bulbes",
"doubles-canons",
"doubles-chaînes",
"doubles-clics",
"doubles-croches",
"doubles-feuilles",
"doubles-fonds",
"doubles-mains",
"doubles-sens",
"douce-amère",
"douces-amères",
"Douchy-lès-Ayette",
"Douchy-les-Mines",
"Douchy-Montcorbon",
"Doucy-en-Bauges",
"Doudeauville-en-Vexin",
"Doué-en-Anjou",
"Doué-la-Fontaine",
"Doulaincourt-Saucourt",
"Doulevant-le-Château",
"Doulevant-le-Petit",
"dou-l-hidjja",
"dou-l-qa'da",
"Doumely-Bégny",
"Dourd'Hal",
"Douville-en-Auge",
"Douville-sur-Andelle",
"Douvres-la-Délivrande",
"doux-agnel",
"doux-à-l'agneau",
"doux-amer",
"doux-amers",
"doux-ballon",
"doux-vert",
"Douy-la-Ramée",
"down-loada",
"down-loadai",
"down-loadaient",
"down-loadais",
"down-loadait",
"down-loadâmes",
"down-loadant",
"down-loadas",
"down-loadasse",
"down-loadassent",
"down-loadasses",
"down-loadassiez",
"down-loadassions",
"down-loadât",
"down-loadâtes",
"down-loade",
"down-loadé",
"down-loadée",
"down-loadées",
"down-loadent",
"down-loader",
"down-loadera",
"down-loaderai",
"down-loaderaient",
"down-loaderais",
"down-loaderait",
"down-loaderas",
"down-loadèrent",
"down-loaderez",
"down-loaderiez",
"down-loaderions",
"down-loaderons",
"down-loaderont",
"down-loades",
"down-loadés",
"down-loadez",
"down-loadiez",
"down-loadions",
"down-loadons",
"Drachenbronn-Birlenbach",
"Dracy-le-Fort",
"Dracy-lès-Couches",
"Dracy-Saint-Loup",
"Dracy-sur-Ouanne",
"Dragey-Ronthon",
"dragonnet-lyre",
"drainage-taupe",
"draineuses-trancheuses",
"draineuse-trancheuse",
"drap-housse",
"drap-housses",
"Dreis-Brück",
"drelin-drelin",
"Drémil-Lafage",
"Dreuil-Hamel",
"Dreuil-lès-Amiens",
"Dreuil-lès-Molliens",
"Driebergen-Rijsenburg",
"drift-ice",
"drift-ices",
"dring-dring",
"drive-in",
"drive-ins",
"drive-way",
"drive-ways",
"droit-fil",
"droit-fils",
"drop-goal",
"drop-goals",
"Droue-sur-Drouette",
"Droupt-Saint-Basle",
"Droupt-Sainte-Marie",
"Drouvin-le-Marais",
"drug-store",
"drug-stores",
"Drumettaz-Clarafond",
"Druyes-les-Belles-Fontaines",
"Druy-Parigny",
"dry-tooleur",
"dry-tooleurs",
"dry-tooling",
"D-sucre",
"D-sucres",
"dual-core",
"dual-cores",
"duc-d'albe",
"duc-d'Albe",
"Duc-de-Thol",
"duché-pairie",
"duchés-pairies",
"ducs-d'albe",
"ducs-d'Albe",
"Ducy-Sainte-Marguerite",
"duffel-coat",
"duffel-coats",
"duffle-coat",
"duffle-coats",
"Dugny-sur-Meuse",
"Duhamellois-de-l'Ouest",
"Duhort-Bachen",
"Duilhac-sous-Peyrepertuse",
"Duino-Aurisina",
"dum-dum",
"Dunières-sur-Eyrieux",
"Dunière-sur-Eyrieux",
"Dun-le-Palestel",
"Dun-le-Palleteau",
"Dun-le-Poëlier",
"Dun-les-Places",
"Dun-sur-Auron",
"Dun-sur-Grandry",
"Dun-sur-Meuse",
"duo-tang",
"duo-tangs",
"duplicato-dentelé",
"Dupont-Lajoie",
"Durban-Corbières",
"Durban-sur-Arize",
"dur-bec",
"Durdat-Larequille",
"dure-mère",
"dure-peau",
"dures-mères",
"dures-peaux",
"Durfort-et-Saint-Martin-de-Sossenac",
"Durfort-Lacapelette",
"Dürrröhrsdorf-Dittersbach",
"durs-becs",
"duty-free",
"DVD-RAM",
"DVD-ROM",
"DVD-RW",
"dynamo-électrique",
"dynamo-électriques",
"E7,Z9-12:Ac",
"E7-Z9-dodécadiénylacétate",
"E8,E10-dodécadiène-1-ol",
"e-administration",
"e-administrations",
"eau-bénitier",
"eau-bénitiers",
"Eaucourt-sur-Somme",
"eau-de-vie",
"eau-forte",
"eaux-bonnais",
"Eaux-Bonnais",
"eaux-bonnaise",
"Eaux-Bonnaise",
"eaux-bonnaises",
"Eaux-Bonnaises",
"Eaux-Bonnes",
"eaux-de-vie",
"eaux-fortes",
"Eaux-Puiseaux",
"eaux-vannes",
"Ében-Émael",
"Eberbach-Seltz",
"Eberbach-Wœrth",
"Ebersbach-Musbach",
"Ebnat-Kappel",
"e-book",
"e-business",
"Ecalles-Alix",
"Écalles-Alix",
"Ecardenville-la-Campagne",
"Écardenville-la-Campagne",
"Ecardenville-sur-Eure",
"Écardenville-sur-Eure",
"e-carte",
"e-cartes",
"écarts-types",
"écart-type",
"Écaussinnes-d'Enghien",
"Écaussinnes-Lalaing",
"Eccica-Suarella",
"Echarri-Aranaz",
"Echelle-Saint-Aurin",
"Échelle-Saint-Aurin",
"Echenans-sous-Mont-Vaudois",
"Échenans-sous-Mont-Vaudois",
"Echenoz-la-Méline",
"Échenoz-la-Méline",
"Echenoz-le-Sec",
"Échenoz-le-Sec",
"écho-location",
"écho-locations",
"échos-radars",
"Echt-Susteren",
"e-cig",
"e-cigarette",
"e-cigarettes",
"e-cigs",
"e-cinéma",
"e-cinémas",
"Eclans-Nenon",
"Éclans-Nenon",
"Eclaron-Braucourt-Sainte-Livière",
"Éclaron-Braucourt-Sainte-Livière",
"e-client",
"e-clope",
"e-clopes",
"Eclose-Badinières",
"Eclusier-Vaux",
"Éclusier-Vaux",
"Ecole-Valentin",
"École-Valentin",
"e-commerçant",
"e-commerçants",
"e-commerce",
"écorche-œil",
"Ecotay-l'Olme",
"Écotay-l'Olme",
"Ecot-la-Combe",
"Écot-la-Combe",
"Écouché-les-Vallées",
"e-couponing",
"Ecourt-Saint-Quentin",
"Écourt-Saint-Quentin",
"Ecoust-Saint-Mein",
"Écoust-Saint-Mein",
"écoute-s'il-pleut",
"Écoute-s'il-pleut",
"écrase-merde",
"écrase-merdes",
"Ecretteville-lès-Baons",
"Écretteville-lès-Baons",
"Ecretteville-sur-Mer",
"Écretteville-sur-Mer",
"e-criminalité",
"e-criminalités",
"Écry-le-Franc",
"Ectot-l'Auber",
"Ectot-lès-Baons",
"Ecurey-en-Verdunois",
"Écurey-en-Verdunois",
"écurie-ménagerie",
"écuries-ménageries",
"Ecury-le-Repos",
"Écury-le-Repos",
"Ecury-sur-Coole",
"Écury-sur-Coole",
"Edam-Volendam",
"e-délinquance",
"e-délinquances",
"Ediger-Eller",
"Edingen-Neckarhausen",
"edit-a-thon",
"edit-a-thons",
"Édouard-Josse",
"EE-8,10-DDDOL",
"Eelde-Paterswolde",
"Effelder-Rauenstein",
"effet-bulle",
"effets-bulles",
"Efringen-Kirchen",
"égal-à-tous",
"Egée-Méridionale",
"Égée-Méridionale",
"Egée-Septentrionale",
"Égée-Septentrionale",
"Eggenstein-Leopoldshafen",
"Eglise-aux-Bois",
"Église-aux-Bois",
"église-halle",
"Egliseneuve-d'Entraigues",
"Égliseneuve-d'Entraigues",
"Egliseneuve-des-Liards",
"Égliseneuve-des-Liards",
"Eglise-Neuve-de-Vergt",
"Église-Neuve-de-Vergt",
"Eglise-Neuve-d'Issac",
"Église-Neuve-d'Issac",
"Egliseneuve-près-Billom",
"Égliseneuve-près-Billom",
"Egmond-Binnen",
"ego-document",
"ego-documents",
"Egriselles-le-Bocage",
"Égriselles-le-Bocage",
"Eguille-sur-Seudre",
"Éguille-sur-Seudre",
"Eguilly-sous-Bois",
"Éguilly-sous-Bois",
"Eguzon-Chantôme",
"Éguzon-Chantôme",
"égypto-lybien",
"égypto-tchado-soudanais",
"Éhein-bas",
"Ehlange-sur-Mess",
"Ehra-Lessien",
"Eifel-Bitburg-Prüm",
"Eijsden-Margraten",
"Einville-au-Jard",
"éka-actinide",
"éka-actinides",
"éka-aluminium",
"éka-astate",
"éka-bismuth",
"éka-bore",
"éka-borium",
"éka-francium",
"éka-mercure",
"éka-plomb",
"éka-polonium",
"éka-prométhium",
"éka-silicium",
"e-la",
"e-la-fa",
"e-la-mi",
"el-âsker",
"Elbe-Elster",
"Elbe-Parey",
"Elbeuf-en-Bray",
"Elbeuf-sur-Andelle",
"Elburgo-Burgelu",
"Elchesheim-Illingen",
"électron-volt",
"électron-volts",
"élément-clé",
"éléments-clés",
"Eleu-dit-Leauwette",
"Éleu-dit-Leauwette",
"Elincourt-Sainte-Marguerite",
"Élincourt-Sainte-Marguerite",
"Elisabeth-Sophien-Koog",
"Elise-Daucourt",
"Élise-Daucourt",
"elle-même",
"Ellenz-Poltersdorf",
"elles-mêmes",
"Ellignies-Sainte-Anne",
"ello-rhénan",
"ello-rhénane",
"ello-rhénanes",
"ello-rhénans",
"Elsdorf-Westermühlen",
"Elvillar-Bilar",
"e-mail",
"e-maila",
"e-mailai",
"e-mailaient",
"e-mailais",
"e-mailait",
"e-mailâmes",
"e-mailant",
"e-mailas",
"e-mailasse",
"e-mailassent",
"e-mailasses",
"e-mailassiez",
"e-mailassions",
"e-mailât",
"e-mailâtes",
"e-maile",
"e-mailé",
"e-mailée",
"e-mailées",
"e-mailent",
"e-mailer",
"e-mailera",
"e-mailerai",
"e-maileraient",
"e-mailerais",
"e-mailerait",
"e-maileras",
"e-mailèrent",
"e-mailerez",
"e-maileriez",
"e-mailerions",
"e-mailerons",
"e-maileront",
"e-mailes",
"e-mailés",
"e-maileur",
"e-maileurs",
"e-maileuse",
"e-maileuses",
"e-mailez",
"e-mailiez",
"e-mailing",
"e-mailings",
"e-mailions",
"e-mailons",
"e-marketeur",
"e-marketeurs",
"e-marketeuse",
"e-marketeuses",
"e-marketing",
"e-marketings",
"emballage-bulle",
"emballage-coque",
"emballages-bulles",
"emballages-coques",
"Embres-et-Castelmaure",
"e-merchandiser",
"émetteur-récepteur",
"émetteur-récepteurs",
"émilienne-romagnole",
"Émilienne-Romagnole",
"émiliennes-romagnoles",
"Émiliennes-Romagnoles",
"émilien-romagnol",
"Émilien-Romagnol",
"émiliens-romagnols",
"Émiliens-Romagnols",
"Émilie-Romagne",
"émirato-algérien",
"émirato-allemand",
"émirato-allemands",
"émirato-britannique",
"émirato-britanniques",
"émirato-helvétique",
"émirato-helvétiques",
"émirato-indien",
"émirato-iranien",
"émirato-japonais",
"émission-débat",
"Emmelsbüll-Horsbüll",
"Emmer-Compascuum",
"Emmer-Erfscheidenveen",
"Emmingen-Liptingen",
"emo-sexualité",
"emo-sexualités",
"emporte-pièce",
"emporte-pièces",
"énargite-beta",
"énargite-betas",
"en-avant",
"en-avants",
"en-but",
"en-buts",
"en-cas",
"Encausse-les-Thermes",
"Enclave-de-la-Martinière",
"en-cours",
"en-deçà",
"en-dessous",
"en-dessus",
"Enencourt-Léage",
"Énencourt-Léage",
"Enencourt-le-Sec",
"Énencourt-le-Sec",
"enfant-bulle",
"enfant-roi",
"enfants-bulles",
"enfant-soldat",
"enfants-robots",
"enfants-rois",
"enfants-soldats",
"enfile-aiguille",
"enfile-aiguilles",
"enfle-boeuf",
"enfle-bœuf",
"enfle-boeufs",
"enfle-bœufs",
"en-garant",
"Enge-Sande",
"Enghien-les-Bains",
"Englesqueville-en-Auge",
"Englesqueville-la-Percée",
"Enkenbach-Alsenborn",
"Ennepe-Ruhr",
"Ennetières-en-Weppes",
"enquêtes-minute",
"Enquin-les-Mines",
"Enquin-lez-Guinegatte",
"Enquin-sur-Baillons",
"enseignant-chercheur",
"enseignante-chercheuse",
"enseignantes-chercheuses",
"enseignants-chercheurs",
"Ensuès-la-Redonne",
"entéro-colite",
"entéro-colites",
"entéro-cystocèle",
"entéro-épiplocèle",
"entéro-épiplocèles",
"entéro-hémorrhagie",
"entéro-hydrocèle",
"entéro-hydromphale",
"entéro-mérocèle",
"entéro-mésentérite",
"entéro-pneumatose",
"entéro-rénal",
"entéro-rénale",
"entéro-rénales",
"entéro-rénaux",
"entéro-sarcocèle",
"entéro-sarcocèles",
"entéro-sténose",
"entéro-sténoses",
"en-tête",
"en-têtes",
"en-tout-cas",
"entr'abat",
"entr'abattaient",
"entr'abattait",
"entr'abattant",
"entr'abatte",
"entr'abattent",
"entr'abattez",
"entr'abattiez",
"entr'abattîmes",
"entr'abattions",
"entr'abattirent",
"entr'abattissent",
"entr'abattissions",
"entr'abattit",
"entr'abattît",
"entr'abattîtes",
"entr'abattons",
"entr'abattra",
"entr'abattraient",
"entr'abattrait",
"entr'abattre",
"entr'abattre",
"entr'abattrez",
"entr'abattriez",
"entr'abattrions",
"entr'abattrons",
"entr'abattront",
"entr'abattu",
"entr'abattue",
"entr'abattues",
"entr'abattus",
"entr'aborda",
"entr'abordaient",
"entr'abordait",
"entr'abordâmes",
"entr'abordant",
"entr'abordassent",
"entr'abordassiez",
"entr'abordassions",
"entr'abordât",
"entr'abordâtes",
"entr'aborde",
"entr'abordé",
"entr'abordées",
"entr'abordent",
"entr'aborder",
"entr'aborder",
"entr'abordera",
"entr'aborderaient",
"entr'aborderait",
"entr'abordèrent",
"entr'aborderez",
"entr'aborderiez",
"entr'aborderions",
"entr'aborderons",
"entr'aborderont",
"entr'abordés",
"entr'abordez",
"entr'abordiez",
"entr'abordions",
"entr'abordons",
"entr'accola",
"entr'accolaient",
"entr'accolait",
"entr'accolâmes",
"entr'accolant",
"entr'accolassent",
"entr'accolassiez",
"entr'accolassions",
"entr'accolât",
"entr'accolâtes",
"entr'accole",
"entr'accolé",
"entr'accolées",
"entr'accolent",
"entr'accoler",
"entr'accoler",
"entr'accolera",
"entr'accoleraient",
"entr'accolerait",
"entr'accolèrent",
"entr'accolerez",
"entr'accoleriez",
"entr'accolerions",
"entr'accolerons",
"entr'accoleront",
"entr'accolés",
"entr'accolez",
"entr'accoliez",
"entr'accolions",
"entr'accolons",
"entr'accorda",
"entr'accordaient",
"entr'accordait",
"entr'accordâmes",
"entr'accordant",
"entr'accordassent",
"entr'accordassiez",
"entr'accordassions",
"entr'accordât",
"entr'accordâtes",
"entr'accorde",
"entr'accordé",
"entr'accordées",
"entr'accordent",
"entr'accorder",
"entr'accorder",
"entr'accordera",
"entr'accorderaient",
"entr'accorderait",
"entr'accordèrent",
"entr'accorderez",
"entr'accorderiez",
"entr'accorderions",
"entr'accorderons",
"entr'accorderont",
"entr'accordés",
"entr'accordez",
"entr'accordiez",
"entr'accordions",
"entr'accordons",
"entr'accrocha",
"entr'accrochaient",
"entr'accrochait",
"entr'accrochâmes",
"entr'accrochant",
"entr'accrochassent",
"entr'accrochassiez",
"entr'accrochassions",
"entr'accrochât",
"entr'accrochâtes",
"entr'accroche",
"entr'accroché",
"entr'accrochées",
"entr'accrochent",
"entr'accrocher",
"entr'accrocher",
"entr'accrochera",
"entr'accrocheraient",
"entr'accrocherait",
"entr'accrochèrent",
"entr'accrocherez",
"entr'accrocheriez",
"entr'accrocherions",
"entr'accrocherons",
"entr'accrocheront",
"entr'accrochés",
"entr'accrochez",
"entr'accrochiez",
"entr'accrochions",
"entr'accrochons",
"entr'accusa",
"entr'accusaient",
"entr'accusait",
"entr'accusâmes",
"entr'accusant",
"entr'accusassent",
"entr'accusassiez",
"entr'accusassions",
"entr'accusât",
"entr'accusâtes",
"entr'accuse",
"entr'accusé",
"entr'accusées",
"entr'accusent",
"entr'accuser",
"entr'accuser",
"entr'accusera",
"entr'accuseraient",
"entr'accuserait",
"entr'accusèrent",
"entr'accuserez",
"entr'accuseriez",
"entr'accuserions",
"entr'accuserons",
"entr'accuseront",
"entr'accusés",
"entr'accusez",
"entr'accusiez",
"entr'accusions",
"entr'accusons",
"entr'acte",
"entr'actes",
"entr'adapta",
"entr'adaptaient",
"entr'adaptait",
"entr'adaptâmes",
"entr'adaptant",
"entr'adaptassent",
"entr'adaptassiez",
"entr'adaptassions",
"entr'adaptât",
"entr'adaptâtes",
"entr'adapte",
"entr'adapté",
"entr'adaptées",
"entr'adaptent",
"entr'adapter",
"entr'adapter",
"entr'adaptera",
"entr'adapteraient",
"entr'adapterait",
"entr'adaptèrent",
"entr'adapterez",
"entr'adapteriez",
"entr'adapterions",
"entr'adapterons",
"entr'adapteront",
"entr'adaptés",
"entr'adaptez",
"entr'adaptiez",
"entr'adaptions",
"entr'adaptons",
"entr'admira",
"entr'admirai",
"entr'admiraient",
"entr'admirais",
"entr'admirait",
"entr'admirâmes",
"entr'admirant",
"entr'admiras",
"entr'admirasse",
"entr'admirassent",
"entr'admirasses",
"entr'admirassiez",
"entr'admirassions",
"entr'admirât",
"entr'admirâtes",
"entr'admire",
"entr'admiré",
"entr'admirée",
"entr'admirées",
"entr'admirent",
"entr'admirer",
"entr'admirer",
"entr'admirera",
"entr'admirerai",
"entr'admireraient",
"entr'admirerais",
"entr'admirerait",
"entr'admireras",
"entr'admirèrent",
"entr'admirerez",
"entr'admireriez",
"entr'admirerions",
"entr'admirerons",
"entr'admireront",
"entr'admires",
"entr'admirés",
"entr'admirez",
"entr'admiriez",
"entr'admirions",
"entr'admirons",
"entr'admonesta",
"entr'admonestaient",
"entr'admonestait",
"entr'admonestâmes",
"entr'admonestant",
"entr'admonestassent",
"entr'admonestassiez",
"entr'admonestassions",
"entr'admonestât",
"entr'admonestâtes",
"entr'admoneste",
"entr'admonesté",
"entr'admonestées",
"entr'admonestent",
"entr'admonester",
"entr'admonester",
"entr'admonestera",
"entr'admonesteraient",
"entr'admonesterait",
"entr'admonestèrent",
"entr'admonesterez",
"entr'admonesteriez",
"entr'admonesterions",
"entr'admonesterons",
"entr'admonesteront",
"entr'admonestés",
"entr'admonestez",
"entr'admonestiez",
"entr'admonestions",
"entr'admonestons",
"entr'adressa",
"entr'adressaient",
"entr'adressait",
"entr'adressâmes",
"entr'adressant",
"entr'adressassent",
"entr'adressassiez",
"entr'adressassions",
"entr'adressât",
"entr'adressâtes",
"entr'adresse",
"entr'adressé",
"entr'adressées",
"entr'adressent",
"entr'adresser",
"entr'adresser",
"entr'adressera",
"entr'adresseraient",
"entr'adresserait",
"entr'adressèrent",
"entr'adresserez",
"entr'adresseriez",
"entr'adresserions",
"entr'adresserons",
"entr'adresseront",
"entr'adressés",
"entr'adressez",
"entr'adressiez",
"entr'adressions",
"entr'adressons",
"entr'affronta",
"entr'affrontaient",
"entr'affrontait",
"entr'affrontâmes",
"entr'affrontant",
"entr'affrontassent",
"entr'affrontassiez",
"entr'affrontassions",
"entr'affrontât",
"entr'affrontâtes",
"entr'affronte",
"entr'affronté",
"entr'affrontées",
"entr'affrontent",
"entr'affronter",
"entr'affronter",
"entr'affrontera",
"entr'affronteraient",
"entr'affronterait",
"entr'affrontèrent",
"entr'affronterez",
"entr'affronteriez",
"entr'affronterions",
"entr'affronterons",
"entr'affronteront",
"entr'affrontés",
"entr'affrontez",
"entr'affrontiez",
"entr'affrontions",
"entr'affrontons",
"entr'aida",
"entr'aidaient",
"entr'aidait",
"entr'aidâmes",
"entr'aidant",
"entr'aidassent",
"entr'aidassiez",
"entr'aidassions",
"entr'aidât",
"entr'aidâtes",
"entr'aide",
"entr'aidé",
"entr'aidées",
"entr'aident",
"entr'aider",
"entr'aider",
"entr'aidera",
"entr'aideraient",
"entr'aiderait",
"entr'aidèrent",
"entr'aiderez",
"entr'aideriez",
"entr'aiderions",
"entr'aiderons",
"entr'aideront",
"entr'aides",
"entr'aidés",
"entr'aidez",
"entr'aidiez",
"entr'aidions",
"entr'aidons",
"Entraigues-sur-la-Sorgue",
"entr'aiguisa",
"entr'aiguisaient",
"entr'aiguisait",
"entr'aiguisâmes",
"entr'aiguisant",
"entr'aiguisassent",
"entr'aiguisassiez",
"entr'aiguisassions",
"entr'aiguisât",
"entr'aiguisâtes",
"entr'aiguise",
"entr'aiguisé",
"entr'aiguisées",
"entr'aiguisent",
"entr'aiguiser",
"entr'aiguiser",
"entr'aiguisera",
"entr'aiguiseraient",
"entr'aiguiserait",
"entr'aiguisèrent",
"entr'aiguiserez",
"entr'aiguiseriez",
"entr'aiguiserions",
"entr'aiguiserons",
"entr'aiguiseront",
"entr'aiguisés",
"entr'aiguisez",
"entr'aiguisiez",
"entr'aiguisions",
"entr'aiguisons",
"entr'aima",
"entr'aimai",
"entr'aimaient",
"entr'aimais",
"entr'aimait",
"entr'aimâmes",
"entr'aimant",
"entr'aimas",
"entr'aimasse",
"entr'aimassent",
"entr'aimasses",
"entr'aimassiez",
"entr'aimassions",
"entr'aimât",
"entr'aimâtes",
"entr'aime",
"entr'aimé",
"entr'aimée",
"entr'aimées",
"entr'aiment",
"entr'aimer",
"entr'aimer",
"entr'aimera",
"entr'aimerai",
"entr'aimeraient",
"entr'aimerais",
"entr'aimerait",
"entr'aimeras",
"entr'aimèrent",
"entr'aimerez",
"entr'aimeriez",
"entr'aimerions",
"entr'aimerons",
"entr'aimeront",
"entr'aimes",
"entr'aimés",
"entr'aimez",
"entr'aimiez",
"entr'aimions",
"entr'aimons",
"Entrains-sur-Nohain",
"entr'anima",
"entr'animaient",
"entr'animait",
"entr'animâmes",
"entr'animant",
"entr'animassent",
"entr'animassiez",
"entr'animassions",
"entr'animât",
"entr'animâtes",
"entr'anime",
"entr'animé",
"entr'animées",
"entr'animent",
"entr'animer",
"entr'animer",
"entr'animera",
"entr'animeraient",
"entr'animerait",
"entr'animèrent",
"entr'animerez",
"entr'animeriez",
"entr'animerions",
"entr'animerons",
"entr'animeront",
"entr'animés",
"entr'animez",
"entr'animiez",
"entr'animions",
"entr'animons",
"entr'apercevaient",
"entr'apercevais",
"entr'apercevait",
"entr'apercevant",
"entr'apercevez",
"entr'aperceviez",
"entr'apercevions",
"entr'apercevoir",
"entr'apercevons",
"entr'apercevra",
"entr'apercevrai",
"entr'apercevraient",
"entr'apercevrais",
"entr'apercevrait",
"entr'apercevras",
"entr'apercevrez",
"entr'apercevriez",
"entr'apercevrions",
"entr'apercevrons",
"entr'apercevront",
"entr'aperçois",
"entr'aperçoit",
"entr'aperçoive",
"entr'aperçoivent",
"entr'aperçoives",
"entr'aperçu",
"entr'aperçue",
"entr'aperçues",
"entr'aperçûmes",
"entr'aperçurent",
"entr'aperçus",
"entr'aperçusse",
"entr'aperçussent",
"entr'aperçusses",
"entr'aperçussiez",
"entr'aperçussions",
"entr'aperçut",
"entr'aperçût",
"entr'aperçûtes",
"entr'apparais",
"entr'apparaissaient",
"entr'apparaissais",
"entr'apparaissait",
"entr'apparaissant",
"entr'apparaisse",
"entr'apparaissent",
"entr'apparaisses",
"entr'apparaissez",
"entr'apparaissiez",
"entr'apparaissions",
"entr'apparaissons",
"entr'apparait",
"entr'apparaît",
"entr'apparaitra",
"entr'apparaîtra",
"entr'apparaitrai",
"entr'apparaîtrai",
"entr'apparaitraient",
"entr'apparaîtraient",
"entr'apparaitrais",
"entr'apparaîtrais",
"entr'apparaitrait",
"entr'apparaîtrait",
"entr'apparaitras",
"entr'apparaîtras",
"entr'apparaitre",
"entr'apparaître",
"entr'apparaitrez",
"entr'apparaîtrez",
"entr'apparaitriez",
"entr'apparaîtriez",
"entr'apparaitrions",
"entr'apparaîtrions",
"entr'apparaitrons",
"entr'apparaîtrons",
"entr'apparaitront",
"entr'apparaîtront",
"entr'apparu",
"entr'apparue",
"entr'apparues",
"entr'apparûmes",
"entr'apparurent",
"entr'apparus",
"entr'apparusse",
"entr'apparussent",
"entr'apparusses",
"entr'apparussiez",
"entr'apparussions",
"entr'apparut",
"entr'apparût",
"entr'apparûtes",
"entr'appela",
"entr'appelaient",
"entr'appelait",
"entr'appelâmes",
"entr'appelant",
"entr'appelassent",
"entr'appelassiez",
"entr'appelassions",
"entr'appelât",
"entr'appelâtes",
"entr'appelé",
"entr'appelées",
"entr'appeler",
"entr'appeler",
"entr'appelèrent",
"entr'appelés",
"entr'appelez",
"entr'appeliez",
"entr'appelions",
"entr'appelle",
"entr'appellent",
"entr'appellera",
"entr'appelleraient",
"entr'appellerait",
"entr'appellerez",
"entr'appelleriez",
"entr'appellerions",
"entr'appellerons",
"entr'appelleront",
"entr'appelles",
"entr'appelons",
"entr'apprenaient",
"entr'apprenait",
"entr'apprenant",
"entr'apprend",
"entr'apprendra",
"entr'apprendraient",
"entr'apprendrait",
"entr'apprendre",
"entr'apprendre",
"entr'apprendriez",
"entr'apprendrions",
"entr'apprendrons",
"entr'apprendront",
"entr'apprenez",
"entr'appreniez",
"entr'apprenions",
"entr'apprenne",
"entr'apprennent",
"entr'apprennes",
"entr'apprenons",
"entr'apprîmes",
"entr'apprirent",
"entr'appris",
"entr'apprise",
"entr'apprises",
"entr'apprissent",
"entr'apprissiez",
"entr'apprissions",
"entr'apprit",
"entr'apprît",
"entr'apprîtes",
"entr'approcha",
"entr'approchaient",
"entr'approchait",
"entr'approchâmes",
"entr'approchant",
"entr'approchassent",
"entr'approchassiez",
"entr'approchassions",
"entr'approchât",
"entr'approchâtes",
"entr'approche",
"entr'approché",
"entr'approchées",
"entr'approchent",
"entr'approcher",
"entr'approcher",
"entr'approchera",
"entr'approcheraient",
"entr'approcherait",
"entr'approchèrent",
"entr'approcherez",
"entr'approcheriez",
"entr'approcherions",
"entr'approcherons",
"entr'approcheront",
"entr'approchés",
"entr'approchez",
"entr'approchiez",
"entr'approchions",
"entr'approchons",
"entr'arquebusa",
"entr'arquebusaient",
"entr'arquebusait",
"entr'arquebusâmes",
"entr'arquebusant",
"entr'arquebusassent",
"entr'arquebusassiez",
"entr'arquebusassions",
"entr'arquebusât",
"entr'arquebusâtes",
"entr'arquebuse",
"entr'arquebusé",
"entr'arquebusées",
"entr'arquebusent",
"entr'arquebuser",
"entr'arquebuser",
"entr'arquebusera",
"entr'arquebuseraient",
"entr'arquebuserait",
"entr'arquebusèrent",
"entr'arquebuserez",
"entr'arquebuseriez",
"entr'arquebuserions",
"entr'arquebuserons",
"entr'arquebuseront",
"entr'arquebusés",
"entr'arquebusez",
"entr'arquebusiez",
"entr'arquebusions",
"entr'arquebusons",
"entr'assassina",
"entr'assassinaient",
"entr'assassinait",
"entr'assassinâmes",
"entr'assassinant",
"entr'assassinassent",
"entr'assassinassiez",
"entr'assassinassions",
"entr'assassinât",
"entr'assassinâtes",
"entr'assassine",
"entr'assassiné",
"entr'assassinées",
"entr'assassinent",
"entr'assassiner",
"entr'assassiner",
"entr'assassinera",
"entr'assassineraient",
"entr'assassinerait",
"entr'assassinèrent",
"entr'assassinerez",
"entr'assassineriez",
"entr'assassinerions",
"entr'assassinerons",
"entr'assassineront",
"entr'assassinés",
"entr'assassinez",
"entr'assassiniez",
"entr'assassinions",
"entr'assassinons",
"entr'assigna",
"entr'assignaient",
"entr'assignait",
"entr'assignâmes",
"entr'assignant",
"entr'assignassent",
"entr'assignassiez",
"entr'assignassions",
"entr'assignât",
"entr'assignâtes",
"entr'assigne",
"entr'assigné",
"entr'assignées",
"entr'assignent",
"entr'assigner",
"entr'assigner",
"entr'assignera",
"entr'assigneraient",
"entr'assignerait",
"entr'assignèrent",
"entr'assignerez",
"entr'assigneriez",
"entr'assignerions",
"entr'assignerons",
"entr'assigneront",
"entr'assignés",
"entr'assignez",
"entr'assigniez",
"entr'assignions",
"entr'assignons",
"entr'assomma",
"entr'assommaient",
"entr'assommait",
"entr'assommâmes",
"entr'assommant",
"entr'assommassent",
"entr'assommassiez",
"entr'assommassions",
"entr'assommât",
"entr'assommâtes",
"entr'assomme",
"entr'assommé",
"entr'assommées",
"entr'assomment",
"entr'assommer",
"entr'assommer",
"entr'assommera",
"entr'assommeraient",
"entr'assommerait",
"entr'assommèrent",
"entr'assommerez",
"entr'assommeriez",
"entr'assommerions",
"entr'assommerons",
"entr'assommeront",
"entr'assommés",
"entr'assommez",
"entr'assommiez",
"entr'assommions",
"entr'assommons",
"entr'attaqua",
"entr'attaquaient",
"entr'attaquait",
"entr'attaquâmes",
"entr'attaquant",
"entr'attaquassent",
"entr'attaquassiez",
"entr'attaquassions",
"entr'attaquât",
"entr'attaquâtes",
"entr'attaque",
"entr'attaqué",
"entr'attaquées",
"entr'attaquent",
"entr'attaquer",
"entr'attaquer",
"entr'attaquera",
"entr'attaqueraient",
"entr'attaquerait",
"entr'attaquèrent",
"entr'attaquerez",
"entr'attaqueriez",
"entr'attaquerions",
"entr'attaquerons",
"entr'attaqueront",
"entr'attaqués",
"entr'attaquez",
"entr'attaquiez",
"entr'attaquions",
"entr'attaquons",
"entr'attend",
"entr'attendaient",
"entr'attendait",
"entr'attendant",
"entr'attende",
"entr'attendent",
"entr'attendez",
"entr'attendiez",
"entr'attendîmes",
"entr'attendions",
"entr'attendirent",
"entr'attendissent",
"entr'attendissiez",
"entr'attendissions",
"entr'attendit",
"entr'attendît",
"entr'attendîtes",
"entr'attendons",
"entr'attendra",
"entr'attendraient",
"entr'attendrait",
"entr'attendre",
"entr'attendre",
"entr'attendrez",
"entr'attendriez",
"entr'attendrions",
"entr'attendrons",
"entr'attendront",
"entr'attendu",
"entr'attendue",
"entr'attendues",
"entr'attendus",
"entr'autres",
"entr'averti",
"entr'averties",
"entr'avertîmes",
"entr'avertir",
"entr'avertir",
"entr'avertira",
"entr'avertiraient",
"entr'avertirait",
"entr'avertirent",
"entr'avertirez",
"entr'avertiriez",
"entr'avertirions",
"entr'avertirons",
"entr'avertiront",
"entr'avertis",
"entr'avertissaient",
"entr'avertissait",
"entr'avertissant",
"entr'avertisse",
"entr'avertissent",
"entr'avertissez",
"entr'avertissiez",
"entr'avertissions",
"entr'avertissons",
"entr'avertit",
"entr'avertît",
"entr'avertîtes",
"entr'avoua",
"entr'avouaient",
"entr'avouait",
"entr'avouâmes",
"entr'avouant",
"entr'avouassent",
"entr'avouassiez",
"entr'avouassions",
"entr'avouât",
"entr'avouâtes",
"entr'avoue",
"entr'avoué",
"entr'avouées",
"entr'avouent",
"entr'avouer",
"entr'avouer",
"entr'avouera",
"entr'avoueraient",
"entr'avouerait",
"entr'avouèrent",
"entr'avouerez",
"entr'avoueriez",
"entr'avouerions",
"entr'avouerons",
"entr'avoueront",
"entr'avoués",
"entr'avouez",
"entr'avouiez",
"entr'avouions",
"entr'avouons",
"entr'axe",
"entr'axes",
"Entraygues-sur-Truyère",
"entr'ébranla",
"entr'ébranlaient",
"entr'ébranlait",
"entr'ébranlâmes",
"entr'ébranlant",
"entr'ébranlassent",
"entr'ébranlassiez",
"entr'ébranlassions",
"entr'ébranlât",
"entr'ébranlâtes",
"entr'ébranle",
"entr'ébranlé",
"entr'ébranlées",
"entr'ébranlent",
"entr'ébranler",
"entr'ébranlera",
"entr'ébranleraient",
"entr'ébranlerait",
"entr'ébranlèrent",
"entr'ébranlerez",
"entr'ébranleriez",
"entr'ébranlerions",
"entr'ébranlerons",
"entr'ébranleront",
"entr'ébranlés",
"entr'ébranlez",
"entr'ébranliez",
"entr'ébranlions",
"entr'ébranlons",
"entr'éclairci",
"entr'éclaircies",
"entr'éclaircîmes",
"entr'éclaircir",
"entr'éclaircir",
"entr'éclaircira",
"entr'éclairciraient",
"entr'éclaircirait",
"entr'éclaircirent",
"entr'éclaircirez",
"entr'éclairciriez",
"entr'éclaircirions",
"entr'éclaircirons",
"entr'éclairciront",
"entr'éclaircis",
"entr'éclaircissaient",
"entr'éclaircissait",
"entr'éclaircissant",
"entr'éclaircisse",
"entr'éclaircissent",
"entr'éclaircissez",
"entr'éclaircissiez",
"entr'éclaircissions",
"entr'éclaircissons",
"entr'éclaircit",
"entr'éclaircît",
"entr'éclaircîtes",
"entr'éclore",
"entr'éclose",
"entr'écouta",
"entr'écoutaient",
"entr'écoutait",
"entr'écoutâmes",
"entr'écoutant",
"entr'écoutassent",
"entr'écoutassiez",
"entr'écoutassions",
"entr'écoutât",
"entr'écoutâtes",
"entr'écoute",
"entr'écouté",
"entr'écoutées",
"entr'écoutent",
"entr'écouter",
"entr'écoutera",
"entr'écouteraient",
"entr'écouterait",
"entr'écoutèrent",
"entr'écouterez",
"entr'écouteriez",
"entr'écouterions",
"entr'écouterons",
"entr'écouteront",
"entr'écoutés",
"entr'écoutez",
"entr'écoutiez",
"entr'écoutions",
"entr'écoutons",
"entr'écrasa",
"entr'écrasai",
"entr'écrasaient",
"entr'écrasais",
"entr'écrasait",
"entr'écrasâmes",
"entr'écrasant",
"entr'écrasas",
"entr'écrasasse",
"entr'écrasassent",
"entr'écrasasses",
"entr'écrasassiez",
"entr'écrasassions",
"entr'écrasât",
"entr'écrasâtes",
"entr'écrase",
"entr'écrasé",
"entr'écrasée",
"entr'écrasées",
"entr'écrasent",
"entr'écraser",
"entr'écraser",
"entr'écrasera",
"entr'écraserai",
"entr'écraseraient",
"entr'écraserais",
"entr'écraserait",
"entr'écraseras",
"entr'écrasèrent",
"entr'écraserez",
"entr'écraseriez",
"entr'écraserions",
"entr'écraserons",
"entr'écraseront",
"entr'écrases",
"entr'écrasés",
"entr'écrasez",
"entr'écrasiez",
"entr'écrasions",
"entr'écrasons",
"entr'écrira",
"entr'écriraient",
"entr'écrirait",
"entr'écrire",
"entr'écrire",
"entr'écrirez",
"entr'écririez",
"entr'écririons",
"entr'écrirons",
"entr'écriront",
"entr'écrit",
"entr'écrite",
"entr'écrites",
"entr'écrits",
"entr'écrivaient",
"entr'écrivait",
"entr'écrivant",
"entr'écrive",
"entr'écrivent",
"entr'écrivez",
"entr'écriviez",
"entr'écrivîmes",
"entr'écrivions",
"entr'écrivirent",
"entr'écrivissent",
"entr'écrivissions",
"entr'écrivit",
"entr'écrivît",
"entr'écrivîtes",
"entr'écrivons",
"entrée-sortie",
"entrées-sorties",
"entr'égorge",
"entr'égorgé",
"entr'égorgea",
"entr'égorgeai",
"entr'égorgeaient",
"entr'égorgeait",
"entr'égorgeâmes",
"entr'égorgeant",
"entr'égorgeassent",
"entr'égorgeassiez",
"entr'égorgeassions",
"entr'égorgeât",
"entr'égorgeâtes",
"entr'égorgée",
"entr'égorgées",
"entr'égorgemens",
"entr'égorgement",
"entr'égorgements",
"entr'égorgent",
"entr'égorgeons",
"entr'égorger",
"entr'égorger",
"entr'égorgera",
"entr'égorgeraient",
"entr'égorgerait",
"entr'égorgèrent",
"entr'égorgerez",
"entr'égorgeriez",
"entr'égorgerions",
"entr'égorgerons",
"entr'égorgeront",
"entr'égorges",
"entr'égorgés",
"entr'égorgez",
"entr'égorgiez",
"entr'égorgions",
"entr'égratigna",
"entr'égratignaient",
"entr'égratignait",
"entr'égratignâmes",
"entr'égratignant",
"entr'égratignassent",
"entr'égratignassiez",
"entr'égratignassions",
"entr'égratignât",
"entr'égratignâtes",
"entr'égratigne",
"entr'égratigné",
"entr'égratignées",
"entr'égratignent",
"entr'égratigner",
"entr'égratigner",
"entr'égratignera",
"entr'égratigneraient",
"entr'égratignerait",
"entr'égratignèrent",
"entr'égratignerez",
"entr'égratigneriez",
"entr'égratignerions",
"entr'égratignerons",
"entr'égratigneront",
"entr'égratignés",
"entr'égratignez",
"entr'égratigniez",
"entr'égratignions",
"entr'égratignons",
"entr'embarrassa",
"entr'embarrassaient",
"entr'embarrassait",
"entr'embarrassâmes",
"entr'embarrassant",
"entr'embarrassassent",
"entr'embarrassassiez",
"entr'embarrassassions",
"entr'embarrassât",
"entr'embarrassâtes",
"entr'embarrasse",
"entr'embarrassé",
"entr'embarrassées",
"entr'embarrassent",
"entr'embarrasser",
"entr'embarrasser",
"entr'embarrassera",
"entr'embarrasseraient",
"entr'embarrasserait",
"entr'embarrassèrent",
"entr'embarrasserez",
"entr'embarrasseriez",
"entr'embarrasserions",
"entr'embarrasserons",
"entr'embarrasseront",
"entr'embarrassés",
"entr'embarrassez",
"entr'embarrassiez",
"entr'embarrassions",
"entr'embarrassons",
"entr'embrassa",
"entr'embrassaient",
"entr'embrassait",
"entr'embrassâmes",
"entr'embrassant",
"entr'embrassassent",
"entr'embrassassiez",
"entr'embrassassions",
"entr'embrassât",
"entr'embrassâtes",
"entr'embrasse",
"entr'embrassé",
"entr'embrassées",
"entr'embrassent",
"entr'embrasser",
"entr'embrasser",
"entr'embrassera",
"entr'embrasseraient",
"entr'embrasserait",
"entr'embrassèrent",
"entr'embrasserez",
"entr'embrasseriez",
"entr'embrasserions",
"entr'embrasserons",
"entr'embrasseront",
"entr'embrassés",
"entr'embrassez",
"entr'embrassiez",
"entr'embrassions",
"entr'embrassons",
"Entremont-le-Vieux",
"entr'empêcha",
"entr'empêchaient",
"entr'empêchait",
"entr'empêchâmes",
"entr'empêchant",
"entr'empêchassent",
"entr'empêchassiez",
"entr'empêchassions",
"entr'empêchât",
"entr'empêchâtes",
"entr'empêche",
"entr'empêché",
"entr'empêchées",
"entr'empêchent",
"entr'empêcher",
"entr'empêcher",
"entr'empêchera",
"entr'empêcheraient",
"entr'empêcherait",
"entr'empêchèrent",
"entr'empêcherez",
"entr'empêcheriez",
"entr'empêcherions",
"entr'empêcherons",
"entr'empêcheront",
"entr'empêchés",
"entr'empêchez",
"entr'empêchiez",
"entr'empêchions",
"entr'empêchons",
"entr'encourage",
"entr'encouragé",
"entr'encouragea",
"entr'encourageaient",
"entr'encourageait",
"entr'encourageâmes",
"entr'encourageant",
"entr'encourageassent",
"entr'encourageassiez",
"entr'encourageassions",
"entr'encourageât",
"entr'encourageâtes",
"entr'encouragées",
"entr'encouragent",
"entr'encourageons",
"entr'encourager",
"entr'encourager",
"entr'encouragera",
"entr'encourageraient",
"entr'encouragerait",
"entr'encouragèrent",
"entr'encouragerez",
"entr'encourageriez",
"entr'encouragerions",
"entr'encouragerons",
"entr'encourageront",
"entr'encouragés",
"entr'encouragez",
"entr'encouragiez",
"entr'encouragions",
"entr'enleva",
"entr'enlevaient",
"entr'enlevait",
"entr'enlevâmes",
"entr'enlevant",
"entr'enlevassent",
"entr'enlevassiez",
"entr'enlevassions",
"entr'enlevât",
"entr'enlevâtes",
"entr'enlève",
"entr'enlevé",
"entr'enlevées",
"entr'enlèvent",
"entr'enlever",
"entr'enlever",
"entr'enlèvera",
"entr'enlèveraient",
"entr'enlèverait",
"entr'enlevèrent",
"entr'enlèverez",
"entr'enlèveriez",
"entr'enlèverions",
"entr'enlèverons",
"entr'enlèveront",
"entr'enlevés",
"entr'enlevez",
"entr'enleviez",
"entr'enlevions",
"entr'enlevons",
"entr'entend",
"entr'entendaient",
"entr'entendait",
"entr'entendant",
"entr'entende",
"entr'entendent",
"entr'entendez",
"entr'entendiez",
"entr'entendîmes",
"entr'entendions",
"entr'entendirent",
"entr'entendissent",
"entr'entendissiez",
"entr'entendissions",
"entr'entendit",
"entr'entendît",
"entr'entendîtes",
"entr'entendons",
"entr'entendra",
"entr'entendraient",
"entr'entendrait",
"entr'entendre",
"entr'entendre",
"entr'entendrez",
"entr'entendriez",
"entr'entendrions",
"entr'entendrons",
"entr'entendront",
"entr'entendu",
"entr'entendue",
"entr'entendues",
"entr'entendus",
"entr'enverra",
"entr'enverrai",
"entr'enverraient",
"entr'enverrais",
"entr'enverrait",
"entr'enverras",
"entr'enverrez",
"entr'enverriez",
"entr'enverrions",
"entr'enverrons",
"entr'enverront",
"entr'envoie",
"entr'envoient",
"entr'envoies",
"entr'envoya",
"entr'envoyai",
"entr'envoyaient",
"entr'envoyais",
"entr'envoyait",
"entr'envoyâmes",
"entr'envoyant",
"entr'envoyas",
"entr'envoyasse",
"entr'envoyassent",
"entr'envoyasses",
"entr'envoyassiez",
"entr'envoyassions",
"entr'envoyât",
"entr'envoyâtes",
"entr'envoyé",
"entr'envoyée",
"entr'envoyées",
"entr'envoyer",
"entr'envoyer",
"entr'envoyèrent",
"entr'envoyés",
"entr'envoyez",
"entr'envoyiez",
"entr'envoyions",
"entr'envoyons",
"entr'épia",
"entr'épiaient",
"entr'épiait",
"entr'épiâmes",
"entr'épiant",
"entr'épiassent",
"entr'épiassiez",
"entr'épiassions",
"entr'épiât",
"entr'épiâtes",
"entr'épie",
"entr'épié",
"entr'épiées",
"entr'épient",
"entr'épier",
"entr'épier",
"entr'épiera",
"entr'épieraient",
"entr'épierait",
"entr'épièrent",
"entr'épierez",
"entr'épieriez",
"entr'épierions",
"entr'épierons",
"entr'épieront",
"entr'épiés",
"entr'épiez",
"entr'épiiez",
"entr'épiions",
"entr'épions",
"entr'éprouva",
"entr'éprouvaient",
"entr'éprouvait",
"entr'éprouvâmes",
"entr'éprouvant",
"entr'éprouvassent",
"entr'éprouvassiez",
"entr'éprouvassions",
"entr'éprouvât",
"entr'éprouvâtes",
"entr'éprouve",
"entr'éprouvé",
"entr'éprouvées",
"entr'éprouvent",
"entr'éprouver",
"entr'éprouver",
"entr'éprouvera",
"entr'éprouveraient",
"entr'éprouverait",
"entr'éprouvèrent",
"entr'éprouverez",
"entr'éprouveriez",
"entr'éprouverions",
"entr'éprouverons",
"entr'éprouveront",
"entr'éprouvés",
"entr'éprouvez",
"entr'éprouviez",
"entr'éprouvions",
"entr'éprouvons",
"entrer-coucher",
"entr'escroqua",
"entr'escroquaient",
"entr'escroquait",
"entr'escroquâmes",
"entr'escroquant",
"entr'escroquassent",
"entr'escroquassiez",
"entr'escroquassions",
"entr'escroquât",
"entr'escroquâtes",
"entr'escroque",
"entr'escroqué",
"entr'escroquées",
"entr'escroquent",
"entr'escroquer",
"entr'escroquer",
"entr'escroquera",
"entr'escroqueraient",
"entr'escroquerait",
"entr'escroquèrent",
"entr'escroquerez",
"entr'escroqueriez",
"entr'escroquerions",
"entr'escroquerons",
"entr'escroqueront",
"entr'escroqués",
"entr'escroquez",
"entr'escroquiez",
"entr'escroquions",
"entr'escroquons",
"entr'étouffa",
"entr'étouffaient",
"entr'étouffait",
"entr'étouffâmes",
"entr'étouffant",
"entr'étouffassent",
"entr'étouffassiez",
"entr'étouffassions",
"entr'étouffât",
"entr'étouffâtes",
"entr'étouffe",
"entr'étouffé",
"entr'étouffées",
"entr'étouffent",
"entr'étouffer",
"entr'étouffer",
"entr'étouffera",
"entr'étoufferaient",
"entr'étoufferait",
"entr'étouffèrent",
"entr'étoufferez",
"entr'étoufferiez",
"entr'étoufferions",
"entr'étoufferons",
"entr'étoufferont",
"entr'étouffés",
"entr'étouffez",
"entr'étouffiez",
"entr'étouffions",
"entr'étouffons",
"entr'étripa",
"entr'étripaient",
"entr'étripait",
"entr'étripâmes",
"entr'étripant",
"entr'étripassent",
"entr'étripassiez",
"entr'étripassions",
"entr'étripât",
"entr'étripâtes",
"entr'étripe",
"entr'étripé",
"entr'étripées",
"entr'étripent",
"entr'étriper",
"entr'étriper",
"entr'étripera",
"entr'étriperaient",
"entr'étriperait",
"entr'étripèrent",
"entr'étriperez",
"entr'étriperiez",
"entr'étriperions",
"entr'étriperons",
"entr'étriperont",
"entr'étripés",
"entr'étripez",
"entr'étripiez",
"entr'étripions",
"entr'étripons",
"entr'eux",
"entr'éveilla",
"entr'éveillaient",
"entr'éveillait",
"entr'éveillâmes",
"entr'éveillant",
"entr'éveillassent",
"entr'éveillassiez",
"entr'éveillassions",
"entr'éveillât",
"entr'éveillâtes",
"entr'éveille",
"entr'éveillé",
"entr'éveillées",
"entr'éveillent",
"entr'éveiller",
"entr'éveiller",
"entr'éveillera",
"entr'éveilleraient",
"entr'éveillerait",
"entr'éveillèrent",
"entr'éveillerez",
"entr'éveilleriez",
"entr'éveillerions",
"entr'éveillerons",
"entr'éveilleront",
"entr'éveillés",
"entr'éveillez",
"entr'éveilliez",
"entr'éveillions",
"entr'éveillons",
"entr'excita",
"entr'excitaient",
"entr'excitait",
"entr'excitâmes",
"entr'excitant",
"entr'excitassent",
"entr'excitassiez",
"entr'excitassions",
"entr'excitât",
"entr'excitâtes",
"entr'excite",
"entr'excité",
"entr'excitées",
"entr'excitent",
"entr'exciter",
"entr'exciter",
"entr'excitera",
"entr'exciteraient",
"entr'exciterait",
"entr'excitèrent",
"entr'exciterez",
"entr'exciteriez",
"entr'exciterions",
"entr'exciterons",
"entr'exciteront",
"entr'excités",
"entr'excitez",
"entr'excitiez",
"entr'excitions",
"entr'excitons",
"entr'exhorta",
"entr'exhortaient",
"entr'exhortait",
"entr'exhortâmes",
"entr'exhortant",
"entr'exhortassent",
"entr'exhortassiez",
"entr'exhortassions",
"entr'exhortât",
"entr'exhortâtes",
"entr'exhorte",
"entr'exhorté",
"entr'exhortées",
"entr'exhortent",
"entr'exhorter",
"entr'exhorter",
"entr'exhortera",
"entr'exhorteraient",
"entr'exhorterait",
"entr'exhortèrent",
"entr'exhorterez",
"entr'exhorteriez",
"entr'exhorterions",
"entr'exhorterons",
"entr'exhorteront",
"entr'exhortés",
"entr'exhortez",
"entr'exhortiez",
"entr'exhortions",
"entr'exhortons",
"entr'hiver",
"entr'hiverna",
"entr'hivernai",
"entr'hivernaient",
"entr'hivernais",
"entr'hivernait",
"entr'hivernâmes",
"entr'hivernant",
"entr'hivernas",
"entr'hivernasse",
"entr'hivernassent",
"entr'hivernasses",
"entr'hivernassiez",
"entr'hivernassions",
"entr'hivernât",
"entr'hivernâtes",
"entr'hiverne",
"entr'hiverné",
"entr'hivernée",
"entr'hivernées",
"entr'hivernent",
"entr'hiverner",
"entr'hivernera",
"entr'hivernerai",
"entr'hiverneraient",
"entr'hivernerais",
"entr'hivernerait",
"entr'hiverneras",
"entr'hivernèrent",
"entr'hivernerez",
"entr'hiverneriez",
"entr'hivernerions",
"entr'hivernerons",
"entr'hiverneront",
"entr'hivernes",
"entr'hivernés",
"entr'hivernez",
"entr'hiverniez",
"entr'hivernions",
"entr'hivernons",
"entr'honora",
"entr'honoraient",
"entr'honorait",
"entr'honorâmes",
"entr'honorant",
"entr'honorassent",
"entr'honorassiez",
"entr'honorassions",
"entr'honorât",
"entr'honorâtes",
"entr'honore",
"entr'honoré",
"entr'honorées",
"entr'honorent",
"entr'honorer",
"entr'honorer",
"entr'honorera",
"entr'honoreraient",
"entr'honorerait",
"entr'honorèrent",
"entr'honorerez",
"entr'honoreriez",
"entr'honorerions",
"entr'honorerons",
"entr'honoreront",
"entr'honorés",
"entr'honorez",
"entr'honoriez",
"entr'honorions",
"entr'honorons",
"entr'immola",
"entr'immolaient",
"entr'immolait",
"entr'immolâmes",
"entr'immolant",
"entr'immolassent",
"entr'immolassiez",
"entr'immolassions",
"entr'immolât",
"entr'immolâtes",
"entr'immole",
"entr'immolé",
"entr'immolées",
"entr'immolent",
"entr'immoler",
"entr'immoler",
"entr'immolera",
"entr'immoleraient",
"entr'immolerait",
"entr'immolèrent",
"entr'immolerez",
"entr'immoleriez",
"entr'immolerions",
"entr'immolerons",
"entr'immoleront",
"entr'immolés",
"entr'immolez",
"entr'immoliez",
"entr'immolions",
"entr'immolons",
"entr'incommoda",
"entr'incommodaient",
"entr'incommodait",
"entr'incommodâmes",
"entr'incommodant",
"entr'incommodassent",
"entr'incommodassiez",
"entr'incommodassions",
"entr'incommodât",
"entr'incommodâtes",
"entr'incommode",
"entr'incommodé",
"entr'incommodées",
"entr'incommodent",
"entr'incommoder",
"entr'incommoder",
"entr'incommodera",
"entr'incommoderaient",
"entr'incommoderait",
"entr'incommodèrent",
"entr'incommoderez",
"entr'incommoderiez",
"entr'incommoderions",
"entr'incommoderons",
"entr'incommoderont",
"entr'incommodés",
"entr'incommodez",
"entr'incommodiez",
"entr'incommodions",
"entr'incommodons",
"entr'injuria",
"entr'injuriaient",
"entr'injuriait",
"entr'injuriâmes",
"entr'injuriant",
"entr'injuriassent",
"entr'injuriassiez",
"entr'injuriassions",
"entr'injuriât",
"entr'injuriâtes",
"entr'injurie",
"entr'injurié",
"entr'injuriées",
"entr'injurient",
"entr'injurier",
"entr'injurier",
"entr'injuriera",
"entr'injurieraient",
"entr'injurierait",
"entr'injurièrent",
"entr'injurierez",
"entr'injurieriez",
"entr'injurierions",
"entr'injurierons",
"entr'injurieront",
"entr'injuriés",
"entr'injuriez",
"entr'injuriiez",
"entr'injuriions",
"entr'injurions",
"entr'instruira",
"entr'instruiraient",
"entr'instruirait",
"entr'instruire",
"entr'instruire",
"entr'instruirez",
"entr'instruiriez",
"entr'instruirions",
"entr'instruirons",
"entr'instruiront",
"entr'instruisaient",
"entr'instruisait",
"entr'instruisant",
"entr'instruise",
"entr'instruisent",
"entr'instruisez",
"entr'instruisiez",
"entr'instruisîmes",
"entr'instruisions",
"entr'instruisirent",
"entr'instruisissent",
"entr'instruisissions",
"entr'instruisit",
"entr'instruisît",
"entr'instruisîtes",
"entr'instruisons",
"entr'instruit",
"entr'instruite",
"entr'instruites",
"entr'instruits",
"entr'oblige",
"entr'obligé",
"entr'obligea",
"entr'obligeaient",
"entr'obligeait",
"entr'obligeâmes",
"entr'obligeant",
"entr'obligeassent",
"entr'obligeassiez",
"entr'obligeassions",
"entr'obligeât",
"entr'obligeâtes",
"entr'obligées",
"entr'obligent",
"entr'obligeons",
"entr'obliger",
"entr'obliger",
"entr'obligera",
"entr'obligeraient",
"entr'obligerait",
"entr'obligèrent",
"entr'obligerez",
"entr'obligeriez",
"entr'obligerions",
"entr'obligerons",
"entr'obligeront",
"entr'obligés",
"entr'obligez",
"entr'obligiez",
"entr'obligions",
"entr'offensa",
"entr'offensaient",
"entr'offensait",
"entr'offensâmes",
"entr'offensant",
"entr'offensassent",
"entr'offensassiez",
"entr'offensassions",
"entr'offensât",
"entr'offensâtes",
"entr'offense",
"entr'offensé",
"entr'offensées",
"entr'offensent",
"entr'offenser",
"entr'offenser",
"entr'offensera",
"entr'offenseraient",
"entr'offenserait",
"entr'offensèrent",
"entr'offenserez",
"entr'offenseriez",
"entr'offenserions",
"entr'offenserons",
"entr'offenseront",
"entr'offensés",
"entr'offensez",
"entr'offensiez",
"entr'offensions",
"entr'offensons",
"entr'oie",
"entr'oient",
"entr'oies",
"entr'ois",
"entr'oit",
"entr'ombrage",
"entr'ombragé",
"entr'ombragea",
"entr'ombrageaient",
"entr'ombrageait",
"entr'ombrageâmes",
"entr'ombrageant",
"entr'ombrageassent",
"entr'ombrageassiez",
"entr'ombrageassions",
"entr'ombrageât",
"entr'ombrageâtes",
"entr'ombragées",
"entr'ombragent",
"entr'ombrageons",
"entr'ombrager",
"entr'ombrager",
"entr'ombragera",
"entr'ombrageraient",
"entr'ombragerait",
"entr'ombragèrent",
"entr'ombragerez",
"entr'ombrageriez",
"entr'ombragerions",
"entr'ombragerons",
"entr'ombrageront",
"entr'ombragés",
"entr'ombragez",
"entr'ombragiez",
"entr'ombragions",
"entr'opercule",
"entr'orraient",
"entr'orrais",
"entr'orrait",
"entr'orriez",
"entr'orrions",
"entr'oublia",
"entr'oubliaient",
"entr'oubliait",
"entr'oubliâmes",
"entr'oubliant",
"entr'oubliassent",
"entr'oubliassiez",
"entr'oubliassions",
"entr'oubliât",
"entr'oubliâtes",
"entr'oublie",
"entr'oublié",
"entr'oubliées",
"entr'oublient",
"entr'oublier",
"entr'oublier",
"entr'oubliera",
"entr'oublieraient",
"entr'oublierait",
"entr'oublièrent",
"entr'oublierez",
"entr'oublieriez",
"entr'oublierions",
"entr'oublierons",
"entr'oublieront",
"entr'oubliés",
"entr'oubliez",
"entr'oubliiez",
"entr'oubliions",
"entr'oublions",
"entr'ouï",
"entr'ouïe",
"entr'ouïes",
"entr'ouïmes",
"entr'ouïr",
"entr'ouïra",
"entr'ouïrai",
"entr'ouïraient",
"entr'ouïrais",
"entr'ouïrait",
"entr'ouïras",
"entr'ouïrent",
"entr'ouïrez",
"entr'ouïriez",
"entr'ouïrions",
"entr'ouïrons",
"entr'ouïront",
"entr'ouïs",
"entr'ouïsse",
"entr'ouïssent",
"entr'ouïsses",
"entr'ouïssiez",
"entr'ouïssions",
"entr'ouït",
"entr'ouïtes",
"entr'outrage",
"entr'outragé",
"entr'outragea",
"entr'outrageaient",
"entr'outrageait",
"entr'outrageâmes",
"entr'outrageant",
"entr'outrageassent",
"entr'outrageassiez",
"entr'outrageassions",
"entr'outrageât",
"entr'outrageâtes",
"entr'outragées",
"entr'outragent",
"entr'outrageons",
"entr'outrager",
"entr'outrager",
"entr'outragera",
"entr'outrageraient",
"entr'outragerait",
"entr'outragèrent",
"entr'outragerez",
"entr'outrageriez",
"entr'outragerions",
"entr'outragerons",
"entr'outrageront",
"entr'outragés",
"entr'outragez",
"entr'outragiez",
"entr'outragions",
"entr'ouvert",
"entr'ouverte",
"entr'ouvertes",
"entr'ouverts",
"entr'ouverture",
"entr'ouvraient",
"entr'ouvrais",
"entr'ouvrait",
"entr'ouvrant",
"entr'ouvre",
"entr'ouvrent",
"entr'ouvres",
"entr'ouvrez",
"entr'ouvriez",
"entr'ouvrîmes",
"entr'ouvrions",
"entr'ouvrir",
"entr'ouvrir",
"entr'ouvrira",
"entr'ouvrirai",
"entr'ouvriraient",
"entr'ouvrirais",
"entr'ouvrirait",
"entr'ouvriras",
"entr'ouvrirent",
"entr'ouvrirez",
"entr'ouvririez",
"entr'ouvririons",
"entr'ouvrirons",
"entr'ouvriront",
"entr'ouvris",
"entr'ouvrisse",
"entr'ouvrissent",
"entr'ouvrisses",
"entr'ouvrissiez",
"entr'ouvrissions",
"entr'ouvrit",
"entr'ouvrît",
"entr'ouvrîtes",
"entr'ouvrons",
"entr'oyaient",
"entr'oyais",
"entr'oyait",
"entr'oyant",
"entr'oyez",
"entr'oyiez",
"entr'oyions",
"entr'oyons",
"entr'usa",
"entr'usaient",
"entr'usait",
"entr'usâmes",
"entr'usant",
"entr'usassent",
"entr'usassiez",
"entr'usassions",
"entr'usât",
"entr'usâtes",
"entr'use",
"entr'usé",
"entr'usées",
"entr'usent",
"entr'user",
"entr'user",
"entr'usera",
"entr'useraient",
"entr'userait",
"entr'usèrent",
"entr'userez",
"entr'useriez",
"entr'userions",
"entr'userons",
"entr'useront",
"entr'usés",
"entr'usez",
"entr'usiez",
"entr'usions",
"entr'usons",
"Éole-en-Beauce",
"éoli-harpe",
"Epagne-Epagnette",
"Épagne-Épagnette",
"épargne-logement",
"épaulé-jeté",
"épaulés-jetés",
"Epaux-Bézu",
"Épaux-Bézu",
"Epeigné-les-Bois",
"Épeigné-les-Bois",
"Epeigné-sur-Dême",
"Épeigné-sur-Dême",
"Epercieux-Saint-Paul",
"Épercieux-Saint-Paul",
"Epernay-sous-Gevrey",
"Épernay-sous-Gevrey",
"Epiais-lès-Louvres",
"Épiais-lès-Louvres",
"Epiais-Rhus",
"Épiais-Rhus",
"Epi-Contois",
"épi-contois",
"Épi-Contois",
"Epi-Contoise",
"épi-contoise",
"Épi-Contoise",
"Epi-Contoises",
"épi-contoises",
"Épi-Contoises",
"épidote-gris",
"Epieds-en-Beauce",
"Épieds-en-Beauce",
"Epiez-sur-Chiers",
"Épiez-sur-Chiers",
"Epiez-sur-Meuse",
"Épiez-sur-Meuse",
"Épinac-les-Mines",
"épinard-fraise",
"Epinay-Champlâtreux",
"Épinay-Champlâtreux",
"Epinay-le-Comte",
"Épinay-le-Comte",
"Epinay-sous-Sénart",
"Épinay-sous-Sénart",
"Epinay-sur-Duclair",
"Épinay-sur-Duclair",
"Epinay-sur-Odon",
"Épinay-sur-Odon",
"Epinay-sur-Orge",
"Épinay-sur-Orge",
"Epinay-sur-Seine",
"Épinay-sur-Seine",
"Epineau-les-Voves",
"Épineau-les-Voves",
"Epine-aux-Bois",
"Épine-aux-Bois",
"épine-du-Christ",
"épine-fleurie",
"épines-vinettes",
"Epineuil-le-Fleuriel",
"Épineuil-le-Fleuriel",
"Epineu-le-Chevreuil",
"Épineu-le-Chevreuil",
"Epineux-le-Seguin",
"Épineux-le-Seguin",
"épine-vinette",
"épiplo-entérocèle",
"épiplo-ischiocèle",
"épiplo-mérocèle",
"épluche-légume",
"épluche-légumes",
"Eppenberg-Wöschnau",
"Eppe-Sauvage",
"Epreville-en-Lieuvin",
"Épreville-en-Lieuvin",
"Epreville-en-Roumois",
"Épreville-en-Roumois",
"Epreville-près-le-Neubourg",
"Épreville-près-le-Neubourg",
"e-procurement",
"e-procurements",
"ep's",
"épuises-volantes",
"épuise-volante",
"équato-guinéen",
"équato-guinéenne",
"équato-guinéennes",
"équato-guinéens",
"Équatoria-Central",
"Équatoria-Occidental",
"Équatoria-Oriental",
"Equennes-Eramecourt",
"Équennes-Éramecourt",
"Equeurdreville-Hainneville",
"Équeurdreville-Hainneville",
"Equihen-Plage",
"Équihen-Plage",
"Eragny-sur-Epte",
"Éragny-sur-Epte",
"Éragny-sur-Oise",
"Erbes-Büdesheim",
"Erbéviller-sur-Amezule",
"Ercé-en-Lamée",
"Ercé-près-Liffré",
"Erdre-en-Anjou",
"e-reader",
"e-readers",
"e-réputation",
"e-réputations",
"e-réservation",
"e-réservations",
"Ergué-Armel",
"Ergué-Gabéric",
"Erize-la-Brûlée",
"Érize-la-Brûlée",
"Érize-la-Grande",
"Erize-la-Petite",
"Érize-la-Petite",
"Erize-Saint-Dizier",
"Érize-Saint-Dizier",
"Erlangen-Höchstadt",
"Erlbach-Kirchberg",
"Ermenonville-la-Grande",
"Ermenonville-la-Petite",
"Ermeton-sur-Biert",
"Ernemont-Boutavent",
"Ernemont-la-Villette",
"Ernemont-sur-Buchy",
"Erneville-aux-Bois",
"Ernolsheim-Bruche",
"Ernolsheim-lès-Saverne",
"Ernolsheim-Saverne",
"Erny-Saint-Julien",
"Erpe-Mere",
"Erps-Kwerps",
"Erquinghem-le-Sec",
"Erquinghem-Lys",
"Ervy-le-Châtel",
"e-santé",
"Esboz-Brest",
"Eschbach-au-Val",
"Eschêne-Autrage",
"Esch-sur-Alzette",
"Esch-sur-Sûre",
"Esclassan-Labastide",
"Esclavolles-Lurey",
"Escles-Saint-Pierre",
"Escolives-Sainte-Camille",
"Escombres-et-le-Chesnois",
"Escoubès-Pouts",
"Escry-le-Franc",
"Escueillens-et-Saint-Just-de-Bélengard",
"Escures-sur-Favières",
"eskimau-aléoute",
"eskimo-aléoute",
"eskimo-aléoutes",
"Eslourenties-Daban",
"Esmery-Hallon",
"Esnes-en-Argonne",
"éso-narthex",
"espace-boutique",
"espaces-temps",
"espaces-ventes",
"espace-temps",
"espace-vente",
"espadon-voilier",
"Espagnac-Sainte-Eulalie",
"Espaly-Saint-Marcel",
"Esparron-de-Verdon",
"Esparron-la-Bâtie",
"Espès-Undurein",
"Espierres-Helchin",
"Espinasse-Vozelle",
"Espira-de-Conflent",
"Espira-de-l'Agly",
"Esplantas-Vazeilles",
"Esplas-de-Sérou",
"e-sport",
"e-sportif",
"e-sportifs",
"e-sports",
"esprit-de-bois",
"esprit-de-sel",
"esprit-de-vin",
"esprit-fort",
"Esprit-Saint",
"esprits-forts",
"Esquay-Notre-Dame",
"Esquay-sur-Seulles",
"Esquièze-Sère",
"esquimau-aléoute",
"esquimo-aléoute",
"Essche-Saint-Liévin",
"Essertenne-et-Cecey",
"Essertines-en-Châtelneuf",
"Essertines-en-Donzy",
"Essertines-sur-Rolle",
"Essertines-sur-Yverdon",
"Essert-Pittet",
"essert-romanais",
"Essert-Romanais",
"essert-romanaise",
"Essert-Romanaise",
"essert-romanaises",
"Essert-Romanaises",
"Essert-Romand",
"Esserts-Blay",
"Esserts-Salève",
"Esserval-Combe",
"Esserval-Tartre",
"Essey-et-Maizerais",
"Essey-la-Côte",
"Essey-les-Eaux",
"Essey-lès-Nancy",
"Essey-les-Ponts",
"Essigny-le-Grand",
"Essigny-le-Petit",
"Eßleben-Teutleben",
"Essômes-sur-Marne",
"essuie-glace",
"essuie-glaces",
"essuie-main",
"essuie-mains",
"essuie-meuble",
"essuie-meubles",
"essuie-phare",
"essuie-phares",
"essuie-pied",
"essuie-pieds",
"essuie-plume",
"essuie-plumes",
"essuie-tout",
"essuie-touts",
"essuie-verre",
"essuie-verres",
"Estavayer-le-Lac",
"Estinnes-au-Mont",
"Estinnes-au-Val",
"Estouteville-Ecalles",
"Estouteville-Écalles",
"Estrée-Blanche",
"estrée-blanchois",
"Estrée-Blanchois",
"estrée-blanchoise",
"Estrée-Blanchoise",
"estrée-blanchoises",
"Estrée-Blanchoises",
"estrée-cauchois",
"Estrée-Cauchois",
"estrée-cauchoise",
"Estrée-Cauchoise",
"estrée-cauchoises",
"Estrée-Cauchoises",
"Estrée-Cauchy",
"Estrées-Deniécourt",
"Estrées-en-Chaussée",
"Estrées-la-Campagne",
"Estrées-lès-Crécy",
"Estrées-Mons",
"Estrées-Saint-Denis",
"Estrées-sur-Noye",
"Estrée-Wamin",
"estrée-waminois",
"Estrée-Waminois",
"estrée-waminoise",
"Estrée-Waminoise",
"estrée-waminoises",
"Estrée-Waminoises",
"Esves-le-Moutier",
"Etables-sur-Mer",
"Étables-sur-Mer",
"Etais-la-Sauvin",
"Étais-la-Sauvin",
"étalon-or",
"Etampes-sur-Marne",
"Étampes-sur-Marne",
"Etang-Bertrand",
"Étang-Bertrand",
"Etang-la-Ville",
"Étang-la-Ville",
"Etang-Salé",
"Étang-Salé",
"Etang-Saléen",
"étang-saléen",
"Étang-Saléen",
"Etang-Saléenne",
"étang-saléenne",
"Étang-Saléenne",
"Etang-Saléennes",
"étang-saléennes",
"Étang-Saléennes",
"Etang-Saléens",
"étang-saléens",
"Étang-Saléens",
"Etang-sur-Arroux",
"Étang-sur-Arroux",
"Etang-Vergy",
"Étang-Vergy",
"état-limite",
"état-major",
"État-major",
"État-Major",
"État-nation",
"État-nounou",
"État-providence",
"états-civils",
"états-généraux",
"États-Généraux",
"états-limites",
"états-majors",
"États-majors",
"États-Majors",
"états-nations",
"États-nations",
"États-nounous",
"États-providence",
"états-unianisa",
"états-unianisai",
"états-unianisaient",
"états-unianisais",
"états-unianisait",
"états-unianisâmes",
"états-unianisant",
"états-unianisas",
"états-unianisasse",
"états-unianisassent",
"états-unianisasses",
"états-unianisassiez",
"états-unianisassions",
"états-unianisât",
"états-unianisâtes",
"états-unianise",
"états-unianisé",
"états-unianisée",
"états-unianisées",
"états-unianisent",
"états-unianiser",
"états-unianisera",
"états-unianiserai",
"états-unianiseraient",
"états-unianiserais",
"états-unianiserait",
"états-unianiseras",
"états-unianisèrent",
"états-unianiserez",
"états-unianiseriez",
"états-unianiserions",
"états-unianiserons",
"états-unianiseront",
"états-unianises",
"états-unianisés",
"états-unianisez",
"états-unianisiez",
"états-unianisions",
"états-unianisons",
"états-unien",
"États-Unien",
"états-unienne",
"États-Unienne",
"états-uniennes",
"États-Uniennes",
"états-uniens",
"États-Uniens",
"Etats-Unis",
"États-Unis",
"étau-limeur",
"étaux-limeurs",
"Etaves-et-Bocquiaux",
"Étaves-et-Bocquiaux",
"éthane-1,2-diol",
"éthéro-chloroforme",
"ethnico-religieux",
"éthyl-benzène",
"e-ticket",
"e-tickets",
"Étinehem-Méricourt",
"Etival-Clairefontaine",
"Étival-Clairefontaine",
"Etival-lès-le-Mans",
"Étival-lès-le-Mans",
"Etoile-Saint-Cyrice",
"Étoile-Saint-Cyrice",
"Etoile-sur-Rhône",
"Étoile-sur-Rhône",
"étouffe-chrétien",
"étouffe-chrétiens",
"e-tourisme",
"étrangle-chat",
"étrangle-chien",
"étrangle-loup",
"étrangle-loups",
"être-en-soi",
"être-là",
"Etrelles-et-la-Montbleuse",
"Étrelles-et-la-Montbleuse",
"Etrelles-sur-Aube",
"Étrelles-sur-Aube",
"êtres-en-soi",
"Etricourt-Manancourt",
"Étricourt-Manancourt",
"Etricourt-Manancourtois",
"étricourt-manancourtois",
"Étricourt-Manancourtois",
"Etricourt-Manancourtoise",
"étricourt-manancourtoise",
"Étricourt-Manancourtoise",
"Etricourt-Manancourtoises",
"étricourt-manancourtoises",
"Étricourt-Manancourtoises",
"Etten-Leur",
"Étueffont-Bas",
"Etxarri-Aranatz",
"Eugénie-les-Bains",
"Euilly-et-Lombut",
"Eure-et-Loir",
"euro-africain",
"euro-africaines",
"Euro-Afrique",
"euro-asiatique",
"euro-asiatiques",
"euro-bashing",
"euro-manifestation",
"euro-manifestations",
"euro-obligation",
"euro-obligations",
"Eurville-Bienville",
"eusses-tu-cru",
"eux-mêmes",
"Evaux-et-Ménil",
"Évaux-et-Ménil",
"Evaux-les-Bains",
"Évaux-les-Bains",
"Evette-Salbert",
"Évette-Salbert",
"Evian-les-Bains",
"Évian-les-Bains",
"Evin-Malmaison",
"Évin-Malmaison",
"Evry-Grégy-sur-Yerre",
"Évry-Grégy-sur-Yerre",
"Évry-Petit-Bourg",
"exa-ampère",
"exa-ampères",
"exa-électron-volt",
"exaélectron-volt",
"exa-électron-volts",
"exaélectron-volts",
"ex-aequo",
"ex-æquo",
"ex-ante",
"exa-octet",
"exa-octets",
"ex-champions",
"excito-nervin",
"excito-nervine",
"excito-nervines",
"excito-nervins",
"ex-copains",
"excusez-moi",
"ex-député",
"ex-députée",
"ex-députées",
"ex-députés",
"ex-femme",
"ex-femmes",
"ex-fumeur",
"ex-fumeurs",
"ex-libris",
"ex-mari",
"ex-maris",
"exo-noyau",
"exo-noyaux",
"expert-comptable",
"ex-petits",
"ex-présidents",
"ex-sacs",
"ex-sergents",
"ex-serviteurs",
"ex-soldats",
"ex-strip-teaseuse",
"extracto-chargeur",
"extracto-chargeurs",
"extracto-résine",
"extracto-résineux",
"extrêmes-droites",
"extrêmes-gauches",
"extrêmes-onctions",
"extro-déterminé",
"ex-voto",
"ex-votos",
"ex-Zaïre",
"eye-liner",
"eye-liners",
"Eygluy-Escoulin",
"Eygurande-et-Gardedeuil",
"Eyres-Moncube",
"Eyvignes-et-Eybènes",
"Eyzin-Pinet",
"Ezkio-Itsaso",
"Ezy-sur-Eure",
"Ézy-sur-Eure",
"face-à-face",
"face-à-main",
"face-B",
"face-kini",
"face-kinis",
"faces-à-main",
"faces-B",
"face-sitting",
"face-sittings",
"Faches-Thumesnil",
"faches-thumesnilois",
"Faches-Thumesnilois",
"faches-thumesniloise",
"Faches-Thumesniloise",
"faches-thumesniloises",
"Faches-Thumesniloises",
"fac-simila",
"fac-similai",
"fac-similaient",
"fac-similaire",
"fac-similais",
"fac-similait",
"fac-similâmes",
"fac-similant",
"fac-similas",
"fac-similasse",
"fac-similassent",
"fac-similasses",
"fac-similassiez",
"fac-similassions",
"fac-similât",
"fac-similâtes",
"fac-simile",
"fac-similé",
"fac-similée",
"fac-similées",
"fac-similent",
"fac-similer",
"fac-similera",
"fac-similerai",
"fac-simileraient",
"fac-similerais",
"fac-similerait",
"fac-simileras",
"fac-similèrent",
"fac-similerez",
"fac-simileriez",
"fac-similerions",
"fac-similerons",
"fac-simileront",
"fac-similes",
"fac-similés",
"fac-similez",
"fac-similiez",
"fac-similions",
"fac-similons",
"Faget-Abbatial",
"Fahy-lès-Autrey",
"faim-valle",
"Fain-lès-Montbard",
"Fain-lès-Moutiers",
"Fains-la-Folie",
"Fains-Véel",
"faire-part",
"faire-savoir",
"faire-valoir",
"fair-play",
"fair-plays",
"fait-à-fait",
"fait-divers",
"fait-diversier",
"fait-diversiers",
"fait-main",
"faits-divers",
"faits-diversier",
"faits-diversiers",
"fait-tout",
"Fajac-en-Val",
"Fajac-la-Relenque",
"Falkenberg-sur-Elster",
"fan-club",
"fan-clubs",
"fancy-fair",
"fancy-fairs",
"farcy-pontain",
"Farcy-Pontain",
"farcy-pontaine",
"Farcy-Pontaine",
"farcy-pontaines",
"Farcy-Pontaines",
"farcy-pontains",
"Farcy-Pontains",
"Fargau-Pratjau",
"Farges-Allichamps",
"Farges-en-Septaine",
"Farges-lès-Chalon",
"Farges-lès-Mâcon",
"Fargues-Saint-Hilaire",
"Fargues-sur-Ourbise",
"Far-West",
"fast-food",
"fast-foods",
"Fatouville-Grestain",
"Fatu-Hiva",
"Faucogney-et-la-Mer",
"Faucon-de-Barcelonnette",
"Faucon-du-Caire",
"Fau-de-Peyre",
"Faulx-les-Tombes",
"Fauquemont-sur-Gueule",
"fausse-braie",
"fausse-couche",
"fausse-limande",
"fausse-monnayeuse",
"fausse-porte",
"fausses-braies",
"fausses-couches",
"fausses-monnayeuses",
"Fauville-en-Caux",
"faux-acacia",
"faux-acacias",
"faux-ami",
"faux-amis",
"faux-bourdon",
"faux-bourdons",
"faux-bras",
"faux-carré",
"faux-carrés",
"faux-champlevé",
"faux-col",
"faux-cols",
"faux-cul",
"faux-derche",
"faux-derches",
"faux-filet",
"faux-filets",
"faux-frais",
"faux-frère",
"faux-frères",
"Faux-Fresnay",
"faux-fruit",
"faux-fruits",
"faux-fuyans",
"faux-fuyant",
"faux-fuyants",
"faux-garou",
"faux-grenier",
"faux-greniers",
"faux-jeton",
"faux-jetons",
"Faux-la-Montagne",
"Faux-Mazuras",
"faux-monnayage",
"faux-monnayages",
"faux-monnayeur",
"faux-monnayeurs",
"faux-nez",
"faux-palais",
"faux-persil",
"faux-poivrier",
"faux-poivriers",
"faux-pont",
"faux-ponts",
"faux-positif",
"faux-positifs",
"faux-saunage",
"faux-saunier",
"faux-saunière",
"faux-saunières",
"faux-sauniers",
"faux-scaphirhynque",
"faux-semblans",
"faux-semblant",
"faux-semblants",
"faux-sens",
"faux-vampire",
"faux-vampires",
"Faux-Vésigneul",
"Faux-Villecerf",
"faux-vin",
"Faveraye-Mâchelles",
"Faverges-de-la-Tour",
"Faverges-Seythenex",
"Faverolles-et-Coëmy",
"Faverolles-la-Campagne",
"Faverolles-lès-Lucey",
"Faverolles-les-Mares",
"Faverolles-sur-Cher",
"fax-tractage",
"fax-tractages",
"Fay-aux-Loges",
"Fay-de-Bretagne",
"Faye-d'Anjou",
"Faye-l'Abbesse",
"Faye-la-Vineuse",
"Fay-en-Montagne",
"Faye-sur-Ardin",
"Fayet-le-Château",
"Fayet-Ronaye",
"Fayl-Billot",
"fayl-billotin",
"Fayl-Billotin",
"fayl-billotine",
"Fayl-Billotine",
"fayl-billotines",
"Fayl-Billotines",
"fayl-billotins",
"Fayl-Billotins",
"Fay-le-Clos",
"Fay-les-Etangs",
"Fay-les-Étangs",
"Fay-lès-Marcilly",
"Faÿ-lès-Nemours",
"Fayl-la-Forêt",
"Fays-la-Chapelle",
"Fays-les-Veneurs",
"Fay-sur-Lignon",
"Fayt-le-Franc",
"Fayt-lez-Manage",
"Febvin-Palfart",
"Fêche-l'Eglise",
"Fêche-l'Église",
"fech-fech",
"feed-back",
"Fehl-Ritzhausen",
"Feins-en-Gâtinais",
"Feissons-sur-Isère",
"Feissons-sur-Salins",
"Felben-Wellhausen",
"Feldkirchen-Westerham",
"Félines-Minervois",
"Félines-sur-Rimandoule",
"Félines-Termenès",
"femelle-stérile",
"femelle-stériles",
"femme-enfant",
"femme-objet",
"femme-orchestre",
"femme-renarde",
"femmes-enfants",
"femmes-orchestres",
"femmes-renardes",
"fémoro-tibial",
"femto-ohm",
"femto-ohms",
"Fenouillet-du-Razès",
"fénoxaprop-éthyl",
"fénoxaprop-P-éthyl",
"féodo-vassalique",
"féodo-vassaliques",
"fer-à-cheval",
"fer-blanc",
"Fercé-sur-Sarthe",
"fer-chaud",
"fer-de-lance",
"fer-de-moulin",
"Fère-Champenoise",
"Fère-en-Tardenois",
"ferme-bourse",
"ferme-circuit",
"ferme-circuits",
"Ferme-Neuvien",
"ferme-porte",
"ferme-portes",
"fermes-hôtels",
"fermier-général",
"Fernán-Núñez",
"Ferney-Voltaire",
"Férolles-Attilly",
"Ferrals-les-Corbières",
"Ferrals-les-Montagnes",
"ferrando-forézienne",
"ferre-mule",
"Ferreux-Quincey",
"Ferrière-et-Lafolie",
"Ferrière-la-Grande",
"Ferrière-la-Petite",
"Ferrière-Larçon",
"Ferrières-en-Bray",
"Ferrières-en-Brie",
"Ferrières-en-Gâtinais",
"Ferrières-Haut-Clocher",
"Ferrières-la-Verrerie",
"Ferrières-le-Lac",
"Ferrières-les-Bois",
"Ferrières-lès-Ray",
"Ferrières-lès-Scey",
"Ferrières-les-Verreries",
"Ferrières-Poussarou",
"Ferrières-Saint-Hilaire",
"Ferrières-Saint-Mary",
"Ferrières-sur-Ariège",
"Ferrières-sur-Sichon",
"Ferrière-sur-Beaulieu",
"ferro-axinite",
"ferro-axinites",
"ferro-magnésien",
"ferro-magnétisme",
"ferro-magnétismes",
"ferro-phlogopite",
"ferro-phlogopites",
"ferro-prussiate",
"ferro-prussiates",
"ferry-boat",
"ferry-boats",
"fers-à-cheval",
"fers-blancs",
"fers-de-lance",
"Fesches-le-Châtel",
"fesh-fesh",
"Fesmy-le-Sart",
"Fessanvilliers-Mattanvilliers",
"fesse-cahier",
"fesse-mathieu",
"fesse-mathieus",
"fesse-mathieux",
"Fessenheim-le-Bas",
"fesse-tonneau",
"fesse-tonneaux",
"Fessey-Dessous-et-Dessus",
"fest-deiz",
"Festes-et-Saint-André",
"fest-noz",
"fest-nozs",
"Fête-Dieu",
"fétu-en-cul",
"fétus-en-cul",
"Feuguerolles-Bully",
"Feuguerolles-sur-Orne",
"Feuguerolles-sur-Seulles",
"feuille-caillou-ciseaux",
"feuille-morte",
"Feuquières-en-Vimeu",
"Fexhe-le-Haut-Clocher",
"Fexhe-Slins",
"Fey-en-Haye",
"fibre-cellule",
"fibro-cartilage",
"fibro-cellulaire",
"fibro-cystique",
"fibro-cystiques",
"fibro-granulaire",
"fibro-muqueux",
"fibro-séreux",
"fibro-soyeux",
"fiche-échalas",
"Fichous-Riumayou",
"fiducie-sûreté",
"Fieffes-Montrelet",
"fier-à-bras",
"fiers-à-bras",
"Fierville-Bray",
"Fierville-les-Mines",
"Fierville-les-Parcs",
"fie-vïnnamide",
"fie-vïnnamides",
"fifty-fifty",
"Figaró-Montmany",
"figuier-mûrier",
"filet-poubelle",
"filets-poubelles",
"fille-mère",
"filles-mères",
"film-fleuve",
"films-annonces",
"fils-de-puterie",
"filtre-presse",
"filtres-presses",
"fine-metal",
"Finkenbach-Gersweiler",
"finno-ougrien",
"finno-ougrienne",
"finno-ougriennes",
"finno-ougriens",
"fin-or",
"Fiquefleur-Equainville",
"Fiquefleur-Équainville",
"first-fit",
"Fischbach-Göslikon",
"Fischbach-Oberraden",
"fisse-larron",
"fisses-larrons",
"fist-fucking",
"fist-fuckings",
"Fitz-James",
"fitz-jamois",
"Fitz-Jamois",
"fitz-jamoise",
"Fitz-Jamoise",
"fitz-jamoises",
"Fitz-Jamoises",
"fixe-chaussette",
"fixe-chaussettes",
"fixe-fruit",
"fixe-fruits",
"fixe-longe",
"fixe-moustaches",
"fixe-ruban",
"fixe-rubans",
"Fix-Saint-Geneys",
"fix-up",
"Fize-Fontaine",
"Fize-le-Marsal",
"f'jer",
"f'jers",
"Flacé-lès-Mâcon",
"Flacey-en-Bresse",
"fla-fla",
"fla-flas",
"Flagey-Echézeaux",
"Flagey-Échézeaux",
"Flagey-lès-Auxonne",
"Flagey-Rigney",
"Flaignes-Havys",
"Flaignes-les-Oliviers",
"Flamets-Frétils",
"flanc-de-chien",
"Flanc-de-chien",
"flanc-garde",
"flanc-gardes",
"flanc-mou",
"Flancourt-Catelon",
"Flancourt-Crescy-en-Roumois",
"flancs-de-chien",
"Flancs-de-chien",
"flancs-gardes",
"flancs-mous",
"Flandre-Occidentale",
"Flandre-Orientale",
"flash-back",
"flash-ball",
"flash-balls",
"flash-mob",
"flash-mobs",
"Flassans-sur-Issole",
"Flaujac-Gare",
"Flaujac-Poujols",
"Flaumont-Waudrechies",
"Flavigny-le-Grand-et-Beaurain",
"Flavigny-le-Petit",
"Flavigny-sur-Moselle",
"Flavigny-sur-Ozerain",
"Flavy-le-Martel",
"Flavy-le-Meldeux",
"Fléac-sur-Seugne",
"Flémalle-Grande",
"Flémalle-Haute",
"Fléré-la-Rivière",
"Flers-en-Escrebieux",
"Flers-lez-Lille",
"Flers-sur-Noye",
"fleur-bleuisa",
"fleur-bleuisai",
"fleur-bleuisaient",
"fleur-bleuisais",
"fleur-bleuisait",
"fleur-bleuisâmes",
"fleur-bleuisant",
"fleur-bleuisas",
"fleur-bleuisasse",
"fleur-bleuisassent",
"fleur-bleuisasses",
"fleur-bleuisassiez",
"fleur-bleuisassions",
"fleur-bleuisât",
"fleur-bleuisâtes",
"fleur-bleuise",
"fleur-bleuisé",
"fleur-bleuisée",
"fleur-bleuisées",
"fleur-bleuisent",
"fleur-bleuiser",
"fleur-bleuisera",
"fleur-bleuiserai",
"fleur-bleuiseraient",
"fleur-bleuiserais",
"fleur-bleuiserait",
"fleur-bleuiseras",
"fleur-bleuisèrent",
"fleur-bleuiserez",
"fleur-bleuiseriez",
"fleur-bleuiserions",
"fleur-bleuiserons",
"fleur-bleuiseront",
"fleur-bleuises",
"fleur-bleuisés",
"fleur-bleuisez",
"fleur-bleuisiez",
"fleur-bleuisions",
"fleur-bleuisons",
"fleur-de-mai",
"Fleurey-lès-Faverney",
"Fleurey-lès-Lavoncourt",
"Fleurey-lès-Saint-Loup",
"Fleurey-sur-Ouche",
"fleur-feuille",
"Fleurieu-sur-Saône",
"Fleurieux-sur-l'Arbresle",
"Fleury-devant-Douaumont",
"Fleury-en-Bière",
"Fleury-et-Montmarin",
"Fleury-la-Forêt",
"Fleury-la-Montagne",
"Fleury-la-Rivière",
"Fleury-la-Vallée",
"Fleury-les-Aubrais",
"Fleury-Mérogis",
"Fleury-Montmarin",
"Fleury-sur-Aire",
"Fleury-sur-Andelle",
"Fleury-sur-Loire",
"Fleury-sur-Orne",
"Fleury-Vallée-d'Aillant",
"Fléville-devant-Nancy",
"Fléville-Lixières",
"Flez-Cuzy",
"flic-flac",
"flic-flaqua",
"flic-flaquai",
"flic-flaquaient",
"flic-flaquais",
"flic-flaquait",
"flic-flaquâmes",
"flic-flaquant",
"flic-flaquas",
"flic-flaquasse",
"flic-flaquassent",
"flic-flaquasses",
"flic-flaquassiez",
"flic-flaquassions",
"flic-flaquât",
"flic-flaquâtes",
"flic-flaque",
"flic-flaqué",
"flic-flaquent",
"flic-flaquer",
"flic-flaquera",
"flic-flaquerai",
"flic-flaqueraient",
"flic-flaquerais",
"flic-flaquerait",
"flic-flaqueras",
"flic-flaquèrent",
"flic-flaquerez",
"flic-flaqueriez",
"flic-flaquerions",
"flic-flaquerons",
"flic-flaqueront",
"flic-flaques",
"flic-flaquez",
"flic-flaquiez",
"flic-flaquions",
"flic-flaquons",
"Flieth-Stegelitz",
"Flines-lès-Mortagne",
"Flines-lez-Raches",
"Flins-Neuve-Eglise",
"Flins-Neuve-Église",
"Flins-sur-Seine",
"flint-glass",
"flip-flap",
"flirty-fishing",
"float-tube",
"float-tubes",
"Flogny-la-Chapelle",
"Floh-Seligenthal",
"Florent-en-Argonne",
"Florentin-la-Capelle",
"Florimont-Gaumier",
"Flörsheim-Dalsheim",
"flos-ferré",
"flos-ferri",
"Flottemanville-Hague",
"flotte-tube",
"flotte-tubes",
"flou-flou",
"fluazifop-butyl",
"fluazifop-P-butyl",
"Fluorn-Winzeln",
"fluoro-phlogopite",
"fluoro-phlogopites",
"flupyrsulfuron-méthyle",
"fluroxypyr-meptyl",
"fluvio-marin",
"fly-over",
"fly-overs",
"fly-tox",
"f'nêtre",
"f'nêtres",
"Foameix-Ornel",
"foc-en-l'air",
"Föhrden-Barl",
"Fohren-Linden",
"foie-de-boeuf",
"foies-de-boeuf",
"foi-menti",
"foi-mentie",
"foire-exposition",
"foires-expositions",
"Foissy-lès-Vézelay",
"Foissy-sur-Vanne",
"folk-lore",
"folk-lores",
"Follainville-Dennemont",
"folle-avoine",
"folle-blanche",
"folles-avoines",
"folle-verte",
"Folx-les-Caves",
"folx-les-cavien",
"Folx-les-Cavien",
"Folx-les-Cavienne",
"Fonches-Fonchette",
"Foncine-le-Bas",
"Foncine-le-Haut",
"Fondachelli-Fantina",
"fond-de-teinta",
"fond-de-teintai",
"fond-de-teintaient",
"fond-de-teintais",
"fond-de-teintait",
"fond-de-teintâmes",
"fond-de-teintant",
"fond-de-teintas",
"fond-de-teintasse",
"fond-de-teintassent",
"fond-de-teintasses",
"fond-de-teintassiez",
"fond-de-teintassions",
"fond-de-teintât",
"fond-de-teintâtes",
"fond-de-teinte",
"fond-de-teinté",
"fond-de-teintée",
"fond-de-teintées",
"fond-de-teintent",
"fond-de-teinter",
"fond-de-teintera",
"fond-de-teinterai",
"fond-de-teinteraient",
"fond-de-teinterais",
"fond-de-teinterait",
"fond-de-teinteras",
"fond-de-teintèrent",
"fond-de-teinterez",
"fond-de-teinteriez",
"fond-de-teinterions",
"fond-de-teinterons",
"fond-de-teinteront",
"fond-de-teintes",
"fond-de-teintés",
"fond-de-teintez",
"fond-de-teintiez",
"fond-de-teintions",
"fond-de-teintons",
"Fonds-Saint-Denis",
"fon-gbe",
"Fons-sur-Lussan",
"Fontaine-au-Bois",
"Fontaine-au-Pire",
"Fontaine-Bellenger",
"Fontaine-Bethon",
"Fontaine-Bonneleau",
"fontaine-brayen",
"Fontaine-Brayen",
"fontaine-brayenne",
"Fontaine-Brayenne",
"fontaine-brayennes",
"Fontaine-Brayennes",
"fontaine-brayens",
"Fontaine-Brayens",
"Fontaine-Chaalis",
"Fontaine-Chalendray",
"Fontaine-Couverte",
"Fontaine-Denis",
"Fontaine-Denis-Nuisy",
"Fontaine-de-Vaucluse",
"Fontaine-en-Bray",
"Fontaine-en-Dormois",
"Fontaine-Etoupefour",
"Fontaine-Étoupefour",
"Fontaine-Fourches",
"Fontaine-Française",
"Fontaine-Guérin",
"Fontaine-Henry",
"Fontaine-Heudebourg",
"Fontaine-l'Abbé",
"Fontaine-la-Gaillarde",
"Fontaine-la-Guyon",
"Fontaine-la-Louvet",
"Fontaine-la-Mallet",
"Fontaine-la-Rivière",
"Fontaine-la-Soret",
"Fontaine-Lavaganne",
"Fontaine-le-Bourg",
"Fontaine-le-Comte",
"Fontaine-le-Dun",
"Fontaine-le-Pin",
"Fontaine-le-Port",
"Fontaine-le-Puits",
"Fontaine-les-Bassets",
"Fontaine-lès-Boulans",
"Fontaine-lès-Cappy",
"Fontaine-lès-Clercs",
"Fontaine-lès-Clerval",
"Fontaine-les-Coteaux",
"Fontaine-lès-Croisilles",
"Fontaine-lès-Dijon",
"Fontaine-le-Sec",
"Fontaine-les-Grès",
"Fontaine-lès-Hermans",
"Fontaine-lès-Luxeuil",
"Fontaine-les-Ribouts",
"Fontaine-lès-Vervins",
"Fontaine-l'Etalon",
"Fontaine-l'Étalon",
"Fontaine-l'Évêque",
"Fontaine-Luyères",
"Fontaine-Mâcon",
"Fontaine-Milon",
"Fontaine-Notre-Dame",
"Fontaine-Raoul",
"Fontaine-Saint-Lucien",
"Fontaines-d'Ozillac",
"Fontaines-en-Duesmois",
"Fontaines-en-Sologne",
"Fontaine-Simon",
"Fontaines-les-Sèches",
"Fontaine-sous-Jouy",
"Fontaine-sous-Montaiguillon",
"Fontaine-sous-Montdidier",
"Fontaine-sous-Pezou",
"Fontaine-sous-Préaux",
"Fontaines-Saint-Clair",
"Fontaines-Saint-Martin",
"Fontaines-sur-Grandson",
"Fontaines-sur-Marne",
"Fontaines-sur-Saône",
"Fontaine-sur-Ay",
"Fontaine-sur-Coole",
"Fontaine-sur-Maye",
"Fontaine-sur-Somme",
"Fontaine-Uterte",
"Fontaine-Valmont",
"Fontanès-de-Sault",
"Fontanes-du-Causse",
"Fontanil-Cornillon",
"Fontcouverte-la-Toussuire",
"Font-de-Carpentin",
"Fontenai-les-Louvets",
"Fontenai-sur-Orne",
"Fontenay-aux-Roses",
"Fontenay-de-Bossery",
"Fontenay-en-Parisis",
"Fontenay-en-Vexin",
"Fontenay-le-Comte",
"Fontenay-le-Fleury",
"Fontenay-le-Marmion",
"Fontenay-le-Pesnel",
"Fontenay-lès-Briis",
"Fontenay-le-Vicomte",
"Fontenay-Mauvoisin",
"Fontenay-près-Chablis",
"Fontenay-près-Vézelay",
"Fontenay-Saint-Père",
"Fontenay-sous-Bois",
"Fontenay-sous-Fouronnes",
"Fontenay-sur-Conie",
"Fontenay-sur-Eure",
"Fontenay-sur-Loing",
"Fontenay-sur-Mer",
"Fontenay-sur-Vègre",
"Fontenay-Torcy",
"Fontenay-Trésigny",
"Fontenelle-en-Brie",
"Fontenelle-Montby",
"Fontenille-Saint-Martin-d'Entraigues",
"Fontenilles-d'Aigueparse",
"Fontenois-la-Ville",
"Fontenois-lès-Montbozon",
"Fontenoy-en-Puisaye",
"Fontenoy-la-Joûte",
"Fontenoy-le-Château",
"Fontenoy-sur-Moselle",
"Fonters-du-Razès",
"Fontevraud-l'Abbaye",
"Fontiers-Cabardès",
"Fontiès-d'Aude",
"Font-Romeu-Odeillo-Via",
"Font-rubí",
"food-court",
"food-courts",
"food-truck",
"food-trucks",
"Forcelles-Saint-Gorgon",
"Forcelles-sous-Gugney",
"Forceville-en-Vimeu",
"force-vivier",
"Forchies-la-Marche",
"Forel-sur-Lucens",
"Forest-en-Cambrésis",
"Forest-l'Abbaye",
"Forest-Montiers",
"Forest-Saint-Julien",
"Forest-sur-Marque",
"forêt-clairière",
"forêt-climax",
"forêt-galerie",
"Forêt-la-Folie",
"Forêt-Noire",
"Forêt-Noire-Baar",
"forêt-parc",
"forêts-clairières",
"forêts-climax",
"forêts-galeries",
"forêts-parcs",
"forge-mètre",
"Forge-Philippe",
"Forges-la-Forêt",
"Forges-les-Bains",
"Forges-les-Eaux",
"Forges-sur-Meuse",
"Forlì-Cesena",
"formica-leo",
"formule-choc",
"formule-chocs",
"forsétyl-al",
"Forst-Längenbühl",
"Fortel-en-Artois",
"forte-piano",
"forte-pianos",
"forts-vêtu",
"Fosbury-flop",
"fosétyl-Al",
"Fossès-et-Baleyssac",
"Fosses-la-Ville",
"Fos-sur-Mer",
"Foucaucourt-en-Santerre",
"Foucaucourt-Hors-Nesle",
"Foucaucourt-sur-Thabas",
"Fouchères-aux-Bois",
"foué-toutrac",
"foué-toutracs",
"fouette-cul",
"fouette-culs",
"fouette-queue",
"fouette-queues",
"Foufflin-Ricametz",
"Foufnie-les-Berdouilles",
"Fougax-et-Barrineuf",
"fougère-aigle",
"fougères-aigles",
"Fougères-sur-Bièvre",
"Fougerolles-du-Plessis",
"fouille-au-pot",
"fouille-merde",
"foule-crapaud",
"Fouquières-lès-Béthune",
"Fouquières-lès-Lens",
"Fourcatier-et-Maison-Neuve",
"fourche-fière",
"fourmi-lion",
"fourmis-lions",
"Fourneaux-le-Val",
"Fournes-Cabardès",
"Fournes-en-Weppes",
"Fournet-Blancheroche",
"Fournets-Luisans",
"Fouron-le-Comte",
"Fouron-Saint-Martin",
"Fouron-Saint-Pierre",
"Fourques-sur-Garonne",
"fourre-tout",
"Fours-en-Vexin",
"Foussais-Payré",
"Fouta-Diallon",
"Fouta-Djalon",
"Fouvent-le-Bas",
"Fouvent-Saint-Andoche",
"Fox-Amphoux",
"fox-hound",
"fox-hounds",
"fox-terrier",
"fox-terriers",
"fox-trot",
"fox-trott",
"fox-trotta",
"fox-trottai",
"fox-trottaient",
"fox-trottais",
"fox-trottait",
"fox-trottâmes",
"fox-trottant",
"fox-trottas",
"fox-trottasse",
"fox-trottassent",
"fox-trottasses",
"fox-trottassiez",
"fox-trottassions",
"fox-trottât",
"fox-trottâtes",
"fox-trotte",
"fox-trotté",
"fox-trottent",
"fox-trotter",
"fox-trottera",
"fox-trotterai",
"fox-trotteraient",
"fox-trotterais",
"fox-trotterait",
"fox-trotteras",
"fox-trottèrent",
"fox-trotterez",
"fox-trotteriez",
"fox-trotterions",
"fox-trotterons",
"fox-trotteront",
"fox-trottes",
"fox-trottez",
"fox-trottiez",
"fox-trottions",
"fox-trottons",
"fox-trotts",
"Foy-Notre-Dame",
"foy-notre-damien",
"Foy-Notre-Damien",
"Foy-Notre-Damienne",
"Foz-Calanda",
"Frahier-et-Chatebier",
"Fraignot-et-Vesvrotte",
"frais-chier",
"Fraisnes-en-Saintois",
"Fraisse-Cabardès",
"Fraissé-des-Corbières",
"Fraisse-sur-Agout",
"Fraissinet-de-Fourques",
"Fraissinet-de-Lozère",
"Framerville-Rainecourt",
"Francfort-sur-le-Main",
"Francfort-sur-l'Oder",
"Franche-Comté",
"Franches-Montagnes",
"Francillon-sur-Roubion",
"Francilly-Selency",
"Frangy-en-Bresse",
"Fränkisch-Crumbach",
"Franqueville-Saint-Pierre",
"frappe-abord",
"frappe-à-bord",
"frappe-à-mort",
"frappe-babord",
"frappe-d'abord",
"frappe-devant",
"frappe-main",
"frappe-mains",
"frappe-plaque",
"frappe-plaques",
"Frasnay-Reugny",
"Frasne-le-Château",
"Frasne-les-Meulières",
"Frasnes-lez-Anvaing",
"Frasnes-lez-Buissenal",
"Frasnes-lez-Couvin",
"Frasnes-lez-Gosselies",
"Frayssinet-le-Gélat",
"Fréchet-Aure",
"Fréchou-Fréchet",
"Frédéric-Fontaine",
"Fredersdorf-Vogelsdorf",
"free-lance",
"Freienstein-Teufen",
"Frei-Laubersheim",
"freins-vapeur",
"frein-vapeur",
"Freix-Anglards",
"Frémeréville-sous-les-Côtes",
"Frenelle-la-Grande",
"Frenelle-la-Petite",
"Freneuse-sur-Risle",
"Fresnay-en-Retz",
"Fresnay-le-Comte",
"Fresnay-le-Gilmert",
"Fresnay-le-Long",
"Fresnay-le-Samson",
"Fresnay-l'Evêque",
"Fresnay-l'Évêque",
"Fresnay-sur-Sarthe",
"Fresneaux-Montchevreuil",
"Fresne-Cauverville",
"Fresné-la-Mère",
"Fresne-l'Archevêque",
"Fresne-Léguillon",
"Fresne-le-Plan",
"Fresne-lès-Reims",
"Fresne-Saint-Mamès",
"Fresnes-au-Mont",
"Fresnes-en-Saulnois",
"Fresnes-en-Tardenois",
"Fresnes-en-Woëvre",
"Fresnes-lès-Montauban",
"Fresnes-lès-Reims",
"Fresnes-Mazancourt",
"Fresnes-sur-Apance",
"Fresnes-sur-Escaut",
"Fresnes-sur-Marne",
"Fresnes-Tilloloy",
"Fresney-le-Puceux",
"Fresney-le-Vieux",
"Fresnicourt-le-Dolmen",
"Fresnois-la-Montagne",
"Fresnoy-Andainville",
"Fresnoy-au-Val",
"Fresnoy-en-Bassigny",
"Fresnoy-en-Chaussée",
"Fresnoy-en-Gohelle",
"Fresnoy-en-Thelle",
"Fresnoy-Folny",
"Fresnoy-la-Rivière",
"Fresnoy-le-Château",
"Fresnoy-le-Grand",
"Fresnoy-le-Luat",
"Fresnoy-lès-Roye",
"Fresse-sur-Moselle",
"Fretigney-et-Velloreille",
"Frétoy-le-Château",
"Fréville-du-Gâtinais",
"Frévin-Capelle",
"Freycenet-la-Cuche",
"Freycenet-la-Tour",
"Freyming-Merlebach",
"freyming-merlebachois",
"Freyming-Merlebachois",
"freyming-merlebachoise",
"Freyming-Merlebachoise",
"freyming-merlebachoises",
"Freyming-Merlebachoises",
"Freyung-Grafenau",
"Fribourg-en-Brisgau",
"fric-frac",
"fric-fracs",
"Friedrich-Wilhelm-Lübke-Koog",
"Frières-Faillouël",
"Frise-du-Nord",
"Frise-Occidentale",
"Friville-Escarbotin",
"Frohen-le-Grand",
"Frohen-le-Petit",
"Frohen-sur-Authie",
"Froidmont-Cohartille",
"Fromeréville-les-Vallons",
"Frontenay-Rohan-Rohan",
"Frontenay-sur-Dive",
"Frontignan-de-Comminges",
"Frontignan-Savès",
"fronto-iniaque",
"Frotey-lès-Lure",
"Frotey-lès-Vesoul",
"frou-frou",
"frou-frous",
"frous-frous",
"Frugerès-les-Mines",
"Frugières-le-Pin",
"Frutigen-Bas-Simmental",
"fuel-oil",
"fuel-oils",
"Fuente-Álamo",
"Fuente-Olmedo",
"Fuente-Tójar",
"full-contact",
"Full-Reuenthal",
"full-stack",
"fulmi-coton",
"fulmi-cotons",
"fume-cigare",
"fume-cigares",
"fume-cigarette",
"fume-cigarettes",
"fumée-gelée",
"fusée-sonde",
"fusilier-commando",
"fusilier-marin",
"fusiliers-commandos",
"fusiliers-marins",
"fusil-mitrailleur",
"fusils-mitrailleurs",
"fusion-acquisition",
"fute-fute",
"futes-futes",
"fût-et-fare",
"fut's",
"futuna-aniwa",
"Gaag-Maasland",
"Gaag-Schipluiden",
"Gaasterlân-Sleat",
"Gabbioneta-Binanuova",
"gabrielino-fernandeño",
"gâche-métier",
"Gadz'Arette",
"Gadz'Arettes",
"gadz'arts",
"Gadz'Arts",
"Gageac-et-Rouillac",
"Gagnac-sur-Cère",
"Gagnac-sur-Garonne",
"gagnante-gagnante",
"gagnante-gagnante-gagnante",
"gagnantes-gagnantes",
"gagnantes-gagnantes-gagnantes",
"gagnant-gagnant",
"gagnant-gagnant-gagnant",
"gagnants-gagnants",
"gagnants-gagnants-gagnants",
"Gagne-monopanglotte",
"gagne-pain",
"gagne-pains",
"gagne-petit",
"Gaillac-d'Aveyron",
"Gaillac-Toulza",
"Gaillan-en-Médoc",
"Gaillardbois-Cressenville",
"gaillet-gratteron",
"gaillets-gratterons",
"Gaillon-sur-Montcient",
"gaine-culotte",
"gaines-culottes",
"Gaja-et-Villedieu",
"Gaja-la-Selve",
"galaïco-portugais",
"galégo-portugais",
"galeries-refuges",
"galette-saucisse",
"galette-saucisses",
"Gallargues-le-Montueux",
"Gallin-Kuppentin",
"galvano-cautère",
"galvano-magnétique",
"galvano-magnétiques",
"galvano-magnétisme",
"galvano-magnétismes",
"Gamaches-en-Vexin",
"Gamarde-les-Bains",
"Gamiz-Fika",
"gamma-1,2,3,4,5,6-hexachlorocyclohexane",
"gamma-HCH",
"gamma-hexachlorobenzène",
"gamma-hexachlorocyclohexane",
"Gampel-Bratsch",
"Gancourt-Saint-Etienne",
"Gancourt-Saint-Étienne",
"Gannay-sur-Loire",
"Garancières-en-Beauce",
"Garancières-en-Drouais",
"Garcelles-Secqueville",
"garcette-goitre",
"Gardegan-et-Tourtirac",
"garden-parties",
"garden-party",
"garden-partys",
"Garennes-sur-Eure",
"Garges-lès-Gonesse",
"Gargilesse-Dampierre",
"Garlède-Mondebat",
"Garmisch-Partenkirchen",
"Garnat-sur-Engièvre",
"Garrigues-Sainte-Eulalie",
"Garzau-Garzin",
"gas-oil",
"gas-oils",
"Gaspé-Nordien",
"Gaspésie-Îles-de-la-Madeleine",
"Gastines-sur-Erve",
"Gasville-Oisème",
"gâte-bois",
"gâte-ménage",
"gâte-ménages",
"gâte-métier",
"gâte-métiers",
"gâte-papier",
"gâte-papiers",
"gâte-pâte",
"gâte-sauce",
"gâte-sauces",
"Gatteville-le-Phare",
"Gau-Algesheim",
"Gau-Bickelheim",
"Gau-Bischofsheim",
"gauche-fer",
"Gauchin-Légal",
"Gauchin-Verloingt",
"Gaudreville-la-Rivière",
"Gau-Heppenheim",
"Gau-Odernheim",
"Gaurain-Ramecroix",
"Gauville-la-Campagne",
"Gau-Weinheim",
"Gavarnie-Gèdre",
"Gavarret-sur-Aulouste",
"gay-friendly",
"gays-friendly",
"Gazax-et-Baccarisse",
"gaz-cab",
"gazelle-girafe",
"gaz-poivre",
"Gée-Rivière",
"Geest-Gérompont",
"Geest-Gérompont-Petit-Rosière",
"Géfosse-Fontenay",
"gélatino-bromure",
"gélatino-bromures",
"gel-douche",
"gel-douches",
"Geldrop-Mierlo",
"Gelvécourt-et-Adompt",
"Gemert-Bakel",
"Genac-Bignac",
"Génicourt-sous-Condé",
"Génicourt-sur-Meuse",
"génie-conseil",
"génies-conseils",
"génio-hyoïdien",
"génio-hyoïdienne",
"génio-hyoïdiennes",
"génio-hyoïdiens",
"génito-crural",
"génito-urinaire",
"génito-urinaires",
"Gennes-Ivergny",
"Gennes-sur-Glaize",
"Gennes-sur-Seiche",
"Gensac-de-Boulogne",
"Gensac-la-Pallue",
"Gensac-sur-Garonne",
"Gentioux-Pigerolles",
"gentleman-rider",
"gentlemen-riders",
"Georges-Fontaine",
"Gerbécourt-et-Haplemont",
"Gercourt-et-Drillancourt",
"Gère-Bélesten",
"gère-bélestinois",
"Gère-Bélestinois",
"gère-bélestinoise",
"Gère-Bélestinoise",
"gère-bélestinoises",
"Gère-Bélestinoises",
"germanate-analcime",
"germanate-analcimes",
"germano-américain",
"germano-américaine",
"germano-américaines",
"germano-américains",
"germano-anglais",
"germano-anglaises",
"germano-iranien",
"Germano-Iranien",
"germano-italo-japonais",
"Germigny-des-Prés",
"Germigny-lès-Machault",
"Germigny-lès-Machaut",
"Germigny-l'Evêque",
"Germigny-l'Évêque",
"Germigny-l'Exempt",
"Germigny-Pend-la-Pie",
"Germigny-sous-Coulombs",
"Germigny-sur-Loire",
"Germolles-sur-Grosne",
"Germond-Rouvre",
"germo-roburien",
"Germo-Roburien",
"germo-roburienne",
"Germo-Roburienne",
"germo-roburiennes",
"Germo-Roburiennes",
"germo-roburiens",
"Germo-Roburiens",
"Germs-sur-l'Oussouet",
"Gernika-Lumo",
"Gerville-la-Forêt",
"Gesnes-en-Argonne",
"Gesnes-le-Gandelin",
"gestalt-thérapie",
"gestalt-thérapies",
"Gesvres-le-Chapitre",
"gétah-lahoë",
"Géus-d'Arzacq",
"Geüs-d'Oloron",
"Gevigney-et-Mercey",
"Gevrey-Chambertin",
"Gez-ez-Angles",
"Gezier-et-Fontelenay",
"Gézier-et-Fontenelay",
"Giardini-Naxos",
"Giel-Courteilles",
"Gien-sur-Cure",
"Giessen-Nieuwkerk",
"Giessen-Oudekerk",
"Giey-sur-Aujon",
"Giffaumont-Champaubert",
"Gif-sur-Yvette",
"giga-ampère",
"giga-ampères",
"gigabit-ethernet",
"giga-électron-volt",
"gigaélectron-volt",
"giga-électron-volts",
"gigaélectron-volts",
"giga-ohm",
"giga-ohms",
"Gignac-la-Nerthe",
"Gigny-Bussy",
"Gigny-sur-Saône",
"Gigors-et-Lozeron",
"Gilhac-et-Bruzac",
"Gilhoc-sur-Ormèze",
"gill-box",
"Gilly-lès-Cîteaux",
"Gilly-sur-Isère",
"Gilly-sur-Loire",
"Gimel-les-Cascades",
"Ginsheim-Gustavsburg",
"Giou-de-Mamou",
"Gipf-Oberfrick",
"Gircourt-lès-Viéville",
"Girecourt-sur-Durbion",
"Girmont-Val-d'Ajol",
"Gironcourt-sur-Vraine",
"Gironde-sur-Dropt",
"Gironville-et-Neuville",
"Gironville-sous-les-Côtes",
"Gironville-sur-Essonne",
"Girovillers-sous-Montfort",
"Gisay-la-Coudre",
"Gissey-le-Vieil",
"Gissey-sous-Flavigny",
"Gissey-sur-Ouche",
"Gisy-les-Nobles",
"Gisy-sur-Oreuse",
"Givenchy-en-Gohelle",
"Givenchy-le-Noble",
"Givenchy-lès-la-Bassée",
"Givet-Notre-Dame",
"Givet-Saint-Hilaire",
"Givry-en-Argonne",
"Givry-lès-Loisy",
"Givry-sur-Aisne",
"Glabbeek-Zuurbemde",
"glabello-iniaque",
"Glaine-Montaigut",
"Glaire-et-Villette",
"Glaire-Latour",
"Glane-Beekhoek",
"Glan-Münchweiler",
"glass-cord",
"glauco-ferrugineuse",
"glauco-ferrugineuses",
"glauco-ferrugineux",
"Glaude-Arbourois",
"Gleiszellen-Gleishorbach",
"glisser-déposer",
"globe-trotter",
"globe-trotters",
"globe-trotteur",
"globe-trotteurs",
"globe-trotteuse",
"globe-trotteuses",
"Glos-la-Ferrière",
"glosso-épiglottique",
"glosso-épiglottiques",
"glosso-pharyngien",
"glosso-staphylin",
"glosso-staphylins",
"Glos-sur-Risle",
"gloubi-boulga",
"gluco-corticoïde",
"gluco-corticoïdes",
"glufosinate-ammonium",
"Glux-en-Glenne",
"glycéraldéhyde-3-phosphate",
"glycosyl-phosphatidylinositol",
"goal-average",
"goal-averages",
"goal-ball",
"gobe-dieu",
"gobe-goujons",
"gobe-mouche",
"gobe-moucherie",
"gobe-moucherons",
"gobe-mouches",
"gobe-mouton",
"gode-ceinture",
"gode-miché",
"gode-michés",
"godes-ceintures",
"Gœgnies-Chaussée",
"Goeree-Overflakkee",
"Gognies-Chaussée",
"Göhren-Döhlen",
"Göhren-Lebbin",
"Goldbach-Altenbach",
"goma-dare",
"Gometz-la-Ville",
"Gometz-le-Châtel",
"gomme-cogne",
"gomme-cognes",
"gomme-gutte",
"Gommenec'h",
"gomme-résine",
"gommo-résineux",
"Gomzé-Andoumont",
"Gondenans-les-Moulins",
"Gondenans-Montby",
"Gondenans-Moulins",
"Gond-Pontouvre",
"Gondrecourt-Aix",
"Gondrecourt-le-Château",
"Gonfreville-Caillot",
"Gonfreville-l'Orcher",
"Gonneville-en-Auge",
"Gonneville-la-Mallet",
"Gonneville-sur-Honfleur",
"Gonneville-sur-Mer",
"Gonneville-sur-Merville",
"Gonneville-sur-Scie",
"Gontaud-de-Nogaret",
"google-isa",
"google-isai",
"google-isaient",
"google-isais",
"google-isait",
"google-isâmes",
"google-isant",
"google-isas",
"google-isasse",
"google-isassent",
"google-isasses",
"google-isassiez",
"google-isassions",
"google-isât",
"google-isâtes",
"google-ise",
"google-isé",
"google-isée",
"google-isées",
"google-isent",
"google-iser",
"google-isera",
"google-iserai",
"google-iseraient",
"google-iserais",
"google-iserait",
"google-iseras",
"google-isèrent",
"google-iserez",
"google-iseriez",
"google-iserions",
"google-iserons",
"google-iseront",
"google-ises",
"google-isés",
"google-isez",
"google-isiez",
"google-isions",
"google-isons",
"Gorden-Staupitz",
"gorge-bleue",
"gorge-de-pigeon",
"gorge-fouille",
"Gorges-du-Tarn-Causses",
"Gornate-Olona",
"Gorom-Gorom",
"Gors-Opleeuw",
"go-slow",
"Gossersweiler-Stein",
"Gotein-Libarrenx",
"Gouaux-de-Larboust",
"Gouaux-de-Luchon",
"Goudelancourt-lès-Berrieux",
"Goudelancourt-lès-Pierrepont",
"Goulier-et-Olbier",
"Gourdan-Polignan",
"gourdan-polignanais",
"Gourdan-Polignanais",
"gourdan-polignanaise",
"Gourdan-Polignanaise",
"gourdan-polignanaises",
"Gourdan-Polignanaises",
"Gourdon-Murat",
"gouris-taitien",
"Gouris-Taitien",
"gouris-taitienne",
"Gouris-Taitienne",
"gouris-taitiennes",
"Gouris-Taitiennes",
"gouris-taitiens",
"Gouris-Taitiens",
"Gournay-en-Bray",
"Gournay-le-Guérin",
"Gournay-Loizé",
"Gournay-sur-Aronde",
"Gournay-sur-Marne",
"Gout-Rossignol",
"goutte-à-goutte",
"goutte-de-sang",
"goutte-de-suif",
"goutte-rose",
"gouttes-de-sang",
"Goux-lès-Dambelin",
"Goux-les-Usiers",
"Goux-sous-Landet",
"Gouy-en-Artois",
"Gouy-en-Ternois",
"Gouy-les-Groseillers",
"Gouy-lez-Piéton",
"Gouy-l'Hôpital",
"Gouy-Saint-André",
"Gouy-Servins",
"Gouy-sous-Bellonne",
"gouzi-gouzi",
"gouzis-gouzis",
"goyave-ananas",
"goyaves-ananas",
"Graal-Müritz",
"Graben-Neudorf",
"Grabow-Below",
"Grâce-Berleur",
"Grâce-Hollogne",
"Grâce-Uzel",
"gracieux-berluron",
"Gracieux-Berluron",
"Gracieux-Berluronne",
"grâcieux-hollognois",
"Grâcieux-Hollognois",
"Grâcieux-Hollognoise",
"Graffigny-Chemin",
"Graignes-Mesnil-Angot",
"Graincourt-lès-Havrincourt",
"grain-d'orge",
"Grainville-Langannerie",
"Grainville-la-Teinturière",
"Grainville-sur-Odon",
"Grainville-sur-Ry",
"Grainville-Ymauville",
"Grancey-le-Château-Neuvelle",
"Grancey-sur-Ource",
"Grandcamp-Maisy",
"Grandchamp-le-Château",
"Grandchamps-des-Fontaines",
"grand'chose",
"Grand'Combe-Châteleu",
"Grand'Combe-des-Bois",
"grand'faim",
"Grandfontaine-sur-Creuse",
"grand'garde",
"grand'gardes",
"grandgousier-pélican",
"grand'hamien",
"Grand'hamien",
"grand'hamienne",
"Grand'hamienne",
"grand'hamiennes",
"Grand'hamiennes",
"grand'hamiens",
"Grand'hamiens",
"grand'honte",
"grand'hontes",
"grand'landais",
"Grand'Landais",
"grand'landaise",
"Grand'Landaise",
"grand'landaises",
"Grand'Landaises",
"Grand'Landes",
"Grandlup-et-Fay",
"grand'maman",
"grand'mamans",
"grand'maternité",
"grand'maternités",
"grand'mère",
"grand'mères",
"Grand'Mérien",
"Grand'mérois",
"Grand'Mérois",
"Grand'Mèrois",
"grand'messe",
"grand'messes",
"grand'paternité",
"grand'paternités",
"Grandpuits-Bailly-Carrois",
"Grand'Rivière",
"Grandrupt-de-Bains",
"grand'tante",
"grand'tantes",
"Grandvelle-et-le-Perrenot",
"Grandvelle-et-Perrenot",
"Grandville-Gaudreville",
"Grandvillers-aux-Bois",
"Grange-de-Vaivre",
"Grange-le-Bocage",
"Granges-Aumontzey",
"Granges-d'Ans",
"Granges-de-Plombières",
"Granges-de-Vienney",
"Granges-la-Ville",
"Granges-le-Bourg",
"Granges-les-Beaumont",
"Granges-Maillot",
"Granges-Narboz",
"Granges-Paccot",
"Granges-Sainte-Marie",
"Granges-sur-Aube",
"Granges-sur-Baume",
"Granges-sur-Lot",
"Granges-sur-Vologne",
"grano-lamellaire",
"Granzay-Gript",
"grap-fruit",
"grap-fruits",
"grapho-moteur",
"grappe-fruit",
"gras-double",
"gras-doubles",
"gras-fondu",
"Grateloup-Saint-Gayrand",
"grattes-ciels",
"grave-cimens",
"grave-ciment",
"grave-ciments",
"Graveron-Sémerville",
"graves-ciment",
"Graves-Saint-Amant",
"gravi-kora",
"Grayan-et-l'Hôpital",
"Graye-et-Charnay",
"Graye-sur-Mer",
"Gray-la-Ville",
"gray-la-villois",
"Gray-la-Villois",
"gray-la-villoise",
"Gray-la-Villoise",
"gray-la-villoises",
"Gray-la-Villoises",
"Grébault-Mesnil",
"Grebs-Niendorf",
"Grèce-Centrale",
"Grèce-Occidentale",
"Gréez-sur-Roc",
"Grégy-sur-Yerre",
"Gremersdorf-Buchholz",
"Grenade-sur-Garonne",
"Grenade-sur-l'Adour",
"grenadiers-voltigeurs",
"grenadier-voltigeur",
"Grenand-lès-Sombernon",
"Grenant-lès-Sombernon",
"Greneville-en-Beauce",
"Grenier-Montgon",
"grenouilles-taureaux",
"grenouille-taureau",
"Grenville-sur-la-Rouge",
"Grenzach-Wyhlen",
"Gréoux-les-Bains",
"Grésigny-Sainte-Reine",
"Gresse-en-Vercors",
"Gressoney-La-Trinité",
"Gressoney-Saint-Jean",
"Grésy-sur-Aix",
"Grésy-sur-Isère",
"Gretz-Armainvilliers",
"Gréville-Hague",
"Grez-Doiceau",
"Grez-en-Bouère",
"Grézet-Cavagnan",
"Grézieu-la-Varenne",
"Grézieu-le-Marché",
"Grézieux-le-Fromental",
"Grez-Neuville",
"grez-neuvillois",
"Grez-Neuvillois",
"grez-neuvilloise",
"Grez-Neuvilloise",
"grez-neuvilloises",
"Grez-Neuvilloises",
"Grez-sur-Loing",
"griche-dents",
"Griesbach-au-Val",
"Griesbach-le-Bastberg",
"Griesheim-près-Molsheim",
"Griesheim-sur-Souffel",
"Griesheim-sur-Souffle",
"gri-gri",
"gri-gris",
"gril-au-vent",
"grille-midi",
"grille-pain",
"grille-pains",
"Grimaucourt-en-Woëvre",
"Grimaucourt-près-Sampigny",
"Grincourt-lès-Pas",
"Grindorff-Bizing",
"grippe-argent",
"grippe-chair",
"grippe-fromage",
"grippe-fromages",
"grippe-minaud",
"grippe-minauds",
"grippe-sou",
"grippe-sous",
"grise-bonne",
"grises-bonnes",
"gris-farinier",
"gris-fariniers",
"gris-gris",
"gris-pendart",
"gris-pendarts",
"Grisy-les-Plâtres",
"Grisy-Suisnes",
"Grisy-sur-Seine",
"Grivy-Loisy",
"Groot-Abeele",
"Groot-Ammers",
"Groot-Gelmen",
"Groot-Loo",
"Groot-Loon",
"Groot-Valkenisse",
"Groot-Wetsinge",
"Grosbois-en-Montagne",
"Grosbois-lès-Tichey",
"Groslée-Saint-Benoît",
"Grosley-sur-Risle",
"Groß-Bieberau",
"grosse-de-fonte",
"grosse-gorge",
"Grosse-Islois",
"Grosseto-Prugna",
"Gross-Gerau",
"Groß-Gerau",
"grosso-modo",
"Groß-Rohrheim",
"Großtreben-Zwethau",
"Groß-Umstadt",
"Groß-Zimmern",
"Grote-Brogel",
"Grote-Spouwen",
"Grouches-Luchuel",
"Gruchet-le-Valasse",
"Gruchet-Saint-Siméon",
"Gruey-lès-Surance",
"Grugé-l'Hôpital",
"Grun-Bordas",
"Grünhain-Beierfeld",
"Grunow-Dammendorf",
"g-strophanthine",
"guarasu'we",
"Gudmont-Villiers",
"Guéblange-lès-Dieuze",
"Guéblange-lès-Sarralbe",
"gué-d'allérien",
"Gué-d'Allérien",
"gué-d'allérienne",
"Gué-d'Allérienne",
"gué-d'allériennes",
"Gué-d'Allériennes",
"gué-d'allériens",
"Gué-d'Allériens",
"Gué-d'Hossus",
"Guémené-Penfao",
"Guémené-sur-Scorff",
"guerre-éclair",
"Guessling-Hémering",
"guet-apens",
"guet-à-pent",
"guet-appens",
"guets-apens",
"guette-chemin",
"gueule-bée",
"gueule-de-loup",
"gueules-de-loup",
"Gueutteville-les-Grès",
"Gueytes-et-Labastide",
"Gugney-aux-Aulx",
"guide-âne",
"guide-ânes",
"guide-fil",
"guide-fils",
"guide-main",
"guigne-cul",
"guigne-culs",
"Guigneville-sur-Essonne",
"Guignicourt-sur-Vence",
"Guiler-sur-Goyen",
"guilherandaise-grangeoise",
"Guilherandaise-Grangeoise",
"guilherandaises-grangeoises",
"Guilherandaises-Grangeoises",
"guilherandais-grangeois",
"Guilherandais-Grangeois",
"Guilherand-Granges",
"guili-guili",
"guili-guilis",
"guillemet-apostrophe",
"guillemets-apostrophes",
"Guilligomarc'h",
"Guillon-les-Bains",
"Guinarthe-Parenties",
"Guindrecourt-aux-Ormes",
"Guindrecourt-sur-Blaise",
"Guinée-Bissao",
"Guinée-Bissau",
"Guinée-Conakry",
"Guipry-Messac",
"Guiry-en-Vexin",
"Guitalens-L'Albarède",
"guitare-harpe",
"guitare-violoncelle",
"guitare-violoncelles",
"Guitera-les-Bains",
"guit-guit",
"Gujan-Mestras",
"gulf-stream",
"gulf-streams",
"Gülitz-Reetz",
"Gulpen-Wittem",
"Gülzow-Prüzen",
"Gumbrechtshoffen-Oberbronn",
"Günthersleben-Wechmar",
"Gurcy-le-Châtel",
"Gurgy-la-Ville",
"Gurgy-le-Château",
"gusathion-éthyl",
"gusathion-méthyl",
"Gusow-Platkow",
"Gutenzell-Hürbel",
"Gutierre-Muñoz",
"gut-komm",
"gutta-percha",
"Guttet-Feschel",
"gutturo-maxillaire",
"Guyans-Durnes",
"Guyans-Vennes",
"Guyencourt-Saulcourt",
"Guyencourt-sur-Noye",
"gwich'in",
"Gy-en-Sologne",
"Gyé-sur-Seine",
"Gy-les-Nonains",
"Gy-l'Evêque",
"Gy-l'Évêque",
"Ha'ava",
"Habay-la-Neuve",
"Habay-la-Vieille",
"Habère-Lullin",
"Habère-Poche",
"hache-bâché",
"hache-écorce",
"hache-écorces",
"hache-légume",
"hache-légumes",
"hache-paille",
"hache-pailles",
"Hadancourt-le-Haut-Clocher",
"Hadigny-les-Verrières",
"Hadonville-lès-Lachaussée",
"Häg-Ehrsberg",
"Hagenthal-le-Bas",
"Hagenthal-le-Haut",
"hagio-onomastique",
"hagio-onomastiques",
"Hagnéville-et-Roncourt",
"ha-ha",
"hâ-hâ",
"ha-has",
"hâ-hâs",
"Haine-Saint-Paul",
"Haine-Saint-Pierre",
"hakko-ryu",
"hale-à-bord",
"hale-avans",
"hale-avant",
"hale-avants",
"hale-bas",
"hale-breu",
"hale-croc",
"hale-dedans",
"hale-dehors",
"haleine-de-Jupiter",
"haleines-de-Jupiter",
"Halenbeck-Rohlsdorf",
"half-and-half",
"half-pipe",
"half-pipes",
"half-track",
"half-tracks",
"Halifaxois-du-Sud",
"Halle-Booienhoven",
"Halle-Heide",
"Halle-Nijman",
"Hallennes-lez-Haubourdin",
"Halles-sous-les-Côtes",
"Halling-lès-Boulay",
"Halling-lès-Boulay-Moselle",
"Halloy-lès-Pernois",
"halo-halo",
"halo-lunaire",
"halos-lunaires",
"haloxyfop-éthoxyéthyl",
"haloxyfop-R",
"halte-garderie",
"halte-garderies",
"halte-là",
"haltes-garderies",
"halvadji-bachi",
"Hamblain-les-Prés",
"Hamelin-Pyrmont",
"Ham-en-Artois",
"Hames-Boucres",
"hames-boucrois",
"Hames-Boucrois",
"hames-boucroise",
"Hames-Boucroise",
"hames-boucroises",
"Hames-Boucroises",
"Ham-les-Moines",
"Hamme-Mille",
"hamme-millois",
"Hamme-Millois",
"Hamme-Milloise",
"ham-nalinnois",
"Ham-Nalinnois",
"Ham-Nalinnoise",
"Ham-Nordois",
"Hamont-Achel",
"Ham-sans-Culottes",
"Ham-sous-Varsberg",
"Ham-sur-Heure",
"Ham-sur-Heure-Nalinnes",
"Ham-sur-Meuse",
"Ham-sur-Sambre",
"Han-devant-Pierrepont",
"handi-accessible",
"handi-accessibles",
"Hanerau-Hademarschen",
"Hangen-Weisheim",
"Hangest-en-Santerre",
"Hangest-sur-Somme",
"Han-lès-Juvigny",
"Hannogne-Saint-Martin",
"Hannogne-Saint-Rémy",
"Hannonville-sous-les-Côtes",
"Hannonville-Suzémont",
"Han-sur-Lesse",
"Han-sur-Meuse",
"Han-sur-Nied",
"Hantes-Wihéries",
"happe-chair",
"happe-chat",
"happe-foie",
"harai-goshi",
"haraï-goshi",
"hara-kiri",
"hara-kiris",
"hara-kiriser",
"hara-kiriser",
"Haraucourt-sur-Seille",
"hard-discount",
"hard-discountisa",
"hard-discountisai",
"hard-discountisaient",
"hard-discountisais",
"hard-discountisait",
"hard-discountisâmes",
"hard-discountisant",
"hard-discountisas",
"hard-discountisasse",
"hard-discountisassent",
"hard-discountisasses",
"hard-discountisassiez",
"hard-discountisassions",
"hard-discountisât",
"hard-discountisâtes",
"hard-discountise",
"hard-discountisé",
"hard-discountisée",
"hard-discountisées",
"hard-discountisent",
"hard-discountiser",
"hard-discountisera",
"hard-discountiserai",
"hard-discountiseraient",
"hard-discountiserais",
"hard-discountiserait",
"hard-discountiseras",
"hard-discountisèrent",
"hard-discountiserez",
"hard-discountiseriez",
"hard-discountiserions",
"hard-discountiserons",
"hard-discountiseront",
"hard-discountises",
"hard-discountisés",
"hard-discountisez",
"hard-discountisiez",
"hard-discountisions",
"hard-discountisons",
"hard-discounts",
"Hardecourt-aux-Bois",
"Hardencourt-Cocherel",
"Hardinxveld-Giessendam",
"hardi-petit",
"Hardivillers-en-Vexin",
"Hargarten-aux-Mines",
"Hargeville-sur-Chée",
"harpe-guitare",
"harpe-luth",
"Harréville-les-Chanteurs",
"Hartennes-et-Taux",
"Harth-Pöllnitz",
"Hartmannsdorf-Reichenau",
"has-been",
"has-beens",
"Hastière-Lavaux",
"Hastière-par-delà",
"Haucourt-en-Cambrésis",
"Haucourt-la-Rigole",
"Haucourt-Moulaine",
"Hauenstein-Ifenthal",
"Haumont-lès-Lachaussée",
"Haumont-près-Samogneux",
"Hauptwil-Gottshaus",
"hausse-col",
"hausse-cols",
"hausse-pied",
"hausse-pieds",
"hausse-queue",
"Hautecourt-lès-Broville",
"Hautecourt-Romanèche",
"Hautefage-la-Tour",
"Hautem-Sainte-Marguerite",
"Hautem-Saint-Liévin",
"Hautepierre-le-Châtelet",
"Hauterive-la-Fresse",
"Hauteville-Gondon",
"Hauteville-la-Guichard",
"Hauteville-lès-Dijon",
"Hauteville-Lompnes",
"Hauteville-Lompnés",
"Hauteville-sur-Fier",
"Hauteville-sur-Mer",
"Hauthem-Saint-Liévin",
"Hautot-l'Auvray",
"Hautot-le-Vatois",
"Hautot-Saint-Sulpice",
"Hautot-sur-Mer",
"Hautot-sur-Seine",
"Hautteville-Bocage",
"Hautvillers-Ouville",
"Havre-Saint-Pierrois",
"haye-le-comtois",
"Haye-le-Comtois",
"haye-le-comtoise",
"Haye-le-Comtoise",
"haye-le-comtoises",
"Haye-le-Comtoises",
"Haÿ-les-Roses",
"Hazerswoude-Dorp",
"Hazerswoude-Rijndijk",
"Hechtel-Eksel",
"Heckelberg-Brunow",
"hecto-ohm",
"hecto-ohms",
"Hédé-Bazouges",
"Heeswijk-Dinther",
"Heeze-Leende",
"Heiltz-le-Hutier",
"Heiltz-le-Maurupt",
"Heiltz-l'Evêque",
"Heiltz-l'Évêque",
"Heining-lès-Bouzonville",
"Heist-op-den-Berg",
"Heist-sur-la-Montagne",
"Helen-Bos",
"Hellemmes-Lille",
"Hellenhahn-Schellenberg",
"Hellering-lès-Fénétrange",
"Hellschen-Heringsand-Unterschaar",
"Helmstadt-Bargen",
"Hem-Hardinval",
"hémi-dodécaèdre",
"hémi-épiphyte",
"hémi-épiphytes",
"hémi-octaèdre",
"Hem-Lenglet",
"Hem-Monacu",
"Hendecourt-lès-Cagnicourt",
"Hendecourt-lès-Ransart",
"Hendrik-Ido-Ambacht",
"Hénin-Beaumont",
"Hénin-sur-Cojeul",
"Henri-Chapelle",
"Henstedt-Ulzburg",
"hentai-gana",
"hépato-biliaire",
"hépato-cystique",
"hépato-cystiques",
"hépato-gastrique",
"hépato-gastrite",
"hépato-gastrites",
"herbe-à-cochon",
"herbe-au-bitume",
"herbe-aux-femmes-battues",
"herbe-aux-plaies",
"herbes-à-cochon",
"herbes-au-bitume",
"herbes-aux-femmes-battues",
"herbes-aux-plaies",
"herbes-aux-taupes",
"Herck-la-Ville",
"Herck-Saint-Lambert",
"herd-book",
"Herdwangen-Schönach",
"Héricourt-en-Caux",
"Héricourt-Saint-Samson",
"Héricourt-sur-Thérain",
"Heringen-sur-Helme",
"Hérinnes-lez-Enghien",
"Herlin-le-Sec",
"Hermalle-sous-Argenteau",
"Hermalle-sous-Huy",
"Hermanville-sur-Mer",
"Hermeton-sur-Meuse",
"Herméville-en-Woëvre",
"Hermitage-Lorge",
"Hermival-les-Vaux",
"Hernán-Pérez",
"héroï-comique",
"héroï-comiques",
"Hérouville-en-Vexin",
"Hérouville-Saint-Clair",
"Herpy-l'Arlésienne",
"Herren-Sulzbach",
"Herrlisheim-près-Colmar",
"Herschweiler-Pettersheim",
"Hersfeld-Rotenburg",
"Hersin-Coupigny",
"Héry-sur-Alby",
"Herzebrock-Clarholz",
"Hesdigneul-lès-Béthune",
"Hesdigneul-lès-Boulogne",
"Hesdin-l'Abbé",
"hétéro-céphalophorie",
"hétéro-céphalophories",
"hétéro-épitaxie",
"hétéro-évaluation",
"hétéro-évaluations",
"hétéro-réparation",
"hétéro-réparations",
"Hettange-Grande",
"Heubécourt-Haricourt",
"Heuchelheim-Klingen",
"Heucourt-Croquoison",
"Heudicourt-sous-les-Côtes",
"Heudreville-en-Lieuvin",
"Heudreville-sur-Eure",
"Heugleville-sur-Scie",
"Heugueville-sur-Sienne",
"Heuilley-Cotton",
"Heuilley-le-Grand",
"Heuilley-sur-Saône",
"Heume-l'Eglise",
"Heume-l'Église",
"heure-homme",
"Heure-le-Romain",
"Heure-le-Tixhe",
"heure-lumière",
"heures-hommes",
"heures-lumière",
"heurte-pot",
"Heusden-Zolder",
"hexa-core",
"hexa-cores",
"hexa-rotor",
"hexa-rotors",
"Hières-sur-Amby",
"Hiers-Brouage",
"hi-fi",
"high-life",
"high-tech",
"Higuères-Souye",
"hi-han",
"Hilgertshausen-Tandern",
"himène-plume",
"Hinzert-Pölert",
"hip-hop",
"hip-hopisa",
"hip-hopisai",
"hip-hopisaient",
"hip-hopisais",
"hip-hopisait",
"hip-hopisâmes",
"hip-hopisant",
"hip-hopisas",
"hip-hopisasse",
"hip-hopisassent",
"hip-hopisasses",
"hip-hopisassiez",
"hip-hopisassions",
"hip-hopisât",
"hip-hopisâtes",
"hip-hopise",
"hip-hopisé",
"hip-hopisée",
"hip-hopisées",
"hip-hopisent",
"hip-hopiser",
"hip-hopisera",
"hip-hopiserai",
"hip-hopiseraient",
"hip-hopiserais",
"hip-hopiserait",
"hip-hopiseras",
"hip-hopisèrent",
"hip-hopiserez",
"hip-hopiseriez",
"hip-hopiserions",
"hip-hopiserons",
"hip-hopiseront",
"hip-hopises",
"hip-hopisés",
"hip-hopisez",
"hip-hopisiez",
"hip-hopisions",
"hip-hopisons",
"hippocampe-feuillu",
"hippocampes-feuillus",
"Hirz-Maulsbach",
"hispano-américain",
"hispano-américaine",
"hispano-américaines",
"hispano-américains",
"hispano-arabe",
"hispano-arabes",
"hispano-mauresque",
"hispano-moresque",
"hispano-moresques",
"histoire-géo",
"historico-culturelle",
"hitléro-trotskisme",
"hitléro-trotskiste",
"hit-parade",
"hit-parades",
"Hiva-Oa",
"hoat-chi",
"Hochdorf-Assenheim",
"hoche-cul",
"hoche-culs",
"hoche-queue",
"Hô-Chi-Minh-Ville",
"Hochstetten-Dhaun",
"Hodenc-en-Bray",
"Hodenc-l'Evêque",
"Hodenc-l'Évêque",
"Hodeng-au-Bosc",
"Hodeng-Hodenger",
"Hofstetten-Flüh",
"Hohenberg-Krusemark",
"Hohenfels-Essingen",
"Höhenkirchen-Siegertsbrunn",
"Hohenstein-Ernstthal",
"Hohen-Sülzen",
"Höhr-Grenzhausen",
"hokkaïdo-ken",
"hold-up",
"Hollande-du-Nord",
"Hollande-du-Sud",
"Hollande-Méridionale",
"Hollande-Septentrionale",
"Hollern-Twielenfleth",
"Hollogne-aux-Pierres",
"Hollogne-sur-Geer",
"Holstein-de-l'Est",
"Hombourg-Budange",
"Hombourg-Haut",
"Hôme-Chamondot",
"home-jacking",
"home-jackings",
"home-sitter",
"home-sitters",
"home-sitting",
"home-sittings",
"home-trainer",
"home-trainers",
"homme-animal",
"homme-chacal",
"homme-clé",
"homme-femme",
"homme-fourmi",
"homme-grenouille",
"homme-léopard",
"homme-loup",
"homme-mort",
"homme-morts",
"homme-objet",
"homme-orchestre",
"homme-robot",
"homme-sandwich",
"hommes-chacals",
"hommes-clés",
"hommes-femmes",
"hommes-fourmis",
"hommes-grenouilles",
"hommes-léopards",
"hommes-loups",
"hommes-objets",
"hommes-orchestres",
"hommes-robots",
"hommes-sandwiches",
"hommes-sandwichs",
"hommes-troncs",
"homme-tronc",
"homo-épitaxie",
"homo-épitaxies",
"honey-dew",
"Hong-Kong",
"hong-kongais",
"Hong-kongais",
"hong-kongaise",
"Hong-kongaise",
"hong-kongaises",
"Hong-kongaises",
"Honguemare-Guenouville",
"hon-hergeois",
"Hon-Hergeois",
"hon-hergeoise",
"Hon-Hergeoise",
"hon-hergeoises",
"Hon-Hergeoises",
"Hon-Hergies",
"Honnecourt-sur-Escaut",
"Honnécourt-sur-l'Escaut",
"Honor-de-Cos",
"Hoog-Baarlo",
"Hoog-Caestert",
"Hoogezand-Sappemeer",
"Hoog-Geldrop",
"Hoog-Keppel",
"Hoorebeke-Saint-Corneille",
"Hoorebeke-Sainte-Marie",
"Hôpital-Camfrout",
"Hôpital-d'Orion",
"Hôpital-du-Grosbois",
"Hôpital-le-Grand",
"Hôpital-le-Mercier",
"Hôpital-Saint-Blaise",
"Hôpital-Saint-Lieffroy",
"Hôpital-sous-Rochefort",
"Hoppstädten-Weiersbach",
"Horbourg-Wihr",
"Horion-Hozémont",
"Horndon-on-the-Hill",
"Hornow-Wadelsdorf",
"Hornoy-le-Bourg",
"horo-kilométrique",
"horo-kilométriques",
"Horrenbach-Buchen",
"hors-bord",
"hors-bords",
"hors-champ",
"hors-concours",
"hors-d'oeuvre",
"hors-d'œuvre",
"horse-ball",
"horse-guard",
"horse-guards",
"Hörselberg-Hainich",
"hors-fonds",
"hors-jeu",
"hors-jeux",
"hors-la-loi",
"hors-ligne",
"hors-lignes",
"hors-norme",
"hors-piste",
"hors-pistes",
"hors-sac",
"hors-série",
"hors-séries",
"hors-service",
"hors-sol",
"hors-sols",
"hors-sujet",
"hors-temps",
"hors-texte",
"hors-textes",
"Horville-en-Ornois",
"Hospitalet-du-Larzac",
"Hospitalet-près-l'Andorre",
"Hoste-Haut",
"hostello-flavien",
"Hostello-Flavien",
"hostello-flavienne",
"Hostello-Flavienne",
"hostello-flaviennes",
"Hostello-Flaviennes",
"hostello-flaviens",
"Hostello-Flaviens",
"hot-dog",
"hot-dogs",
"Hôtel-de-Ville",
"hôtel-Dieu",
"Hôtel-Dieu",
"Hôtellerie-de-Flée",
"hôtellerie-restauration",
"hôtels-Dieu",
"hot-melt",
"hot-melts",
"Hotot-en-Auge",
"hot-plug",
"Hottot-les-Bagues",
"Houdain-lez-Bavay",
"Houdelaucourt-sur-Othain",
"Houdeng-Aimeries",
"Houdeng-Goegnies",
"Houdeng-Gœgnies",
"Houlbec-Cocherel",
"Houlbec-près-le-Gros-Theil",
"houl'eau",
"Houphouët-Boigny",
"Houplin-Ancoisne",
"house-boats",
"Houtain-le-Val",
"Houtain-l'Évêque",
"Houtain-Saint-Siméon",
"Hout-Blerick",
"Houthalen-Helchteren",
"Houville-en-Vexin",
"Houville-la-Branche",
"Houvin-Houvigneul",
"houx-frelon",
"houx-frelons",
"Hoya-Gonzalo",
"Huanne-Montmartin",
"Hubert-Folie",
"Huby-Saint-Leu",
"Huércal-Overa",
"Huétor-Tájar",
"Hugleville-en-Caux",
"Huilly-sur-Seille",
"huis-clos",
"Huisnes-sur-Mer",
"Huison-Longueville",
"Huisseau-en-Beauce",
"Huisseau-sur-Cosson",
"Huisseau-sur-Mauves",
"huitante-neuf",
"huitante-neuvième",
"huitante-neuvièmes",
"huit-marsiste",
"huit-marsistes",
"huit-pieds",
"huit-reflets",
"huit-ressorts",
"Humes-Jorquenay",
"hume-vent",
"huppe-col",
"Hures-la-Parade",
"Hurons-Wendat",
"huron-wendat",
"Husseren-les-Châteaux",
"Husseren-Wesserling",
"Hussigny-Godbrange",
"hydrargyro-cyanate",
"hydrargyro-cyanates",
"hydraulico-pneumatique",
"hydro-aviation",
"hydro-aviations",
"hydro-avion",
"hydro-avions",
"hydro-électricité",
"hydro-électricités",
"hydro-électrique",
"hydro-électriques",
"hydro-ensemencement",
"hydro-ensemencements",
"hydro-météorologie",
"Hyencourt-le-Grand",
"Hyencourt-le-Petit",
"hyène-garou",
"hyènes-garous",
"Hyèvre-Magny",
"Hyèvre-Paroisse",
"hyo-épiglottique",
"hyo-épiglottiques",
"hyo-pharyngien",
"hypo-centre",
"hypo-centres",
"hypo-iodeuse",
"hypo-iodeuses",
"hypo-iodeux",
"hypothético-déductif",
"hystéro-catalepsie",
"hystéro-catalepsies",
"hystéro-épilepsie",
"hystéro-épilepsies",
"Iamalo-Nénètsie",
"iatro-magique",
"iatro-magiques",
"ibéro-roman",
"i-butane",
"i-butanes",
"ice-belt",
"ice-belts",
"ice-berg",
"ice-bergs",
"ice-blink",
"ice-blinks",
"ice-bloc",
"ice-blocs",
"ice-cream",
"ice-creams",
"ice-foot",
"ice-foots",
"ice-rapt",
"ice-rapts",
"ice-table",
"ice-tables",
"ici-bas",
"Idanha-a-Nova",
"Idar-Oberstein",
"Idaux-Mendy",
"idéal-type",
"idée-force",
"idée-maîtresse",
"idées-forces",
"idées-maîtresses",
"idio-électricité",
"idio-électrique",
"idio-électriques",
"Idrac-Respaillès",
"Ids-Saint-Roch",
"i.-e.",
"ifira-mele",
"ifira-meles",
"I-frame",
"Igny-Comblizy",
"igny-marin",
"Igny-Marin",
"igny-marine",
"Igny-Marine",
"igny-marines",
"Igny-Marines",
"igny-marins",
"Igny-Marins",
"III-V",
"II-VI",
"Île-aux-Moines",
"Île-Bouchard",
"Île-d'Aix",
"Île-d'Anticosti",
"Île-d'Arz",
"Île-de-Batz",
"Île-de-Bréhat",
"Ile-de-France",
"île-de-France",
"Île-de-France",
"Île-d'Elle",
"Île-de-Sein",
"Île-d'Houat",
"Île-d'Olonne",
"Île-du-Prince-Édouard",
"Île-d'Yeu",
"île-État",
"Île-Molène",
"iléo-cæcal",
"iléo-cæcale",
"iléo-cæcales",
"iléo-cæcaux",
"iléo-colique",
"iléo-coliques",
"iléos-meldois",
"Iléos-Meldois",
"iléos-meldoise",
"Iléos-Meldoise",
"iléos-meldoises",
"Iléos-Meldoises",
"île-prison",
"Île-Rousse",
"Île-Saint-Denis",
"Îles-de-la-Madeleine",
"îles-États",
"îles-prisons",
"île-tudiste",
"Île-Tudiste",
"île-tudistes",
"Île-Tudistes",
"Île-Tudy",
"iliaco-fémoral",
"iliaco-musculaire",
"ilio-pectiné",
"ilio-pubien",
"ilio-scrotal",
"Ille-et-Vilaine",
"Ille-sur-Têt",
"Illeville-sur-Montfort",
"Illier-et-Laramade",
"Illiers-Combray",
"Illiers-l'Evêque",
"Illiers-l'Évêque",
"Illkirch-Graffenstaden",
"Illnau-Effretikon",
"ilo-dionysien",
"Ilo-Dionysien",
"îlo-dionysien",
"Îlo-Dionysien",
"ilo-dionysienne",
"Ilo-Dionysienne",
"Îlo-Dionysienne",
"ilo-dionysiennes",
"Ilo-Dionysiennes",
"ilo-dionysiens",
"Ilo-Dionysiens",
"image-gradient",
"imazaméthabenz-méthyl",
"immuno-pharmacologie",
"immuno-pharmacologies",
"impari-nervé",
"impari-nervié",
"impari-penné",
"impératrice-mère",
"impératrices-mères",
"import-export",
"in-12",
"in-12º",
"in-16",
"in-16º",
"in-18",
"in-18º",
"in-32",
"in-4",
"in-4º",
"in-4.º",
"in-4to",
"in-6",
"in-6º",
"in-8",
"in-8º",
"in-8.º",
"in-8vo",
"in-cent-vingt-huit",
"inch'allah",
"inch'Allah",
"Inch'allah",
"Inch'Allah",
"Inchy-en-Artois",
"incito-moteur",
"incito-motricité",
"income-tax",
"indane-1,3-dione",
"inde-plate",
"india-océanisme",
"india-océanismes",
"in-dix-huit",
"in-douze",
"Indre-et-Loire",
"in-duodecimo",
"in-fº",
"info-ballon",
"info-ballons",
"info-bulle",
"info-bulles",
"in-folio",
"ingénieur-conseil",
"ingénieur-docteur",
"ingénieure-conseil",
"ingénieures-conseils",
"ingénieur-maître",
"ingénieurs-conseils",
"ingénieurs-docteurs",
"ingénieurs-maîtres",
"Ingrandes-de-Touraine",
"in-huit",
"injonction-bâillon",
"Injoux-Génissiat",
"in-manus",
"in-octavo",
"in-plano",
"in-plº",
"in-promptu",
"in-quarto",
"insecto-mortifère",
"insecto-mortifères",
"in-sedecimo",
"in-seize",
"in-six",
"inspecteur-chef",
"inspecteurs-chefs",
"insulino-dépendant",
"insulino-dépendante",
"insulino-dépendantes",
"insulino-dépendants",
"Interlaken-Oberhasli",
"interno-médial",
"interro-négatif",
"intervertébro-costal",
"in-trente-deux",
"Intville-la-Guétard",
"inuit-aléoute",
"inuit-aléoutes",
"Inval-Boiron",
"in-vingt-quatre",
"in-vitro",
"Inzinzac-Lochrist",
"iodo-borique",
"iodo-chlorure",
"iodosulfuron-méthyl-sodium",
"iowa-oto",
"iowa-otos",
"Î.-P.-É.",
"Iré-le-Sec",
"Iruraiz-Gauna",
"ischio-anal",
"ischio-clitorien",
"ischio-fémoral",
"ischio-fémorale",
"ischio-fémorales",
"ischio-fémoraux",
"ischio-jambier",
"ischio-jambière",
"ischio-jambières",
"ischio-jambiers",
"ischio-périnéal",
"ischio-tibial",
"ischio-tibiaux",
"Is-en-Bassigny",
"Isigny-le-Buat",
"Isigny-sur-Mer",
"IS-IS",
"Isle-Adam",
"Isle-Arné",
"Isle-Aubigny",
"Isle-Aumont",
"Isle-Bouzon",
"Isle-d'Abeau",
"Isle-de-Noé",
"Isle-d'Espagnac",
"Isle-en-Dodon",
"Isle-et-Bardais",
"Isle-Jourdain",
"Isle-Saint-Georges",
"Isles-les-Meldeuses",
"Isles-lès-Villenoy",
"Isle-sous-Montréal",
"Isles-sur-Suippe",
"Isle-sur-la-Sorgue",
"Isle-sur-le-Doubs",
"Isle-sur-Marne",
"Isle-sur-Serein",
"Isle-Vertois",
"Isolaccio-di-Fiumorbo",
"isoxadifen-éthyl",
"israélo-syrienne",
"Issancourt-et-Rumel",
"Issoudun-Létrieix",
"Is-sur-Tille",
"Issy-les-Moulineaux",
"Issy-l'Evêque",
"Issy-l'Évêque",
"istro-roumain",
"Ithorots-Olhaïby",
"Ivano-Fracena",
"Ivoy-le-Petit",
"Ivoy-le-Pré",
"Ivoz-Ramet",
"ivre-mort",
"ivre-morte",
"ivres-mortes",
"ivres-morts",
"Ivry-en-Montagne",
"Ivry-la-Bataille",
"Ivry-le-Temple",
"Ivry-sur-Seine",
"Izaut-de-l'Hôtel",
"Izel-lès-Equerchin",
"Izel-lès-Équerchin",
"Izel-lès-Hameau",
"Izel-les-Hameaux",
"Izon-la-Bruisse",
"Jabreilles-les-Bordes",
"jack-russell",
"Jacob-Bellecombette",
"Jagny-sous-Bois",
"jaguar-garou",
"jaguars-garous",
"jaï-alaï",
"jaï-alaïs",
"Jailly-les-Moulins",
"Jaligny-sur-Besbre",
"jambon-beurre",
"jambon-des-jardiniers",
"jambons-des-jardiniers",
"Jammu-et-Cachemire",
"jam-sessions",
"Jandrain-Jandrenouille",
"Janville-sur-Juine",
"Jard-sur-Mer",
"Jarnac-Champagne",
"Jarville-la-Malgrange",
"Jarzé-Villages",
"Jassans-Riottier",
"Jau-Dignac-et-Loirac",
"Jaunay-Clan",
"jaunay-clanais",
"Jaunay-Clanais",
"jaunay-clanaise",
"Jaunay-Clanaise",
"jaunay-clanaises",
"Jaunay-Clanaises",
"Jaunay-Marigny",
"Javerlhac-et-la-Chapelle-Saint-Robert",
"Javron-les-Chapelles",
"JAX-RPC",
"JAX-RS",
"Jeannois-Mitissien",
"jeans-de-gand",
"jeans-de-janten",
"je-m'en-fichisme",
"je-m'en-fichismes",
"je-m'en-fichiste",
"je-m'en-fichistes",
"je-m'en-foutisme",
"je-m'en-foutismes",
"je-m'en-foutiste",
"je-m'en-foutistes",
"Jemeppe-sur-Sambre",
"je-ne-sais-quoi",
"jérôme-boschisme",
"jérôme-boschismes",
"Jésus-Christ",
"jet-set",
"jet-sets",
"jet-settisa",
"jet-settisai",
"jet-settisaient",
"jet-settisais",
"jet-settisait",
"jet-settisâmes",
"jet-settisant",
"jet-settisas",
"jet-settisasse",
"jet-settisassent",
"jet-settisasses",
"jet-settisassiez",
"jet-settisassions",
"jet-settisât",
"jet-settisâtes",
"jet-settise",
"jet-settisé",
"jet-settisée",
"jet-settisées",
"jet-settisent",
"jet-settiser",
"jet-settisera",
"jet-settiserai",
"jet-settiseraient",
"jet-settiserais",
"jet-settiserait",
"jet-settiseras",
"jet-settisèrent",
"jet-settiserez",
"jet-settiseriez",
"jet-settiserions",
"jet-settiserons",
"jet-settiseront",
"jet-settises",
"jet-settisés",
"jet-settisez",
"jet-settisiez",
"jet-settisions",
"jet-settisons",
"jet-stream",
"jet-streams",
"jette-bouts",
"Jettingen-Scheppach",
"Jeu-les-Bois",
"Jeu-Maloches",
"jeu-malochois",
"Jeu-Malochois",
"jeu-malochoise",
"Jeu-Malochoise",
"jeu-malochoises",
"Jeu-Malochoises",
"jeu-parti",
"Jeux-lès-Bard",
"Ji-hu",
"Ji-hun",
"jiu-jitsu",
"Jodoigne-Souveraine",
"John-Bull",
"Joigny-sur-Meuse",
"joint-venture",
"joint-ventures",
"Joinville-le-Pont",
"joli-bois",
"Jollain-Merlin",
"Jonchery-sur-Suippe",
"Jonchery-sur-Vesle",
"Jonquerets-de-Livet",
"Jonquières-Saint-Vincent",
"Jonville-en-Woëvre",
"Jonzier-Epagny",
"Jonzier-Épagny",
"Jorat-Menthue",
"Jouars-Pontchartrain",
"Joué-du-Bois",
"Joué-du-Plain",
"Joué-en-Charnie",
"Joué-Étiau",
"Joué-l'Abbé",
"Joué-lès-Tours",
"Joué-sur-Erdre",
"Jouet-sur-l'Aubois",
"jour-homme",
"jour-lumière",
"Jours-en-Vaux",
"jours-hommes",
"Jours-lès-Baigneux",
"jours-lumière",
"Jou-sous-Monjou",
"Joux-la-Ville",
"Jouxtens-Mézery",
"Jouy-aux-Arches",
"Jouy-en-Argonne",
"Jouy-en-Josas",
"Jouy-en-Pithiverais",
"Jouy-le-Châtel",
"Jouy-le-Moutier",
"Jouy-le-Potier",
"Jouy-lès-Reims",
"Jouy-Mauvoisin",
"Jouy-sous-Thelle",
"Jouy-sur-Eure",
"Jouy-sur-Morin",
"J-pop",
"J-rock",
"j't'aime",
"Juan-les-Pins",
"Juaye-Mondaye",
"Jubbega-Schurega",
"Jû-Belloc",
"judéo-allemand",
"judéo-alsacien",
"judéo-arabe",
"judéo-arabes",
"judéo-asiatique",
"judéo-bolchévisme",
"judéo-centrisme",
"judéo-chrétien",
"judéo-chrétienne",
"judéo-chrétiennes",
"judéo-chrétiens",
"judéo-christianisme",
"judéo-christiano-islamique",
"judéo-christiano-islamiques",
"judéo-christiano-musulman",
"judéo-espagnol",
"judéo-espagnole",
"judéo-espagnoles",
"judéo-espagnols",
"judéo-iranien",
"judéo-libyen",
"judéo-lybien",
"judéo-maçonnique",
"judéo-maçonniques",
"judéo-musulman",
"judéo-musulmans",
"judéo-nazi",
"judéo-nazis",
"Jugeals-Nazareth",
"Jugon-les-Lacs",
"juǀ'hoan",
"Juif-Errant",
"Juifs-Errants",
"Juigné-Béné",
"Juigné-des-Moutiers",
"Juigné-sur-Loire",
"Juigné-sur-Sarthe",
"Juillac-le-Coq",
"ju-jitsu",
"ju-ju",
"juke-box",
"juke-boxes",
"Jully-lès-Buxy",
"jully-sarçois",
"Jully-Sarçois",
"jully-sarçoise",
"Jully-Sarçoise",
"jully-sarçoises",
"Jully-Sarçoises",
"Jully-sur-Sarce",
"Jumilhac-le-Grand",
"junk-food",
"junk-foods",
"jupe-culotte",
"jupes-culottes",
"Jupille-sur-Meuse",
"juridico-politique",
"juridico-politiques",
"jusque-là",
"Jussecourt-Minecourt",
"Jussy-Champagne",
"Jussy-le-Chaudrier",
"juste-à-temps",
"juste-au-corps",
"Justine-Herbigny",
"Juvigny-en-Perthois",
"Juvigny-les-Vallées",
"Juvigny-le-Tertre",
"Juvigny-sous-Andaine",
"Juvigny-sur-Loison",
"Juvigny-sur-Orne",
"Juvigny-sur-Seulles",
"Juvigny-Val-d'Andaine",
"Juvincourt-et-Damary",
"Juvisy-sur-Orge",
"juxta-position",
"juxta-positions",
"Juzet-de-Luchon",
"Juzet-d'Izaut",
"Kaala-Gomen",
"Kabardino-Balkarie",
"Kaiser-Wilhelm-Koog",
"Kalenborn-Scheuern",
"kali'na",
"Kamerik-Houtdijken",
"Kamerik-Mijzijde",
"Kamp-Bornhofen",
"Kamperzeedijk-Oost",
"Kamperzeedijk-West",
"Kamp-Lintfort",
"Kani-Kéli",
"kan-kan",
"kan-kans",
"kansai-ben",
"Kapel-Avezaath",
"Kapellen-Drusweiler",
"Kapelle-op-den-Bos",
"Kappel-Grafenhausen",
"karachay-balkar",
"karafuto-ken",
"kara-gueuz",
"kara-kalpak",
"Kara-Koum",
"Karangasso-Sambla",
"Karangasso-Vigué",
"karatchaï-balkar",
"Karatchaïévo-Tcherkassie",
"Karbow-Vietlübbe",
"Karlsdorf-Neuthard",
"Karlstadt-sur-le-Main",
"Kasbach-Ohlenberg",
"Kasel-Golzig",
"Kastel-Staadt",
"Katlenburg-Lindau",
"Kaysersberg-Vignoble",
"K-bis",
"Kédange-sur-Canner",
"Kelpen-Oler",
"kem's",
"Kenz-Küstrow",
"kérato-pharyngien",
"kérato-staphylin",
"kérato-staphylins",
"Kerckom-lez-Saint-Trond",
"Kergrist-Moëlou",
"Kerk-Avezaath",
"Kerkom-bij-Sint-Truiden",
"Kerling-lès-Sierck",
"Kermaria-Sulard",
"Kermoroc'h",
"Kerprich-aux-Bois",
"Kerprich-lès-Dieuze",
"Kerry-Blue-terrier",
"Kersaint-Plabennec",
"Kersbeek-Miskom",
"Kessel-Eik",
"Kessel-Lo",
"khambo-lama",
"khambo-lamas",
"khatti-chérif",
"khatti-chérifs",
"khi-carré",
"khi-carrés",
"khi-deux",
"Kiel-Windeweer",
"kif-kif",
"kilo-électrons-volts",
"kiloélectrons-volts",
"kilo-électron-volt",
"kiloélectron-volt",
"kilo-électron-volts",
"kiloélectron-volts",
"kilogramme-force",
"kilogramme-poids",
"kilogrammes-force",
"kilogrammes-poids",
"kilomètre-heure",
"kilomètres-heure",
"kilo-ohm",
"kilo-ohms",
"kin-ball",
"Kingston-sur-Tamise",
"Kingston-upon-Hull",
"Kingston-upon-Thames",
"kino-congolais",
"Kino-Congolais",
"kip-kap",
"kip-kaps",
"Kirkby-in-Ashfield",
"Kirrwiller-Bosselshausen",
"Kirsch-lès-Sierck",
"kirsch-wasser",
"kirsch-wassers",
"kiss-in",
"kite-surf",
"kite-surfa",
"kite-surfai",
"kite-surfaient",
"kite-surfais",
"kite-surfait",
"kite-surfâmes",
"kite-surfant",
"kite-surfas",
"kite-surfasse",
"kite-surfassent",
"kite-surfasses",
"kite-surfassiez",
"kite-surfassions",
"kite-surfât",
"kite-surfâtes",
"kite-surfe",
"kite-surfé",
"kite-surfent",
"kite-surfer",
"kite-surfera",
"kite-surferai",
"kite-surferaient",
"kite-surferais",
"kite-surferait",
"kite-surferas",
"kite-surfèrent",
"kite-surferez",
"kite-surferiez",
"kite-surferions",
"kite-surferons",
"kite-surferont",
"kite-surfers",
"kite-surfes",
"kite-surfez",
"kite-surfiez",
"kite-surfions",
"kite-surfons",
"Kizil-Arvat",
"Klazienaveen-Noord",
"Klein-Amsterdam",
"Klein-Bedaf",
"Klein-Brabant",
"Klein-Delfgauw",
"Klein-Doenrade",
"Klein-Dongen",
"Kleine-Brogel",
"Kleine-Spouwen",
"Klein-Overleek",
"Klein-Ulsda",
"Klein-Valkenisse",
"Klein-Wetsinge",
"Klein-Winternheim",
"Klein-Zundert",
"Kleßen-Görne",
"Klooster-Lidlum",
"Klosters-Serneus",
"knicker-bocker",
"knicker-bockers",
"knock-out",
"knock-outa",
"knock-outai",
"knock-outaient",
"knock-outais",
"knock-outait",
"knock-outâmes",
"knock-outant",
"knock-outas",
"knock-outasse",
"knock-outassent",
"knock-outasses",
"knock-outassiez",
"knock-outassions",
"knock-outât",
"knock-outâtes",
"knock-oute",
"knock-outé",
"knock-outée",
"knock-outées",
"knock-outent",
"knock-outer",
"knock-outera",
"knock-outerai",
"knock-outeraient",
"knock-outerais",
"knock-outerait",
"knock-outeras",
"knock-outèrent",
"knock-outerez",
"knock-outeriez",
"knock-outerions",
"knock-outerons",
"knock-outeront",
"knock-outes",
"knock-outés",
"knock-outez",
"knock-outiez",
"knock-outions",
"knock-outons",
"knock-outs",
"Knokke-Heist",
"Knopp-Labach",
"Kobern-Gondorf",
"Kœur-la-Grande",
"Kœur-la-Petite",
"Kohren-Sahlis",
"Kölln-Reisiek",
"Komki-Ipala",
"Königsbach-Stein",
"Königshain-Wiederau",
"Korbeek-Dijle",
"Korbeek-Lo",
"Korntal-Münchingen",
"ko-soto-gake",
"Kottweiler-Schwanden",
"kouan-hoa",
"kouign-aman",
"kouign-amann",
"kouign-amanns",
"kouign-amans",
"K-pop",
"K-Pop",
"K-POP",
"Kradolf-Schönenberg",
"krav-naga",
"Kreba-Neudorf",
"Kreimbach-Kaulbach",
"krésoxim-méthyl",
"Kröppelshagen-Fahrendorf",
"Kuhlen-Wendorf",
"kung-fu",
"k-voisinage",
"k-voisinages",
"kwan-li-so",
"k-way",
"K-way",
"k-ways",
"K-ways",
"KwaZulu-Natal",
"Kyzyl-Arvat",
"Laag-Caestert",
"Laag-Keppel",
"Laag-Nieuwkoop",
"Laag-Soeren",
"Laà-Mondrans",
"Labarthe-Bleys",
"Labarthe-Inard",
"Labarthe-Rivière",
"Labarthe-sur-Lèze",
"là-bas",
"Labastide-Beauvoir",
"Labastide-Castel-Amouroux",
"Labastide-Cézéracq",
"Labastide-Chalosse",
"Labastide-Clairence",
"Labastide-Clermont",
"Labastide-d'Anjou",
"Labastide-d'Armagnac",
"Labastide-de-Juvinas",
"Labastide-de-Lévis",
"Labastide-Dénat",
"Labastide-de-Penne",
"Labastide-de-Virac",
"Labastide-du-Haut-Mont",
"Labastide-du-Temple",
"Labastide-du-Vert",
"Labastide-en-Val",
"Labastide-Esparbairenque",
"Labastide-Gabausse",
"Labastide-Marnhac",
"Labastide-Monréjeau",
"Labastide-Murat",
"Labastide-Paumès",
"Labastide-Rouairoux",
"Labastide-Saint-Georges",
"Labastide-Saint-Pierre",
"Labastide-Saint-Sernin",
"Labastide-Savès",
"Labastide-sur-Bésorgues",
"Labastide-Villefranche",
"Labatie-d'Andaure",
"Labatut-Rivière",
"Labécède-Lauragais",
"Labergement-du-Navois",
"Labergement-Foigney",
"Labergement-lès-Auxonne",
"Labergement-lès-Seurre",
"Labergement-Sainte-Marie",
"Labessière-Candeil",
"Labets-Biscay",
"lab-ferment",
"lab-ferments",
"Laboissière-en-Santerre",
"Laboissière-en-Thelle",
"Laboissière-Saint-Martin",
"Labruyère-Dorsa",
"lac-à-l'épaule",
"Lacam-d'Ourcet",
"Lacapelle-Barrès",
"Lacapelle-Biron",
"Lacapelle-Cabanac",
"Lacapelle-del-Fraisse",
"Lacapelle-Livron",
"Lacapelle-Marival",
"Lacapelle-Pinet",
"Lacapelle-Ségalar",
"Lacapelle-Viescamp",
"Lacarry-Arhan-Charritte-de-Haut",
"Lac-aux-Sables",
"Lac-Beauportois",
"Lac-Bouchettien",
"Lac-Carréen",
"Lac-des-Rouges-Truites",
"Lac-Édouard",
"Lac-Etcheminois",
"Lachamp-Raphaël",
"Lachapelle-aux-Pots",
"Lachapelle-Auzac",
"Lachapelle-en-Blaisy",
"Lachapelle-Graillouse",
"Lachapelle-Saint-Pierre",
"Lachapelle-sous-Aubenas",
"Lachapelle-sous-Chanéac",
"Lachapelle-sous-Chaux",
"Lachapelle-sous-Gerberoy",
"Lachapelle-sous-Rougemont",
"Lachaussée-du-Bois-d'Ecu",
"Lachaussée-du-Bois-d'Écu",
"lache-bras",
"lâcher-tout",
"Lac-Humquien",
"lac-laque",
"lac-laques",
"là-contre",
"Lacougotte-Cadoul",
"Lacour-d'Arcenay",
"Lacourt-Saint-Pierre",
"Lac-ou-Villers",
"Lac-Poulinois",
"lacrima-christi",
"lacrima-Christi",
"Lacrima-Christi",
"Lacroix-Barrez",
"Lacroix-Falgarde",
"Lacroix-Saint-Ouen",
"Lacroix-sur-Meuse",
"lacryma-christi",
"lacryma-Christi",
"Lacryma-Christi",
"Lac-Saguayen",
"lacs-à-l'épaule",
"lacto-végétarisme",
"lacto-végétarismes",
"là-dedans",
"là-delez",
"Ladern-sur-Lauquet",
"là-dessous",
"là-dessus",
"Ladevèze-Rivière",
"Ladevèze-Ville",
"Ladignac-le-Long",
"Ladignac-sur-Rondelles",
"Ladoix-Serrigny",
"Ladoye-sur-Seille",
"laemmer-geier",
"laemmer-geiers",
"læmmer-geyer",
"læmmer-geyers",
"Laethem-Sainte-Marie",
"Laethem-Saint-Martin",
"Lafage-sur-Sombre",
"Laferté-sur-Amance",
"Laferté-sur-Aube",
"la-fertois",
"La-Fertois",
"la-fertoise",
"La-Fertoise",
"la-fertoises",
"La-Fertoises",
"Lafeuillade-en-Vézie",
"Laffite-Toupière",
"Lafitte-sur-Lot",
"Lafitte-Vigordane",
"Lafresguimont-Saint-Martin",
"Lagarde-d'Apt",
"Lagarde-Enval",
"Lagarde-Hachan",
"Lagardelle-sur-Lèze",
"Lagarde-Paréol",
"Lagarde-sur-le-Né",
"Lagnicourt-Marcel",
"Lagny-le-Sec",
"Lagny-sur-Marne",
"Lagrâce-Dieu",
"Lagraulet-du-Gers",
"Lagraulet-Saint-Nicolas",
"Laguian-Mazous",
"Laguinge-Restoue",
"là-haut",
"Lahaye-Saint-Romain",
"Lahitte-Toupière",
"Lahn-Dill",
"Laigné-en-Belin",
"Lailly-en-Val",
"Laines-aux-Bois",
"Lainville-en-Vexin",
"laissée-pour-compte",
"laissées-pour-compte",
"laissé-pour-compte",
"laisser-aller",
"laisser-allers",
"laisser-courre",
"laisser-faire",
"laisser-sur-place",
"laissés-pour-compte",
"laissez-faire",
"laissez-passer",
"Laître-sous-Amance",
"Laize-Clinchamps",
"Laize-la-Ville",
"la-la-la",
"Lalande-de-Pomerol",
"Lalande-en-Son",
"Lalanne-Arqué",
"Lalanne-Trie",
"Lalevade-d'Ardèche",
"Lalouret-Laffiteau",
"Lamadeleine-Val-des-Anges",
"Lamalou-les-Bains",
"Lamarche-en-Woëvre",
"Lamarche-sur-Saône",
"Lamargelle-aux-Bois",
"Lamarque-Pontacq",
"Lamarque-Rustaing",
"Lamazière-Basse",
"Lamazière-Haute",
"lambda-cyhalothrine",
"Lambres-lès-Aire",
"Lambres-lez-Aire",
"Lambres-lez-Douai",
"Lamenay-sur-Loire",
"L-aminoacide",
"L-aminoacides",
"Lamonzie-Montastruc",
"Lamonzie-Saint-Martin",
"Lamothe-Capdeville",
"Lamothe-Cassel",
"Lamothe-Cumont",
"Lamothe-en-Blaisy",
"Lamothe-Fénelon",
"Lamothe-Goas",
"Lamothe-Landerron",
"Lamothe-Montravel",
"Lamotte-Beuvron",
"Lamotte-Brebière",
"Lamotte-Buleux",
"Lamotte-du-Rhône",
"Lamotte-Warfusée",
"Lampaul-Guimiliau",
"Lampaul-Plouarzel",
"Lampaul-Ploudalmézeau",
"lampes-tempête",
"lampe-tempête",
"l-amphétamine",
"lampris-lune",
"Lamure-sur-Azergues",
"lance-amarres",
"lance-balles",
"lance-bombe",
"lance-bombes",
"lance-flamme",
"lance-flammes",
"lance-fusée",
"lance-fusées",
"lance-grenade",
"lance-grenades",
"lance-missile",
"lance-missiles",
"lance-patates",
"lance-pierre",
"lance-pierres",
"lance-roquette",
"lance-roquettes",
"lance-torpille",
"lance-torpilles",
"Lanches-Saint-Hilaire",
"Lanciego-Lantziego",
"Lancken-Granitz",
"Lançon-Provence",
"Lande-de-Libourne",
"Landelles-et-Coupigny",
"Landerrouet-sur-Ségur",
"Landes-le-Gaulois",
"Landes-sur-Ajon",
"Landes-Vieilles-et-Neuves",
"land-ice",
"land-ices",
"Landifay-et-Bertaignemont",
"Landouzy-la-Cour",
"Landouzy-la-Ville",
"Landrecourt-Lempire",
"Landres-et-Saint-Georges",
"Landrethun-le-Nord",
"Landrethun-lès-Ardres",
"Landsberg-am-Lech",
"Laneuveville-aux-Bois",
"Laneuveville-derrière-Foug",
"Laneuveville-devant-Bayon",
"Laneuveville-devant-Nancy",
"Laneuveville-en-Saulnois",
"Laneuveville-lès-Lorquin",
"Laneuville-à-Rémy",
"Laneuville-au-Bois",
"Laneuville-au-Pont",
"Laneuville-au-Rupt",
"Laneuville-sur-Meuse",
"Langemark-Poelkapelle",
"Langenleuba-Niederhain",
"Langrolay-sur-Rance",
"Langrune-sur-Mer",
"langue-de-boeuf",
"langue-de-chat",
"langue-de-moineau",
"langue-de-serpent",
"langue-de-vache",
"Languedoc-Roussillon",
"Languedoc-Roussillon-Midi-Pyrénées",
"langues-de-boeuf",
"langues-de-chat",
"langues-de-vache",
"langues-toit",
"langue-toit",
"Languevoisin-Quiquery",
"Lanitz-Hassel-Tal",
"Lanne-en-Barétous",
"Lanne-Soubiran",
"lanne-soubiranais",
"Lanne-Soubiranais",
"lanne-soubiranaise",
"Lanne-Soubiranaise",
"lanne-soubiranaises",
"Lanne-Soubiranaises",
"Lannoy-Cuillère",
"Lanques-sur-Rognon",
"Lansen-Schönau",
"Lans-en-Vercors",
"Lanslebourg-Mont-Cenis",
"Lans-l'Hermitage",
"Lantenne-Vertière",
"Lanty-sur-Aube",
"Lapanouse-de-Cernon",
"Laperrière-sur-Saône",
"Lapeyrouse-Fossat",
"Lapeyrouse-Mornay",
"lapin-garou",
"lapins-garous",
"lapis-lazuli",
"là-pour-ça",
"lapu-lapu",
"Laragne-Montéglin",
"Larceveau-Arros-Cibits",
"Lardier-et-Valença",
"Largillay-Marsonnay",
"Largny-sur-Automne",
"Larians-et-Munans",
"Larivière-Arnoncourt",
"larme-de-Job",
"larmes-de-Job",
"Larmor-Baden",
"Larmor-Plage",
"Laroche-près-Feyt",
"Laroche-Saint-Cydroine",
"Laroque-de-Fa",
"Laroque-des-Albères",
"Laroque-des-Arcs",
"Laroque-d'Olmes",
"Laroque-Timbaut",
"Larribar-Sorhapuru",
"Larrivière-Saint-Savin",
"Larroque-Engalin",
"Larroque-Saint-Sernin",
"Larroque-sur-l'Osse",
"Larroque-Toirac",
"Lasarte-Oria",
"Lascellas-Ponzano",
"Lasne-Chapelle-Saint-Lambert",
"Lassay-les-Châteaux",
"Lassay-sur-Croisne",
"Lasserre-de-Prouille",
"Lasseube-Propre",
"Lathus-Saint-Rémy",
"Lâ-Todin",
"Latouille-Lentillac",
"Latour-Bas-Elne",
"Latour-de-Carol",
"Latour-de-France",
"Latour-en-Woëvre",
"Latrecey-Ormoy-sur-Aube",
"Lattre-Saint-Quentin",
"Lau-Balagnas",
"lau-balutin",
"Lau-Balutin",
"lau-balutine",
"Lau-Balutine",
"lau-balutines",
"Lau-Balutines",
"lau-balutins",
"Lau-Balutins",
"Laucha-sur-Unstrut",
"Lauda-Königshofen",
"Laudio-Llodio",
"Laudun-l'Ardoise",
"Laufen-Uhwiesen",
"launay-villersois",
"Launay-Villersois",
"launay-villersoise",
"Launay-Villersoise",
"launay-villersoises",
"Launay-Villersoises",
"Launay-Villiers",
"Launois-sur-Vence",
"Laurac-en-Vivarais",
"Laure-Minervois",
"laurier-cerise",
"laurier-rose",
"laurier-sauce",
"lauriers-cerises",
"lauriers-roses",
"lauriers-tins",
"laurier-tarte",
"laurier-thym",
"laurier-tin",
"Lauwin-Planque",
"Laux-Montaux",
"Laval-Atger",
"Laval-d'Aix",
"Laval-d'Aurelle",
"Laval-de-Cère",
"laval-de-cérois",
"Laval-de-Cérois",
"laval-de-céroise",
"Laval-de-Céroise",
"laval-de-céroises",
"Laval-de-Céroises",
"Laval-du-Tarn",
"Laval-en-Brie",
"Laval-en-Laonnois",
"Laval-le-Prieuré",
"Laval-Morency",
"Laval-Pradel",
"Laval-Roquecezière",
"Laval-Saint-Roman",
"Laval-sur-Doulon",
"Laval-sur-Luzège",
"Laval-sur-Tourbe",
"Laval-sur-Vologne",
"Lavancia-Epercy",
"Lavans-lès-Dole",
"Lavans-lès-Saint-Claude",
"lavans-quingeois",
"Lavans-Quingeois",
"lavans-quingeoise",
"Lavans-Quingeoise",
"lavans-quingeoises",
"Lavans-Quingeoises",
"Lavans-Quingey",
"Lavans-sur-Valouse",
"Lavans-Vuillafans",
"Lavault-de-Frétoy",
"Lavault-Sainte-Anne",
"Lavau-sur-Loire",
"Lavaux-Oron",
"Lavaux-Sainte-Anne",
"Lavaveix-les-Mines",
"lave-auto",
"lave-autos",
"lavé-de-vert",
"lave-glace",
"Lavelanet-de-Comminges",
"Laveline-devant-Bruyères",
"Laveline-du-Houx",
"lave-linge",
"lave-linges",
"lave-main",
"lave-mains",
"Laveno-Mombello",
"lave-pont",
"lave-ponts",
"Lavernose-Lacasse",
"lavés-de-vert",
"lave-tête",
"lave-têtes",
"laveuse-sécheuse",
"lave-vaisselle",
"lave-vaisselles",
"Lavey-Morcles",
"Laville-aux-Bois",
"Lavilleneuve-au-Roi",
"Lavilleneuve-aux-Fresnes",
"Lavoûte-Chilhac",
"Lavoûte-sur-Loire",
"Lawarde-Mauger-l'Hortoy",
"Lay-Lamidou",
"Layrac-sur-Tarn",
"Lay-Saint-Christophe",
"Lay-Saint-Remy",
"Lays-sur-le-Doubs",
"lazur-apatite",
"lazur-apatites",
"Léa-Lisa",
"lease-back",
"leather-jacket",
"lèche-botta",
"lèche-bottai",
"lèche-bottaient",
"lèche-bottais",
"lèche-bottait",
"lèche-bottâmes",
"lèche-bottant",
"lèche-bottas",
"lèche-bottasse",
"lèche-bottassent",
"lèche-bottasses",
"lèche-bottassiez",
"lèche-bottassions",
"lèche-bottât",
"lèche-bottâtes",
"lèche-botte",
"lèche-botté",
"lèche-bottée",
"lèche-bottées",
"lèche-bottent",
"lèche-botter",
"lèche-bottera",
"lèche-botterai",
"lèche-botteraient",
"lèche-botterais",
"lèche-botterait",
"lèche-botteras",
"lèche-bottèrent",
"lèche-botterez",
"lèche-botteriez",
"lèche-botterions",
"lèche-botterons",
"lèche-botteront",
"lèche-bottes",
"lèche-bottés",
"lèche-bottez",
"lèche-bottiez",
"lèche-bottions",
"lèche-bottons",
"lèche-cul",
"lèche-culs",
"lèche-vitrine",
"lèche-vitrines",
"lecteur-graveur",
"lecteurs-graveurs",
"Lédas-et-Penthiès",
"Leers-et-Fosteau",
"Leers-Nord",
"Lées-Athas",
"Leeuw-Saint-Pierre",
"Lège-Cap-Ferret",
"Légéville-et-Bonfays",
"Légion-d'Honneur",
"Léguillac-de-Cercles",
"Léguillac-de-l'Auche",
"légume-feuille",
"légume-fleur",
"légume-fruit",
"légume-racine",
"légumes-feuilles",
"légumes-fleurs",
"légumes-fruits",
"légumes-racines",
"légumes-tiges",
"légume-tige",
"Leidschendam-Voorburg",
"Leigné-les-Bois",
"Leignes-sur-Fontaine",
"Leigné-sur-Usseau",
"Leinefelde-Worbis",
"Leinfelden-Echterdingen",
"Leintz-Gatzaga",
"Lelin-Lapujolle",
"Leménil-Mitry",
"lemmer-geyer",
"lemmer-geyers",
"Lempdes-sur-Allagnon",
"Lempire-aux-Bois",
"Lens-Lestang",
"Lens-Saint-Remy",
"Lens-Saint-Servais",
"Lens-sur-Geer",
"Lentillac-du-Causse",
"Lentillac-Lauzès",
"Lentillac-Saint-Blaise",
"léopard-garou",
"léopards-garous",
"Leo-Stichting",
"Lépanges-sur-Vologne",
"Lépin-le-Lac",
"lépisostée-alligator",
"Lépron-les-Vallées",
"lepto-kurticité",
"lepto-kurticités",
"lepto-kurtique",
"lepto-kurtiques",
"Lepuix-Neuf",
"Lerm-et-Musset",
"Leschères-sur-le-Blaiseron",
"Lesches-en-Diois",
"Lescouët-Gouarec",
"Lescouët-Jugon",
"Lescure-d'Albigeois",
"Lescure-Jaoul",
"lèse-majesté",
"lèse-majestés",
"Lésignac-Durand",
"Lesparre-Médoc",
"Lespielle-Germenaud-Lannegrasse",
"Lesquielles-Saint-Germain",
"Lessard-en-Auge",
"Lessard-en-Bresse",
"Lessard-et-le-Chêne",
"Lessard-le-National",
"Lestelle-Bétharram",
"Lestelle-de-Saint-Martory",
"Lestiac-sur-Garonne",
"Lestrade-et-Thouels",
"Leuben-Schleinitz",
"Leudon-en-Brie",
"Leuilly-sous-Coucy",
"Leulinghen-Bernes",
"Leusden-Zuid",
"Leuville-sur-Orge",
"Leuze-en-Hainaut",
"Leval-Chaudeville",
"Levallois-Perret",
"Leval-Trahegnies",
"lève-cul",
"lève-culs",
"lève-gazon",
"lève-glace",
"lève-glaces",
"lever-dieu",
"Levesville-la-Chenard",
"lève-tard",
"lève-tôt",
"lève-vitre",
"lève-vitres",
"Lévignac-de-Guyenne",
"Lévis-Saint-Nom",
"lévi-straussien",
"lévi-straussienne",
"lévi-straussiennes",
"lévi-straussiens",
"Lévy-Saint-Nom",
"Leyritz-Moncassin",
"Lézat-sur-Lèze",
"Lez-Fontaine",
"Lézignan-Corbières",
"Lézignan-la-Cèbe",
"L-flampropisopropyl",
"lgbti-friendly",
"LGBTI-friendly",
"lgbti-phobie",
"LGBTI-phobie",
"lgbti-phobies",
"LGBTI-phobies",
"L-glycéraldéhyde",
"Liancourt-Fosse",
"Liancourt-Saint-Pierre",
"liane-corail",
"lianes-corail",
"Lias-d'Armagnac",
"libéral-conservateur",
"libéral-conservatisme",
"liberum-veto",
"libidino-calotin",
"Libramont-Chevigny",
"libre-choix",
"libre-échange",
"libre-échangisme",
"libre-échangismes",
"libre-échangiste",
"libre-échangistes",
"libre-penseur",
"libre-penseuse",
"libres-choix",
"libre-service",
"libres-penseurs",
"libres-penseuses",
"libres-services",
"Libre-Ville",
"libyco-berbère",
"libyco-berbères",
"lice-po",
"Licey-sur-Vingeanne",
"Lichans-Sunhar",
"liche-casse",
"Lichères-près-Aigremont",
"Lichères-sur-Yonne",
"Lichterfeld-Schacksdorf",
"licol-drisse",
"licols-drisses",
"Licq-Athérey",
"Licy-Clignon",
"lie-de-vin",
"Lierde-Sainte-Marie",
"Lierde-Saint-Martin",
"Liesse-Notre-Dame",
"Liesville-sur-Douve",
"lieu-dit",
"Lieuran-Cabrières",
"Lieuran-lès-Béziers",
"Lieu-Saint-Amand",
"lieu-saint-amandinois",
"Lieu-Saint-Amandinois",
"lieu-saint-amandinoise",
"Lieu-Saint-Amandinoise",
"lieu-saint-amandinoises",
"Lieu-Saint-Amandinoises",
"lieutenant-colonel",
"lieutenant-général",
"lieutenant-gouverneur",
"lieutenants-colonels",
"lieux-dits",
"Liffol-le-Grand",
"Liffol-le-Petit",
"Li-Fi",
"Lignan-de-Bazas",
"Lignan-de-Bordeaux",
"Lignan-sur-Orb",
"ligne-de-foulée",
"lignes-de-foulée",
"Lignières-Châtelain",
"Lignières-de-Touraine",
"Lignières-en-Vimeu",
"Lignières-la-Carelle",
"Lignières-Orgères",
"Lignières-Sonneville",
"Lignières-sur-Aire",
"Lignol-le-Château",
"Ligny-en-Barrois",
"Ligny-en-Brionnais",
"Ligny-en-Cambrésis",
"Ligny-Haucourt",
"Ligny-le-Châtel",
"Ligny-le-Ribault",
"Ligny-lès-Aire",
"Ligny-Saint-Flochel",
"Ligny-sur-Canche",
"Ligny-Thilloy",
"Lille-sous-Mauréal",
"Lille-sous-Montréal",
"Lillois-Witterzée",
"limande-sole",
"limande-soles",
"limandes-soles",
"Limbach-Oberfrohna",
"Limburg-Weilburg",
"lime-bois",
"Limeil-Brévannes",
"Limetz-Villez",
"lime-uranite",
"lime-uranites",
"Limey-Remenauville",
"Limoges-Fourches",
"Limogne-en-Quercy",
"Limont-Fontaine",
"Limours-en-Hurepoix",
"Lincheux-Hallivillers",
"Lindre-Basse",
"Lindre-Haute",
"Linières-Bouton",
"Linkenheim-Hochstetten",
"linon-batiste",
"linon-batistes",
"Lintot-les-Bois",
"Liny-devant-Dun",
"Lion-devant-Dun",
"Lion-en-Beauce",
"Lion-en-Sullias",
"lion-garou",
"lions-garous",
"Lion-sur-Mer",
"Liorac-sur-Louyre",
"Lioux-les-Monges",
"Lippersdorf-Erdmannsdorf",
"lire-écrire",
"Lisle-en-Barrois",
"Lisle-en-Rigault",
"Lisle-sur-Tarn",
"Lissac-et-Mouret",
"Lissac-sur-Couze",
"Lissay-Lochy",
"Lisse-en-Champagne",
"Listrac-de-Durèze",
"Listrac-Médoc",
"lit-cage",
"lit-clos",
"Lit-et-Mixe",
"litho-typographia",
"litho-typographiai",
"litho-typographiaient",
"litho-typographiais",
"litho-typographiait",
"litho-typographiâmes",
"litho-typographiant",
"litho-typographias",
"litho-typographiasse",
"litho-typographiassent",
"litho-typographiasses",
"litho-typographiassiez",
"litho-typographiassions",
"litho-typographiât",
"litho-typographiâtes",
"litho-typographie",
"litho-typographié",
"litho-typographiée",
"litho-typographiées",
"litho-typographient",
"litho-typographier",
"litho-typographiera",
"litho-typographierai",
"litho-typographieraient",
"litho-typographierais",
"litho-typographierait",
"litho-typographieras",
"litho-typographièrent",
"litho-typographierez",
"litho-typographieriez",
"litho-typographierions",
"litho-typographierons",
"litho-typographieront",
"litho-typographies",
"litho-typographiés",
"litho-typographiez",
"litho-typographiiez",
"litho-typographiions",
"litho-typographions",
"lits-cages",
"lits-clos",
"little-endian",
"Livarot-Pays-d'Auge",
"Liverdy-en-Brie",
"Livers-Cazelles",
"Livet-en-Saosnois",
"Livet-et-Gavet",
"Livet-sur-Authou",
"living-room",
"living-rooms",
"Livinhac-le-Haut",
"Livré-la-Touche",
"livres-cassettes",
"Livré-sur-Changeon",
"livret-police",
"Livron-sur-Drôme",
"Livry-Gargan",
"Livry-Louvercy",
"Livry-sur-Seine",
"Lixing-lès-Rouhling",
"Lixing-lès-Saint-Avold",
"Lizy-sur-Ourcq",
"localité-type",
"location-financement",
"Loc-Brévalaire",
"Loc-Eguiner",
"Loc-Éguiner",
"Loc-Eguiner-Saint-Thégonnec",
"Loc-Éguiner-Saint-Thégonnec",
"Loc-Envel",
"Loches-sur-Ource",
"Loché-sur-Indrois",
"lock-out",
"lock-outa",
"lock-outai",
"lock-outaient",
"lock-outais",
"lock-outait",
"lock-outâmes",
"lock-outant",
"lock-outas",
"lock-outasse",
"lock-outassent",
"lock-outasses",
"lock-outassiez",
"lock-outassions",
"lock-outât",
"lock-outâtes",
"lock-oute",
"lock-outé",
"lock-outée",
"lock-outées",
"lock-outent",
"lock-outer",
"lock-outera",
"lock-outerai",
"lock-outeraient",
"lock-outerais",
"lock-outerait",
"lock-outeras",
"lock-outèrent",
"lock-outerez",
"lock-outeriez",
"lock-outerions",
"lock-outerons",
"lock-outeront",
"lock-outes",
"lock-outés",
"lock-outez",
"lock-outiez",
"lock-outions",
"lock-outons",
"lock-outs",
"Locmaria-Berrien",
"Locmaria-Grand-Champ",
"Locmaria-Plouzané",
"Locoal-Mendon",
"locoalo-mendonnais",
"Locoalo-Mendonnais",
"locoalo-mendonnaise",
"Locoalo-Mendonnaise",
"locoalo-mendonnaises",
"Locoalo-Mendonnaises",
"locution-phrase",
"locutions-phrases",
"Loèche-les-Bains",
"Loèche-Ville",
"loemmer-geyer",
"lœmmer-geyer",
"loemmer-geyers",
"lœmmer-geyers",
"Loenen-Kronenburg",
"logan-berry",
"logan-berrys",
"Loge-Fougereuse",
"logiciel-socle",
"Logny-Bogny",
"Logny-lès-Aubenton",
"Logny-lès-Chaumont",
"Logonna-Daoulas",
"Logonna-Quimerch",
"logo-syllabique",
"logo-syllabiques",
"Logrian-et-Comiac-de-Florian",
"Logrian-Florian",
"Loguivy-lès-Lannion",
"Loguivy-Plougras",
"Lohe-Föhrden",
"Lohe-Rickelshof",
"Lohitzun-Oyhercq",
"Lohn-Ammannsegg",
"loi-cadre",
"loi-écran",
"Loigné-sur-Mayenne",
"Loigny-la-Bataille",
"loi-programme",
"Loire-Atlantique",
"Loire-Authion",
"Loire-Inférieure",
"Loire-les-Marais",
"Loiré-sur-Nie",
"Loire-sur-Rhône",
"Loir-et-Cher",
"Loiron-Ruillé",
"lois-cadre",
"lois-écrans",
"Loisey-Culey",
"Loison-sous-Lens",
"Loison-sur-Créquoise",
"lois-programme",
"Loisy-en-Brie",
"Loisy-sur-Marne",
"Loitsche-Heinrichsberg",
"Lombeek-Notre-Dame",
"lombo-costal",
"lombo-costo-trachélien",
"lombo-dorso-trachélien",
"lombo-huméral",
"lombo-sacré",
"lombri-composta",
"lombri-compostai",
"lombri-compostaient",
"lombri-compostais",
"lombri-compostait",
"lombri-compostâmes",
"lombri-compostant",
"lombri-compostas",
"lombri-compostasse",
"lombri-compostassent",
"lombri-compostasses",
"lombri-compostassiez",
"lombri-compostassions",
"lombri-compostât",
"lombri-compostâtes",
"lombri-composte",
"lombri-composté",
"lombri-compostée",
"lombri-compostées",
"lombri-compostent",
"lombri-composter",
"lombri-compostera",
"lombri-composterai",
"lombri-composteraient",
"lombri-composterais",
"lombri-composterait",
"lombri-composteras",
"lombri-compostèrent",
"lombri-composterez",
"lombri-composteriez",
"lombri-composterions",
"lombri-composterons",
"lombri-composteront",
"lombri-compostes",
"lombri-compostés",
"lombri-compostez",
"lombri-compostiez",
"lombri-compostions",
"lombri-compostons",
"Lomont-sur-Crête",
"lompénie-serpent",
"Lona-Lases",
"Longchamp-sous-Châtenois",
"Longchamps-sur-Aire",
"Longchamp-sur-Aujon",
"long-courrier",
"long-courriers",
"Longeau-Percey",
"Longecourt-en-Plaine",
"Longecourt-lès-Culêtre",
"Longevelle-lès-Russey",
"Longevelle-sur-Doubs",
"Longeville-en-Barrois",
"Longeville-lès-Metz",
"Longeville-lès-Saint-Avold",
"Longevilles-Mont-d'Or",
"Longeville-sur-la-Laines",
"Longeville-sur-Mer",
"Longeville-sur-Mogne",
"long-grain",
"long-jointé",
"long-jointée",
"long-métrage",
"Longny-au-Perche",
"Longny-les-Villages",
"Longpont-sur-Orge",
"Longpré-les-Corps-Saints",
"Longpré-le-Sec",
"longs-courriers",
"longs-métrages",
"long-temps",
"long-tems",
"longue-épine",
"Longueil-Annel",
"Longueil-Sainte-Marie",
"Longué-Jumelles",
"longue-langue",
"Longuenée-en-Anjou",
"Longue-Rivois",
"longues-épines",
"longues-langues",
"Longues-sur-Mer",
"longues-vues",
"Longueval-Barbonval",
"Longueville-sur-Aube",
"Longueville-sur-Scie",
"longue-vue",
"Longwé-l'Abbaye",
"Longwy-sur-le-Doubs",
"Lonlay-l'Abbaye",
"Lonlay-le-Tesson",
"Lons-le-Saunier",
"Loon-Plage",
"Loos-en-Gohelle",
"loqu'du",
"loqu'due",
"loqu'dues",
"loqu'dus",
"lord-lieutenance",
"lord-lieutenances",
"lord-lieutenant",
"lord-lieutenants",
"lord-maire",
"Lo-Reninge",
"Loreto-di-Casinca",
"Loreto-di-Tallano",
"Loriol-du-Comtat",
"Loriol-sur-Drôme",
"Lorp-Sentaraille",
"Lorrez-le-Bocage-Préaux",
"Lorry-lès-Metz",
"Lorry-Mardigny",
"Loscouët-sur-Meu",
"Louan-Villegruis-Fontaine",
"Loubens-Lauragais",
"Loubès-Bernac",
"Louchy-Montfand",
"Louette-Saint-Denis",
"Louette-Saint-Pierre",
"Lougé-sur-Maire",
"louise-bonne",
"louises-bonnes",
"Loulans-les-Forges",
"Loulans-Verchamp",
"loup-cerve",
"loup-cervier",
"loup-garou",
"Loupiac-de-la-Réole",
"Loup-Maëlle",
"Louppy-le-Château",
"Louppy-sur-Chée",
"Louppy-sur-Loison",
"loups-cerves",
"loups-cerviers",
"loups-garous",
"Lourdios-Ichère",
"lourd-léger",
"Lourdoueix-Saint-Michel",
"Lourdoueix-Saint-Pierre",
"lourds-légers",
"Loures-Barousse",
"Louresse-Rochemenier",
"Lourouer-Saint-Laurent",
"Louroux-Bourbonnais",
"Louroux-de-Beaune",
"Louroux-de-Bouble",
"Louroux-Hodement",
"lourouzien-bourbonnais",
"Lourouzien-Bourbonnais",
"lourouzienne-bourbonnaise",
"Lourouzienne-Bourbonnaise",
"lourouziennes-bourbonnaises",
"Lourouziennes-Bourbonnaises",
"lourouziens-bourbonnais",
"Lourouziens-Bourbonnais",
"Lourties-Monbrun",
"Loussous-Débat",
"Louvain-la-Neuve",
"louve-garelle",
"louve-garolle",
"louve-garou",
"Louvemont-Côte-du-Poivre",
"louves-garelles",
"louves-garolles",
"louves-garous",
"louveteau-garou",
"louveteaux-garous",
"Louvie-Juzon",
"Louvières-en-Auge",
"Louvie-Soubiron",
"louvie-soubironnais",
"Louvie-Soubironnais",
"louvie-soubironnaise",
"Louvie-Soubironnaise",
"louvie-soubironnaises",
"Louvie-Soubironnaises",
"Louvigné-de-Bais",
"Louvigné-du-Désert",
"Louvignies-Bavay",
"Louvignies-Quesnoy",
"Louville-la-Chenard",
"Louvilliers-en-Drouais",
"Louvilliers-lès-Perche",
"Louzac-Saint-André",
"love-in",
"low-cost",
"low-costs",
"low-tech",
"Loye-sur-Arnon",
"Lozoyuela-Navas-Sieteiglesias",
"Lubret-Saint-Luc",
"Luby-Betmont",
"Luc-Armau",
"Luçay-le-Libre",
"Luçay-le-Mâle",
"Lucbardez-et-Bargues",
"Lucenay-le-Duc",
"Lucenay-lès-Aix",
"Lucenay-l'Evêque",
"Lucenay-l'Évêque",
"Luc-en-Diois",
"Lucé-sous-Ballon",
"Luché-Pringé",
"Luché-sur-Brioux",
"Luché-Thouarsais",
"Lüchow-Dannenberg",
"Luc-la-Primaube",
"Lucq-de-Béarn",
"Luc-sur-Aude",
"Luc-sur-Mer",
"Luc-sur-Orbieu",
"Lucy-le-Bocage",
"Lucy-le-Bois",
"Lucy-sur-Cure",
"Lucy-sur-Yonne",
"ludo-éducatif",
"Ludon-Médoc",
"ludo-sportif",
"ludo-sportifs",
"ludo-sportive",
"ludo-sportives",
"Lué-en-Baugeois",
"Lugaut-Retjons",
"Lugny-Bourbonnais",
"Lugny-Champagne",
"Lugny-lès-Charolles",
"Lugo-di-Nazza",
"Lugon-et-l'Île-du-Carnay",
"Luhe-Wildenau",
"lui-même",
"lumen-seconde",
"lumens-secondes",
"Luméville-en-Ornois",
"Lumigny-Nesles-Ormeaux",
"Lunel-Viel",
"luni-solaire",
"luni-solaires",
"Lunow-Stolzenhagen",
"Lupiñén-Ortilla",
"Luppé-Violles",
"Lurbe-Saint-Christau",
"Lurcy-le-Bourg",
"Lurcy-Lévis",
"Lurcy-Lévy",
"Lury-sur-Arnon",
"Lusignan-Grand",
"Lusignan-Petit",
"Lusigny-sur-Barse",
"Lusigny-sur-Ouche",
"Lus-la-Croix-Haute",
"Lussac-les-Châteaux",
"Lussac-les-Eglises",
"Lussac-les-Églises",
"Lussagnet-Lusson",
"Lussan-Adeilhac",
"Lussas-et-Nontronneau",
"Lussault-sur-Loire",
"Lussery-Villars",
"Lussy-sur-Morges",
"Lüterkofen-Ichertswil",
"Lüterswil-Gächliwil",
"Luthenay-Uxeloup",
"Łutselk'e",
"Luttenbach-près-Munster",
"Lüttow-Valluhn",
"Lutz-en-Dunois",
"Luxémont-et-Villotte",
"Luxe-Sumberraute",
"Luxeuil-les-Bains",
"Luz-Saint-Sauveur",
"Luzy-Saint-Martin",
"Luzy-sur-Marne",
"Ly-Fontaine",
"Lyons-la-Forêt",
"lyro-guitare",
"Lys-Haut-Layon",
"Lys-lez-Lannoy",
"Lys-Saint-Georges",
"Maarke-Kerkem",
"Maast-et-Violaine",
"mac-adamisa",
"mac-adamisai",
"mac-adamisaient",
"mac-adamisais",
"mac-adamisait",
"mac-adamisâmes",
"mac-adamisant",
"mac-adamisas",
"mac-adamisasse",
"mac-adamisassent",
"mac-adamisasses",
"mac-adamisassiez",
"mac-adamisassions",
"mac-adamisât",
"mac-adamisâtes",
"mac-adamise",
"mac-adamisé",
"mac-adamisée",
"mac-adamisées",
"mac-adamisent",
"mac-adamiser",
"mac-adamisera",
"mac-adamiserai",
"mac-adamiseraient",
"mac-adamiserais",
"mac-adamiserait",
"mac-adamiseras",
"mac-adamisèrent",
"mac-adamiserez",
"mac-adamiseriez",
"mac-adamiserions",
"mac-adamiserons",
"mac-adamiseront",
"mac-adamises",
"mac-adamisés",
"mac-adamisez",
"mac-adamisiez",
"mac-adamisions",
"mac-adamisons",
"Macédoine-Centrale",
"Macédoine-Occidentale",
"Macédoine-Orientale-et-Thrace",
"mac-ferlane",
"mac-ferlanes",
"mâche-bouchons",
"Machecoul-Saint-Même",
"mâche-dru",
"mâche-laurier",
"machin-chose",
"machin-choses",
"machin-chouette",
"machine-outil",
"machines-outils",
"machins-chouettes",
"machon-gorgeon",
"mac-kintosh",
"mac-kintoshs",
"Mâcot-la-Plagne",
"ma'di",
"Madlitz-Wilmersdorf",
"Madonne-et-Lamerey",
"maël-carhaisien",
"Maël-Carhaisien",
"maël-carhaisienne",
"Maël-Carhaisienne",
"maël-carhaisiennes",
"Maël-Carhaisiennes",
"maël-carhaisiens",
"Maël-Carhaisiens",
"Maël-Carhaix",
"Maël-Pestivien",
"Maen-Roch",
"Mae-West",
"Mae-Wests",
"magasin-pilote",
"magasins-pilotes",
"Magnac-Bourg",
"Magnac-Laval",
"Magnac-Lavalette-Villars",
"Magnac-sur-Touvre",
"Magnat-l'Etrange",
"Magnat-l'Étrange",
"magnésio-anthophyllite",
"magnésio-anthophyllites",
"magnésio-axinite",
"magnésio-axinites",
"magnésio-calcite",
"magnésio-calcites",
"magnéto-électrique",
"magnéto-électriques",
"magnéto-optique",
"magnéto-optiques",
"Magneux-Haute-Rive",
"Magnicourt-en-Comte",
"Magnicourt-sur-Canche",
"Magny-Châtelard",
"Magny-Cours",
"Magny-Danigon",
"Magny-en-Bessin",
"Magny-en-Vexin",
"Magny-Fouchard",
"Magny-Jobert",
"Magny-la-Campagne",
"Magny-la-Fosse",
"Magny-Lambert",
"Magny-la-Ville",
"Magny-le-Désert",
"Magny-le-Freule",
"Magny-le-Hongre",
"Magny-lès-Aubigny",
"Magny-les-Hameaux",
"Magny-lès-Jussey",
"Magny-lès-Villers",
"Magny-Lormes",
"Magny-Montarlot",
"Magny-Saint-Médard",
"Magny-sur-Tille",
"Magny-Vernois",
"Magstatt-le-Bas",
"Magstatt-le-Haut",
"mahi-mahi",
"mah-jong",
"mah-jongs",
"Maignaut-Tauzia",
"Maignelay-Montigny",
"mail-coach",
"Mailhac-sur-Benaize",
"Mailleroncourt-Charette",
"Mailleroncourt-Saint-Pancras",
"Mailley-et-Chazelot",
"mailly-castellois",
"Mailly-Castellois",
"mailly-castelloise",
"Mailly-Castelloise",
"mailly-castelloises",
"Mailly-Castelloises",
"Mailly-Champagne",
"Mailly-la-Ville",
"Mailly-le-Camp",
"Mailly-le-Château",
"Mailly-Maillet",
"Mailly-Raineval",
"Mailly-sur-Seille",
"main-brune",
"main-courante",
"Maincourt-sur-Yvette",
"main-d'oeuvre",
"main-d'œuvre",
"maine-anjou",
"Maine-de-Boixe",
"Maine-et-Loire",
"main-forte",
"Main-Kinzig",
"main-militaire",
"mains-courantes",
"mains-d'oeuvre",
"mains-d'œuvre",
"Main-Spessart",
"Main-Tauber",
"Main-Taunus",
"maire-adjoint",
"Mairé-Levescault",
"maires-adjoints",
"Mairy-Mainville",
"Mairy-sur-Marne",
"Maisdon-sur-Sèvre",
"Maisey-le-Duc",
"Maisières-Notre-Dame",
"Maisnil-lès-Ruitz",
"Maison-Blanche",
"Maisoncelle-et-Villers",
"Maisoncelle-Saint-Pierre",
"Maisoncelles-du-Maine",
"Maisoncelles-en-Brie",
"Maisoncelles-en-Gâtinais",
"Maisoncelles-la-Jourdan",
"Maisoncelles-Pelvey",
"Maisoncelles-sur-Ajon",
"Maisoncelle-Tuilerie",
"Maison-des-Champs",
"Maison-Feyne",
"Maison-Maugis",
"maison-mère",
"Maisonnais-sur-Tardoire",
"Maison-Ponthieu",
"Maison-Roland",
"Maison-Rouge",
"Maisons-Alfort",
"Maisons-du-Bois-Lièvremont",
"Maisons-en-Champagne",
"Maisons-Laffitte",
"Maisons-lès-Chaource",
"Maisons-lès-Soulaines",
"maisons-mères",
"maître-assistant",
"maitre-autel",
"maître-autel",
"maître-bau",
"maitre-chanteur",
"maître-chanteur",
"maître-chanteuse",
"maitre-chien",
"maître-chien",
"maître-cylindre",
"maître-jacques",
"maître-mot",
"maitre-nageur",
"maître-nageur",
"maitre-nageuse",
"maître-nageuse",
"maîtres-assistants",
"maîtres-autels",
"maîtres-chanteurs",
"maîtres-chanteuses",
"maitres-chiens",
"maîtres-chiens",
"maîtres-cylindres",
"maîtres-jacques",
"maîtres-mots",
"maitres-nageurs",
"maîtres-nageurs",
"maitres-nageuses",
"maîtres-nageuses",
"maîtresse-femme",
"maitresse-nageuse",
"maîtresse-nageuse",
"maîtresses-femmes",
"maitresses-nageuses",
"maîtresses-nageuses",
"Maizières-la-Grande-Paroisse",
"Maizières-lès-Brienne",
"Maizières-lès-Metz",
"Maizières-lès-Vic",
"Maizières-sur-Amance",
"ma-jong",
"ma-jongs",
"make-up",
"make-ups",
"making-of",
"makura-e",
"makura-es",
"mal-aimé",
"mal-aimée",
"mal-aimés",
"Malaincourt-sur-Meuse",
"Malancourt-la-Montagne",
"Malarce-sur-la-Thines",
"Malaucourt-sur-Seille",
"Malay-le-Grand",
"Malay-le-Petit",
"malayo-polynésien",
"malayo-polynésienne",
"malayo-polynésiennes",
"malayo-polynésiens",
"Malayo-Polynésiens",
"mal-baisé",
"mal-baisée",
"mal-baisées",
"mal-baisés",
"Malborghetto-Valbruna",
"mal-comprenant",
"mal-comprenants",
"malécite-passamaquoddy",
"mal-égal",
"Malemort-du-Comtat",
"Malemort-sur-Corrèze",
"mal-en-point",
"mâles-stériles",
"mâle-stérile",
"mâle-stériles",
"mal-être",
"mal-êtres",
"Malèves-Sainte-Marie-Wastines",
"malgré-nous",
"Malherbe-sur-Ajon",
"Malicorne-sur-Sarthe",
"Malines-sur-Meuse",
"mal-information",
"mal-informations",
"mal-jugé",
"mal-jugés",
"Mallefougasse-Augès",
"malle-poste",
"Malleret-Boussac",
"Mallersdorf-Pfaffenberg",
"Malleval-en-Vercors",
"Malleville-les-Grès",
"Malleville-sur-le-Bec",
"mal-logement",
"mal-logements",
"Malo-les-Bains",
"Malons-et-Elze",
"mal-peigné",
"Mal-Peigné",
"mal-peignée",
"Mal-Peignée",
"mal-pensans",
"mal-pensant",
"mal-pensante",
"mal-pensantes",
"mal-pensants",
"Malsburg-Marzell",
"mals-peignées",
"Mals-Peignées",
"mals-peignés",
"Mals-Peignés",
"mal-venant",
"mal-venants",
"Malves-en-Minervois",
"mal-voyant",
"mal-voyants",
"m'amie",
"mamie-boomeuse",
"mamie-boomeuses",
"mam'selle",
"mam'selles",
"mamy-boomeuse",
"mamy-boomeuses",
"mam'zelle",
"mam'zelles",
"Manas-Bastanous",
"man-bun",
"man-buns",
"Mancenans-Lizerne",
"manche-à-balle",
"manche-à-balles",
"manco-liste",
"manco-listes",
"Mandailles-Saint-Julien",
"mandant-dépendant",
"mandat-carte",
"mandat-cash",
"mandat-lettre",
"mandat-poste",
"mandats-cartes",
"mandats-cash",
"mandats-lettres",
"mandats-poste",
"Mandelieu-la-Napoule",
"Mandeville-en-Bessin",
"Mandres-aux-Quatre-Tours",
"Mandres-en-Barrois",
"Mandres-la-Côte",
"Mandres-les-Roses",
"Mandres-sur-Vair",
"Manent-Montané",
"manganico-potassique",
"mangano-ankérite",
"mangano-ankérites",
"mangano-phlogopite",
"mangano-phlogopites",
"manganoso-ammonique",
"mange-Canayen",
"mange-debout",
"mange-disque",
"mange-disques",
"mange-merde",
"mange-piles",
"mange-tout",
"Mango-Rosa",
"maniaco-dépressif",
"maniaco-dépressifs",
"maniaco-dépressive",
"maniaco-dépressives",
"Maninghen-Henne",
"Manneken-pis",
"Manneville-ès-Plains",
"Manneville-la-Goupil",
"Manneville-la-Pipard",
"Manneville-la-Raoult",
"Manneville-sur-Risle",
"Mannweiler-Cölln",
"Manoncourt-en-Vermois",
"Manoncourt-en-Woëvre",
"Manoncourt-sur-Seille",
"Mansat-la-Courrière",
"Mansfeld-Harz-du-Sud",
"Mantenay-Montlin",
"Mantes-Gassicourt",
"Mantes-la-Jolie",
"Mantes-la-Ville",
"Manzac-sur-Vern",
"mappe-monde",
"mappes-mondes",
"Marainville-sur-Madon",
"Marais-Vernier",
"Marange-Silvange",
"Marange-Zondrange",
"Marat-sur-Aisne",
"Maraye-en-Othe",
"Marbourg-Biedenkopf",
"Marcellaz-Albanais",
"Marcé-sur-Esves",
"Marcey-les-Grèves",
"Marchais-Beton",
"Marchais-Béton",
"Marchais-en-Brie",
"Marché-Allouarde",
"Marche-en-Famenne",
"marché-gare",
"marché-gares",
"Marche-les-Dames",
"Marche-lez-Écaussinnes",
"marche-palier",
"Marchéville-en-Woëvre",
"Marchienne-au-Pont",
"Marchiennes-Campagne",
"Marcigny-sous-Thil",
"Marcilhac-sur-Célé",
"Marcillac-la-Croisille",
"Marcillac-la-Croze",
"Marcillac-Lanville",
"Marcillac-Saint-Quentin",
"Marcillac-Vallon",
"Marcillat-en-Combraille",
"Marcillé-la-Ville",
"Marcillé-Raoul",
"Marcillé-Robert",
"Marcilly-d'Azergues",
"Marcilly-en-Bassigny",
"Marcilly-en-Beauce",
"Marcilly-en-Gault",
"Marcilly-en-Villette",
"Marcilly-et-Dracy",
"Marcilly-la-Campagne",
"Marcilly-la-Gueurce",
"Marcilly-le-Châtel",
"Marcilly-le-Hayer",
"Marcilly-le-Pavé",
"Marcilly-lès-Buxy",
"Marcilly-lès-Vitteaux",
"Marcilly-Ogny",
"Marcilly-sur-Eure",
"Marcilly-sur-Maulne",
"Marcilly-sur-Seine",
"Marcilly-sur-Tille",
"Marcilly-sur-Vienne",
"Marc-la-Tour",
"Marcols-les-Eaux",
"marco-lucanien",
"marco-lucanienne",
"marco-lucaniennes",
"marco-lucaniens",
"Marcq-en-Barœul",
"Marcq-en-Ostrevent",
"Marcq-et-Chevières",
"Marcy-l'Etoile",
"Marcy-l'Étoile",
"Marcy-sous-Marle",
"Mareau-aux-Bois",
"Mareau-aux-Prés",
"maréchal-ferrant",
"maréchaux-ferrans",
"maréchaux-ferrants",
"Mareil-en-Champagne",
"Mareil-en-France",
"Mareil-le-Guyon",
"Mareil-Marly",
"Mareil-sur-Loir",
"Mareil-sur-Mauldre",
"Maren-Kessel",
"Maresquel-Ecquemicourt",
"Marest-Dampcourt",
"Marest-sur-Matz",
"Mareuil-Caubert",
"Mareuil-en-Brie",
"Mareuil-en-Dôle",
"Mareuil-la-Motte",
"Mareuil-le-Port",
"Mareuil-lès-Meaux",
"Mareuil-sur-Arnon",
"Mareuil-sur-Ay",
"Mareuil-sur-Cher",
"Mareuil-sur-Lay",
"Mareuil-sur-Lay-Dissais",
"Mareuil-sur-Ourcq",
"Marey-lès-Fussey",
"Marey-sur-Tille",
"margarino-sulfurique",
"Margaux-Cantenac",
"Margerie-Chantagret",
"Margerie-Hancourt",
"margis-chef",
"margis-chefs",
"Margny-aux-Cerises",
"Margny-lès-Compiègne",
"Margny-sur-Matz",
"Margouët-Meymes",
"mariage-sacrement",
"Maria-Hoop",
"Marie-Ange",
"Marie-Antoinette",
"Marie-blanque",
"marie-chantal",
"Marie-Chantal",
"marie-chantalerie",
"marie-chantaleries",
"Marie-Christine",
"Marie-Claire",
"Marie-Claude",
"marie-couche-toi-là",
"Marie-couche-toi-là",
"Marie-Crochet",
"Marie-Élise",
"Marie-Ève",
"Marie-France",
"Marie-Françoise",
"marie-galante",
"Marie-Galante",
"marie-galantes",
"Marie-Gisèle",
"Marie-Hélène",
"marie-jeanne",
"marie-jeannes",
"Marie-José",
"Marie-Laure",
"marie-louise",
"Marie-Louise",
"marie-louises",
"Marie-Madeleine",
"Marie-Marc",
"marie-monastérien",
"Marie-Monastérien",
"marie-monastérienne",
"Marie-Monastérienne",
"marie-monastériennes",
"Marie-Monastériennes",
"marie-monastériens",
"Marie-Monastériens",
"marie-montois",
"Marie-Montois",
"marie-montoise",
"Marie-Montoise",
"marie-montoises",
"Marie-Montoises",
"Marie-Noëlle",
"Marie-Paule",
"Marie-Pier",
"Marie-Pierre",
"marie-salope",
"maries-salopes",
"Marie-Thérèse",
"marie-trintigner",
"Marignac-en-Diois",
"Marignac-Lasclares",
"Marignac-Laspeyres",
"Marigna-sur-Valouse",
"Marigné-Laillé",
"Marigné-Peuton",
"Marigny-Brizay",
"Marigny-Chemereau",
"Marigny-en-Orxois",
"Marigny-le-Cahouët",
"Marigny-le-Châtel",
"Marigny-l'Eglise",
"Marigny-l'Église",
"Marigny-Le-Lozon",
"Marigny-lès-Reullée",
"Marigny-les-Usages",
"Marigny-Marmande",
"Marigny-Saint-Marcel",
"Marigny-sur-Yonne",
"Marillac-le-Franc",
"Marimont-lès-Bénestroff",
"Maring-Noviand",
"marin-pêcheur",
"marins-pêcheurs",
"Marizy-Sainte-Geneviève",
"Marizy-Saint-Mard",
"marka-dafing",
"Markina-Xemein",
"Marles-en-Brie",
"Marles-les-Mines",
"Marles-sur-Canche",
"Marly-Gomont",
"Marly-la-Ville",
"Marly-le-Roi",
"Marly-sous-Issy",
"Marly-sur-Arroux",
"Marmont-Pachas",
"Marnay-sur-Marne",
"Marnay-sur-Seine",
"Marnes-la-Coquette",
"Marnhagues-et-Latour",
"marno-bitumineux",
"marno-calcaire",
"marno-calcaires",
"Marolles-en-Beauce",
"Marolles-en-Brie",
"Marolles-en-Hurepoix",
"Marolles-lès-Bailly",
"Marolles-les-Braults",
"Marolles-les-Buis",
"Marolles-lès-Saint-Calais",
"Marolles-sous-Lignières",
"Marolles-sur-Seine",
"Marqueny-au-Vallage",
"marque-ombrelle",
"marque-page",
"marque-pagé",
"marque-pagea",
"marque-pageai",
"marque-pageaient",
"marque-pageais",
"marque-pageait",
"marque-pageâmes",
"marque-pageant",
"marque-pageas",
"marque-pageasse",
"marque-pageassent",
"marque-pageasses",
"marque-pageassiez",
"marque-pageassions",
"marque-pageât",
"marque-pageâtes",
"marque-pagée",
"marque-pagées",
"marque-pagent",
"marque-pageons",
"marque-pager",
"marque-pagera",
"marque-pagerai",
"marque-pageraient",
"marque-pagerais",
"marque-pagerait",
"marque-pageras",
"marque-pagèrent",
"marque-pagerez",
"marque-pageriez",
"marque-pagerions",
"marque-pagerons",
"marque-pageront",
"marque-pages",
"marque-pagés",
"marque-pagez",
"marque-pagiez",
"marque-pagions",
"marque-produit",
"marque-produits",
"marques-ombrelles",
"Marquette-en-Ostrevant",
"Marquette-lez-Lille",
"Marsac-en-Livradois",
"Marsac-sur-Don",
"Marsac-sur-l'Isle",
"Marsais-Sainte-Radégonde",
"Marsannay-la-Côte",
"Marsannay-le-Bois",
"Marseille-en-Beauvaisis",
"Marseille-lès-Aubigny",
"Marseilles-lès-Aubigny",
"Mars-la-Tour",
"Marson-sur-Barboure",
"Marssac-sur-Tarn",
"Mars-sous-Bourcq",
"Mars-sur-Allier",
"Martailly-lès-Brancion",
"Martainville-Epreville",
"Martainville-Épreville",
"marteau-de-mer",
"marteau-pilon",
"marteau-piqueur",
"marteaux-pilons",
"marteaux-piqueurs",
"marte-piquant",
"marte-piquants",
"Martignas-sur-Jalle",
"Martigné-Briand",
"Martigné-Ferchaud",
"Martigné-sur-Mayenne",
"Martigny-Combe",
"Martigny-Courpierre",
"Martigny-le-Comte",
"Martigny-les-Bains",
"Martigny-les-Gerbonvaux",
"Martigny-sur-l'Ante",
"martin-bâton",
"Martin-bâton",
"martin-bâtons",
"Martin-bâtons",
"martin-chasseur",
"Martincourt-sur-Meuse",
"Martin-Eglise",
"Martin-Église",
"martin-pêcheur",
"martins-chasseurs",
"martin-sec",
"martin-sire",
"martins-pêcheurs",
"martins-sires",
"martins-sucrés",
"martin-sucré",
"Martouzin-Neuville",
"Martres-d'Artières",
"Martres-de-Rivière",
"Martres-sur-Morge",
"Martres-Tolosane",
"martres-zibelines",
"martre-zibeline",
"Maruéjols-lès-Gardon",
"Maruri-Jatabe",
"Marvaux-Vieux",
"Marville-les-Bois",
"Marville-Moutiers-Brûlé",
"marxisme-léninisme",
"marxiste-léniniste",
"marxistes-léninistes",
"Mary-sur-Marne",
"m'as",
"masa'il",
"masa'ils",
"Masbaraud-Mérignat",
"Mas-Blanc",
"Mas-Blanc-des-Alpilles",
"Mas-Cabardès",
"Mascaraàs-Haron",
"mas-chélyen",
"Mas-Chélyen",
"mas-chélyenne",
"Mas-Chélyenne",
"mas-chélyennes",
"Mas-Chélyennes",
"mas-chélyens",
"Mas-Chélyens",
"Mas-d'Auvignon",
"Mas-de-Londres",
"Mas-des-Cours",
"Mas-d'Orcières",
"Masevaux-Niederbruck",
"Mas-Grenier",
"Masnuy-Saint-Jean",
"Masnuy-Saint-Pierre",
"Maspie-Lalonquère-Juillacq",
"Massa-Carrara",
"Massac-Séran",
"Mas-Saint-Chély",
"Mas-Saintes-Puelles",
"Massen-Niederlausitz",
"masseur-kinésithérapeute",
"masseurs-kinésithérapeutes",
"masseuse-kinésithérapeute",
"masseuses-kinésithérapeutes",
"Massignieu-de-Rives",
"Massillargues-Attuech",
"Massingy-lès-Semur",
"Massingy-lès-Vitteaux",
"mass-média",
"mass-médias",
"mas-tençois",
"Mas-Tençois",
"mas-tençoise",
"Mas-Tençoise",
"mas-tençoises",
"Mas-Tençoises",
"m'as-tu-vu",
"m'as-tu-vue",
"m'as-tu-vues",
"m'as-tu-vus",
"Matafelon-Granges",
"Matagne-la-Grande",
"Matagne-la-Petite",
"materno-infantile",
"materno-infantiles",
"mathématico-informatique",
"mathématico-informatiques",
"Matignicourt-Goncourt",
"matthéo-lucanien",
"matthéo-lucanienne",
"matthéo-lucaniennes",
"matthéo-lucaniens",
"Matton-et-Clémency",
"Matzlow-Garwitz",
"Maubert-Fontaine",
"Maucourt-sur-Orne",
"Maudétour-en-Vexin",
"Mauges-sur-Loire",
"Mauléon-Barousse",
"Mauléon-d'Armagnac",
"Mauléon-Licharre",
"Maulévrier-Sainte-Gertrude",
"Maumusson-Laguian",
"Maupertus-sur-Mer",
"Maure-de-Bretagne",
"Mauregny-en-Haye",
"Maureilhan-et-Raméjean",
"Maureillas-las-Illas",
"Maurens-Scopont",
"mauritano-marocain",
"mauritano-sénégalais",
"Maurupt-le-Montois",
"Maussane-les-Alpilles",
"Mauves-sur-Huisne",
"Mauves-sur-Loire",
"Mauvezin-d'Armagnac",
"Mauvezin-de-Prat",
"Mauvezin-de-Sainte-Croix",
"Mauvezin-sur-Gupie",
"Mauzac-et-Grand-Castang",
"Mauzens-et-Miremont",
"Mauzé-sur-le-Mignon",
"Mauzé-Thouarsais",
"Mavilly-Mandelot",
"Mawashi-geri",
"Maxey-sur-Meuse",
"Maxey-sur-Vaise",
"Maxhütte-Haidhof",
"maxillo-dentaire",
"maxillo-facial",
"maxillo-labial",
"maxillo-musculaire",
"Maxilly-sur-Léman",
"Maxilly-sur-Saône",
"Mayence-Bingen",
"Mayen-Coblence",
"May-en-Multien",
"Mayres-Savel",
"Mayrinhac-Lentour",
"May-sur-Orne",
"Mazan-l'Abbaye",
"Mazé-Milon",
"Mazerat-Aurouze",
"Mazères-de-Neste",
"Mazères-Lezons",
"Mazères-sur-Salat",
"Mazerolles-du-Razès",
"Mazerolles-le-Salin",
"Mazet-Saint-Voy",
"Mazeyrat-Aurouze",
"Mazeyrat-d'Allier",
"Mazières-de-Touraine",
"Mazières-en-Gâtine",
"Mazières-en-Mauges",
"Mazières-Naresse",
"Mazières-sur-Béronne",
"Mbanza-Ngungu",
"m'bororo",
"McDonald's",
"m-commerce",
"m'demma",
"mea-culpa",
"meâ-culpâ",
"Meaulne-Vitray",
"Meaux-la-Montagne",
"Mechelen-aan-de-Maas",
"Mecklembourg-du-Nord-Ouest",
"Mecklembourg-Poméranie-Occidentale",
"Mecklembourg-Strelitz",
"mécoprop-P",
"médecine-ball",
"médecine-balls",
"médiévale-fantastique",
"médiévales-fantastiques",
"médiéval-fantastique",
"médiévaux-fantastiques",
"Medina-Sidonia",
"médio-dorsal",
"médio-européen",
"médio-européenne",
"médio-européennes",
"médio-européens",
"médio-jurassique",
"médio-jurassiques",
"médio-latin",
"médio-latine",
"médio-latines",
"médio-latins",
"médio-océanique",
"médio-océaniques",
"méduse-boite",
"méduse-boîte",
"méduses-boites",
"méduses-boîtes",
"Meensel-Kiezegem",
"Meerlo-Wanssum",
"Meeuwen-Gruitrode",
"méfenpyr-diéthyl",
"méga-ampère",
"méga-ampères",
"méga-église",
"méga-églises",
"méga-électron-volt",
"mégaélectron-volt",
"méga-électron-volts",
"mégaélectron-volts",
"méga-herbivore",
"méga-herbivores",
"mégalo-martyr",
"mégalo-martyrs",
"méga-océan",
"méga-océans",
"méga-ohm",
"méga-ohms",
"mégléno-roumain",
"Mehun-sur-Yèvre",
"Meigné-le-Vicomte",
"Meilhan-sur-Garonne",
"Meillier-Fontaine",
"Meilly-sur-Rouvres",
"Meix-devant-Virton",
"Meix-le-Tige",
"Méjannes-le-Clap",
"Méjannes-lès-Alès",
"mêlé-cass",
"mêlé-casse",
"mêlé-casses",
"mêlé-cassis",
"mele-fila",
"mêle-tout",
"Méligny-le-Grand",
"Méligny-le-Petit",
"méli-mélo",
"mêli-mêlo",
"mélis-mélos",
"mêlis-mêlos",
"Mellenbach-Glasbach",
"Melleray-la-Vallée",
"Melun-Sénart",
"Melz-sur-Seine",
"membrano-calcaire",
"Ménestérol-Montignac",
"Ménestreau-en-Villette",
"Menetou-Couture",
"Menetou-Râtel",
"Menetou-Salon",
"Menetou-sur-Nahon",
"Ménétréol-sous-Sancerre",
"Ménétréols-sous-Vatan",
"Ménétréol-sur-Sauldre",
"Ménétreux-le-Pitois",
"Menétru-le-Vignoble",
"Menétrux-en-Joux",
"m'enfin",
"Mengersgereuth-Hämmern",
"Ménil-Annelles",
"ménil-annellois",
"Ménil-Annellois",
"ménil-annelloise",
"Ménil-Annelloise",
"ménil-annelloises",
"Ménil-Annelloises",
"Ménil-aux-Bois",
"Ménil-de-Senones",
"Ménil-en-Xaintois",
"Ménil-Erreux",
"Ménil-Froger",
"Ménil-Gondouin",
"ménil-gondoyen",
"Ménil-Gondoyen",
"ménil-gondoyenne",
"Ménil-Gondoyenne",
"ménil-gondoyennes",
"Ménil-Gondoyennes",
"ménil-gondoyens",
"Ménil-Gondoyens",
"Ménil-Hermei",
"Ménil-Hubert-en-Exmes",
"Ménil-Hubert-sur-Orne",
"Ménil-Jean",
"Ménil-la-Horgne",
"Ménil-la-Tour",
"Ménil-Lépinois",
"Ménil'muche",
"Ménil-sur-Belvitte",
"Ménil-sur-Saulx",
"Ménil-Vin",
"méningo-encéphalite",
"méningo-gastrique",
"méningo-gastriques",
"Mennetou-sur-Cher",
"menthe-coq",
"Menthonnex-en-Bornes",
"Menthonnex-sous-Clermont",
"Menthon-Saint-Bernard",
"Mentque-Nortbécourt",
"menuisier-moulurier",
"Méolans-Revel",
"Méounes-lès-Montrieux",
"mépiquat-chlorure",
"Merbes-le-Château",
"Merbes-Sainte-Marie",
"Mercey-le-Grand",
"Mercey-sur-Saône",
"Mercin-et-Vaux",
"Merck-Saint-Liévin",
"Mercurol-Veaunes",
"mercuroso-mercurique",
"Mercury-Gémilly",
"Mercus-Garrabet",
"Mercy-le-Bas",
"Mercy-le-Haut",
"mère-grand",
"Mérens-les-Vals",
"mères-grand",
"Mérey-sous-Montrond",
"Mérey-Vieilley",
"Méricourt-en-Vimeu",
"Méricourt-l'Abbé",
"Méricourt-sur-Somme",
"mérier-blanc",
"mériers-blancs",
"Mérindol-les-Oliviers",
"merisier-pays",
"merisiers-pays",
"Merkers-Kieselbach",
"Merkwiller-Pechelbronn",
"Merle-Leignec",
"Merles-sur-Loison",
"Merlieux-et-Fouquerolles",
"Meroux-Moval",
"Merrey-sur-Arce",
"Merry-la-Vallée",
"Merry-Sec",
"Merry-sur-Yonne",
"Mers-les-Bains",
"Mers-sur-Indre",
"Merville-au-Bois",
"Merville-Franceville-Plage",
"Méry-Bissières-en-Auge",
"Méry-Corbon",
"Méry-ès-Bois",
"Méry-la-Bataille",
"Méry-Prémecy",
"Méry-sur-Cher",
"Méry-sur-Marne",
"Méry-sur-Oise",
"Méry-sur-Seine",
"Merzig-Wadern",
"Mesbrecourt-Richecourt",
"Meschers-sur-Gironde",
"Meslay-du-Maine",
"Meslay-le-Grenet",
"Meslay-le-Vidame",
"Meslin-l'Évêque",
"Mesnard-la-Barotière",
"Mesnières-en-Bray",
"Mesnils-sur-Iton",
"méso-américain",
"méso-américaine",
"méso-américaines",
"méso-américains",
"Méso-Amérique",
"méso-diastolique",
"méso-diastoliques",
"méso-hygrophile",
"méso-hygrophiles",
"mésosulfuron-méthyl-sodium",
"méso-systolique",
"méso-systoliques",
"Messey-sur-Grosne",
"Messia-sur-Sorne",
"Messigny-et-Vantoux",
"Messimy-sur-Saône",
"Mesves-sur-Loire",
"métacarpo-phalangien",
"Métairies-Saint-Quirin",
"métalaxyl-M",
"métam-sodium",
"métaphysico-théologo-cosmolo-nigologie",
"métaphysico-théologo-cosmolo-nigologies",
"métatarso-phalangien",
"météo-dépendant",
"météo-dépendante",
"météo-dépendantes",
"météo-dépendants",
"méthyl-buténol",
"métirame-zinc",
"mètre-ruban",
"mètres-ruban",
"métro-boulot-dodo",
"mets-en",
"Metz-Campagne",
"Metz-en-Couture",
"Metzerlen-Mariastein",
"Metz-le-Comte",
"Metz-Robert",
"metz-tesseran",
"Metz-Tesseran",
"metz-tesseranne",
"Metz-Tesseranne",
"metz-tesserannes",
"Metz-Tesserannes",
"metz-tesserans",
"Metz-Tesserans",
"Metz-Tessy",
"Metz-Ville",
"Meulan-en-Yvelines",
"Meunet-Planches",
"Meunet-sur-Vatan",
"Meung-sur-Loire",
"meurt-de-faim",
"meurt-de-soif",
"Meurthe-et-Moselle",
"meurt-la-faim",
"Meuselbach-Schwarzmühle",
"meuse-rhin-yssel",
"Mévergnies-lez-Lens",
"Meyrieu-les-Etangs",
"Meyrieu-les-Étangs",
"Meyrieux-Trouet",
"Meyrignac-l'Eglise",
"Meyrignac-l'Église",
"Mézidon-Canon",
"Mézières-au-Perche",
"Mézières-en-Brenne",
"Mézières-en-Drouais",
"Mézières-en-Gâtinais",
"Mézières-en-Santerre",
"Mézières-en-Vexin",
"Mézières-lez-Cléry",
"Mézières-sous-Lavardin",
"Mézières-sur-Couesnon",
"Mézières-sur-Issoire",
"Mézières-sur-Oise",
"Mézières-sur-Ponthouin",
"Mézières-sur-Seine",
"Mézy-Moulins",
"Mézy-sur-Seine",
"mezzo-soprano",
"mezzo-sopranos",
"mezzo-termine",
"mezzo-tinto",
"Mezzovico-Vira",
"m'halla",
"m'hallas",
"miam-miam",
"miaou-miaou",
"Michel-Ange",
"michel-angélesque",
"michel-angélesques",
"Michelbach-le-Bas",
"Michelbach-le-Haut",
"microélectron-volt",
"microélectron-volts",
"Midden-Delfland",
"Midden-Drenthe",
"Midden-Eierland",
"midi-chlorien",
"midi-chloriens",
"midi-pelle",
"midi-pelles",
"midi-pyrénéen",
"Midi-Pyrénéen",
"Midi-Pyrénéens",
"Midi-Pyrénées",
"Midsland-Noord",
"Mielen-boven-Aalst",
"Mierlo-Hout",
"mieux-disant",
"mieux-disante",
"mieux-disantes",
"mieux-disants",
"mieux-être",
"Mignaloux-Beauvoir",
"Migné-Auxances",
"Milhac-d'Auberoche",
"Milhac-de-Nontron",
"militaro-bureaucratique",
"militaro-bureaucratiques",
"militaro-industriel",
"militaro-industrielle",
"militaro-industrielles",
"militaro-industriels",
"Milizac-Guipronvel",
"milk-bar",
"milk-bars",
"milk-shake",
"milk-shakes",
"mille-au-godet",
"mille-canton",
"mille-feuille",
"mille-feuilles",
"mille-fleurs",
"Mille-Islois",
"Millencourt-en-Ponthieu",
"mille-pattes",
"mille-pertuis",
"mille-pieds",
"mille-points",
"milliampère-heure",
"milliampères-heures",
"milli-électron-volt",
"milliélectron-volt",
"milli-électron-volts",
"milliélectron-volts",
"Millienhagen-Oebelitz",
"Millingen-sur-Rhin",
"milli-ohm",
"milli-ohms",
"Milly-la-Forêt",
"Milly-Lamartine",
"Milly-sur-Bradon",
"Milly-sur-Thérain",
"Milon-la-Chapelle",
"mime-acrobate",
"Minaucourt-le-Mesnil-lès-Hurlus",
"Minden-Lübbecke",
"Minho-Lima",
"Miniac-Morvan",
"Miniac-sous-Bécherel",
"Minihy-Tréguier",
"ministre-présidence",
"ministre-présidences",
"ministre-président",
"ministres-présidents",
"Min-jun",
"minn'gotain",
"Minn'Gotain",
"minn'gotaine",
"Minn'Gotaine",
"minn'gotaines",
"Minn'Gotaines",
"minn'gotains",
"Minn'Gotains",
"Min-seo",
"minus-habens",
"minute-lumière",
"minutes-lumière",
"Miossens-Lanusse",
"Miquelon-Langlade",
"Mirabel-aux-Baronnies",
"Mirabel-et-Blacons",
"Miramont-d'Astarac",
"Miramont-de-Comminges",
"Miramont-de-Guyenne",
"Miramont-de-Quercy",
"Miramont-Latour",
"Miramont-Sensacq",
"Mirandol-Bourgnounac",
"Miraval-Cabardes",
"Mirebeau-sur-Bèze",
"mire-oeuf",
"mire-œuf",
"mire-oeufs",
"mire-œufs",
"Mirepoix-sur-Tarn",
"Mireval-Lauragais",
"Miribel-Lanchâtre",
"Miribel-les-Echelles",
"Miribel-les-Échelles",
"miro-miro",
"Miserey-Salines",
"Misery-Courtion",
"Missen-Wilhams",
"Missy-aux-Bois",
"Missy-lès-Pierrepont",
"Missy-sur-Aisne",
"Misy-sur-Yonne",
"Mitry-Mory",
"Mittainvilliers-Vérigny",
"mixed-border",
"mixti-unibinaire",
"m'kahla",
"m'kahlas",
"mobil-home",
"mobil-homes",
"Moca-Croce",
"modèle-vue-contrôleur",
"modern-style",
"Moëlan-sur-Mer",
"Mœurs-Verdey",
"Moffans-et-Vacheresse",
"mofu-gudur",
"Moidieu-Détourbe",
"Moigny-sur-Ecole",
"Moigny-sur-École",
"moi-même",
"moins-disant",
"moins-disants",
"moins-que-rien",
"moins-value",
"moins-values",
"Moinville-la-Jeulin",
"Moirans-en-Montagne",
"Moirey-Flabas-Crépion",
"Moisdon-la-Rivière",
"mois-homme",
"mois-hommes",
"mois-lumière",
"Moissac-Bellevue",
"Moissac-Vallée-Française",
"Moissieu-sur-Dolon",
"moissonner-battre",
"moissonneuse-batteuse",
"moissonneuse-lieuse",
"moissonneuses-batteuses",
"moissonneuses-lieuses",
"Moissy-Cramayel",
"Moissy-Moulinot",
"moite-moite",
"moitié-moitié",
"Moitron-sur-Sarthe",
"mojeño-ignaciano",
"mojeño-javierano",
"mojeño-loretano",
"mojeño-trinitario",
"Molenbeek-Saint-Jean",
"Molenbeek-Wersbeek",
"Molières-Cavaillac",
"Molières-Glandaz",
"Molières-sur-Cèze",
"Molières-sur-l'Alberte",
"Moliets-et-Maa",
"Molines-en-Queyras",
"Molins-sur-Aube",
"Molitg-les-Bains",
"Mollans-sur-Ouvèze",
"Molliens-au-Bois",
"Molliens-Dreuil",
"mollo-mollo",
"moment-clé",
"moment-clés",
"moments-clés",
"Monacia-d'Aullène",
"Monacia-d'Orezza",
"Monassut-Audiracq",
"Moncayolle-Larrory-Mendibieu",
"Monceau-en-Ardenne",
"Monceau-Imbrechies",
"Monceau-le-Neuf-et-Faucouzy",
"Monceau-lès-Leups",
"Monceau-le-Waast",
"Monceau-Saint-Waast",
"Monceau-sur-Oise",
"Monceau-sur-Sambre",
"Monceaux-au-Perche",
"Monceaux-en-Bessin",
"Monceaux-l'Abbaye",
"Monceaux-le-Comte",
"Monceaux-sur-Dordogne",
"Moncé-en-Belin",
"Moncé-en-Saosnois",
"Moncel-lès-Lunéville",
"Moncel-sur-Seille",
"Moncel-sur-Vair",
"Moncetz-l'Abbaye",
"Moncetz-Longevas",
"Monchaux-Soreng",
"Monchaux-sur-Ecaillon",
"Monchaux-sur-Écaillon",
"Moncheaux-lès-Frévent",
"Monchel-sur-Canche",
"Mönchpfiffel-Nikolausrieth",
"Monchy-au-Bois",
"Monchy-Breton",
"Monchy-Cayeux",
"Monchy-Humières",
"Monchy-Lagache",
"Monchy-le-Preux",
"Monchy-Saint-Eloi",
"Monchy-Saint-Éloi",
"Monchy-sur-Eu",
"Monclar-de-Quercy",
"Monclar-sur-Losse",
"Moncorneil-Grazan",
"Mondariz-Balneario",
"Mondement-Montgivroux",
"Mondonville-Saint-Jean",
"Mondorf-les-Bains",
"Monestier-d'Ambel",
"Monestier-de-Clermont",
"Monestier-Merlines",
"Monestier-Port-Dieu",
"Monétay-sur-Allier",
"Monétay-sur-Loire",
"Monêtier-Allemont",
"Monferran-Plavès",
"Monferran-Savès",
"Monflorite-Lascasas",
"Mongolie-intérieure",
"Monguelfo-Tesido",
"Monistrol-d'Allier",
"Monistrol-sur-Loire",
"Monlaur-Bernet",
"Monléon-Magnoac",
"Monlezun-d'Armagnac",
"monnaie-du-pape",
"Monnetier-Mornex",
"Monnet-la-Ville",
"Monsempron-Libos",
"monsieur-dame",
"Monsteroux-Milieu",
"Montacher-Villegardin",
"Montagnac-d'Auberoche",
"Montagnac-la-Crempse",
"Montagnac-Montpezat",
"Montagnac-sur-Auvignon",
"Montagnac-sur-Lède",
"Montagna-le-Reconduit",
"Montagna-le-Templier",
"Montagne-Fayel",
"Montagney-Servigney",
"Montagny-en-Vexin",
"Montagny-lès-Beaune",
"Montagny-lès-Buxy",
"Montagny-les-Lanches",
"Montagny-lès-Seurre",
"Montagny-près-Louhans",
"Montagny-près-Yverdon",
"Montagny-Sainte-Félicité",
"Montagny-sur-Grosne",
"Montaignac-Saint-Hippolyte",
"Montaigu-de-Quercy",
"Montaiguët-en-Forez",
"Montaigu-la-Brisette",
"Montaigu-le-Blin",
"Montaigu-les-Bois",
"Montaigut-le-Blanc",
"Montaigut-sur-Save",
"Montaigu-Zichem",
"Montalba-d'Amélie",
"Montalba-le-Château",
"Montalet-le-Bois",
"Montalieu-Vercieu",
"Montambert-Tannay",
"Montaren-et-Saint-Médiers",
"Montarlot-lès-Champlitte",
"Montarlot-lès-Rioz",
"Montastruc-de-Salies",
"Montastruc-la-Conseillère",
"Montastruc-Savès",
"Montauban-de-Bretagne",
"Montauban-de-Luchon",
"Montauban-de-Picardie",
"Montauban-sur-l'Ouvèze",
"Montaut-les-Créneaux",
"Montbonnot-Saint-Martin",
"Montboucher-sur-Jabron",
"Montbrison-sur-Lez",
"Montbrun-Bocage",
"Montbrun-des-Corbières",
"Montbrun-Lauragais",
"Montbrun-les-Bains",
"Montceau-et-Echarnant",
"Montceau-et-Écharnant",
"Montceau-les-Mines",
"Montceaux-lès-Meaux",
"Montceaux-lès-Provins",
"Montceaux-lès-Vaudes",
"Montceaux-l'Etoile",
"Montceaux-l'Étoile",
"Montceaux-Ragny",
"Montchanin-les-Mines",
"Montclar-de-Comminges",
"Montclar-Lauragais",
"Montclar-sur-Gervanne",
"Montcombroux-les-Mines",
"Montcornet-en-Ardenne",
"Montcourt-Fromonville",
"Montcuq-en-Quercy-Blanc",
"Montcy-Notre-Dame",
"Montcy-Saint-Pierre",
"monte-au-ciel",
"Monte-Carlo",
"monte-charge",
"monte-charges",
"monte-courroie",
"monte-courroies",
"monte-en-l'air",
"monte-escalier",
"monte-escaliers",
"Montégut-Arros",
"Montégut-Bourjac",
"Montégut-en-Couserans",
"Montégut-Lauragais",
"Montégut-Plantaurel",
"Montégut-Savès",
"Monteignet-sur-l'Andelot",
"monte-jus",
"monte-lait",
"Montel-de-Gelat",
"monte-meuble",
"monte-meubles",
"Montemor-o-Novo",
"Montemor-o-Velho",
"monte-pente",
"monte-pentes",
"monte-plat",
"monte-plats",
"Montereau-Fault-Yonne",
"Montereau-faut-Yonne",
"Montereau-sur-le-Jard",
"Montescourt-Lizerolles",
"Montesquieu-Avantès",
"Montesquieu-des-Albères",
"Montesquieu-Guittaut",
"Montesquieu-Lauragais",
"Montesquieu-Volvestre",
"Montestruc-sur-Gers",
"Montet-et-Bouxal",
"Montfaucon-d'Argonne",
"Montfaucon-en-Velay",
"Montfaucon-Montigné",
"Montferrand-du-Périgord",
"Montferrand-la-Fare",
"Montferrand-le-Château",
"Montferrier-sur-Lez",
"Montfort-en-Chalosse",
"Montfort-l'Amaury",
"Montfort-le-Gesnois",
"Montfort-le-Rotrou",
"Montfort-sur-Argens",
"Montfort-sur-Boulzane",
"Montfort-sur-Meu",
"Montfort-sur-Risle",
"Montgaillard-de-Salies",
"Montgaillard-Lauragais",
"Montgaillard-sur-Save",
"Montgé-en-Goële",
"Montgru-Saint-Hilaire",
"Monthou-sur-Bièvre",
"Monthou-sur-Cher",
"Monthureux-le-Sec",
"Monthureux-sur-Saône",
"monti-corcellois",
"Monti-Corcellois",
"monti-corcelloise",
"Monti-Corcelloise",
"monti-corcelloises",
"Monti-Corcelloises",
"Montier-en-Der",
"Montier-en-l'Isle",
"Montiers-sur-Saulx",
"Monties-Aussos",
"Montignac-Charente",
"Montignac-de-Lauzun",
"Montignac-le-Coq",
"Montignac-Toupinerie",
"Montigné-le-Brillant",
"Montigné-lès-Rairies",
"Montigné-sur-Moine",
"Montignies-lez-Lens",
"Montignies-Saint-Christophe",
"Montignies-sur-Roc",
"Montignies-sur-Sambre",
"Montigny-aux-Amognes",
"Montigny-devant-Sassey",
"Montigny-en-Arrouaise",
"Montigny-en-Cambrésis",
"Montigny-en-Gohelle",
"Montigny-en-Morvan",
"Montigny-en-Ostrevent",
"Montigny-l'Allier",
"Montigny-la-Resle",
"Montigny-le-Bretonneux",
"Montigny-le-Chartif",
"Montigny-le-Franc",
"Montigny-le-Gannelon",
"Montigny-le-Guesdier",
"Montigny-Lencoup",
"Montigny-Lengrain",
"Montigny-lès-Arsures",
"Montigny-lès-Cherlieu",
"Montigny-lès-Condé",
"Montigny-lès-Cormeilles",
"Montigny-les-Jongleurs",
"Montigny-lès-Metz",
"Montigny-les-Monts",
"Montigny-lès-Vaucouleurs",
"Montigny-lès-Vesoul",
"Montigny-le-Teigneux",
"Montigny-le-Tilleul",
"Montigny-Montfort",
"Montigny-Mornay-Villeneuve-sur-Vingeanne",
"Montigny-Saint-Barthélemy",
"Montigny-sous-Marle",
"Montigny-sur-Armançon",
"Montigny-sur-Aube",
"Montigny-sur-Avre",
"Montigny-sur-Canne",
"Montigny-sur-Chiers",
"Montigny-sur-Crécy",
"Montigny-sur-l'Ain",
"Montigny-sur-l'Hallue",
"Montigny-sur-Loing",
"Montigny-sur-Meuse",
"Montigny-sur-Vence",
"Montigny-sur-Vesle",
"Montilly-sur-Noireau",
"montis-fagussin",
"Montis-Fagussin",
"montis-fagussine",
"Montis-Fagussine",
"montis-fagussines",
"Montis-Fagussines",
"montis-fagussins",
"Montis-Fagussins",
"Montjean-sur-Loire",
"Montjoie-en-Couserans",
"Montjoie-le-Château",
"Montjoie-Saint-Martin",
"Montjustin-et-Velotte",
"Montlaur-en-Diois",
"Montlay-en-Auxois",
"Montlieu-la-Garde",
"Montliot-et-Courcelles",
"Montlouis-sur-Loire",
"Montmartin-en-Graignes",
"Montmartin-le-Haut",
"Montmartin-sur-Mer",
"Montmaur-en-Diois",
"Montmelas-Saint-Sorlin",
"Montmerle-sur-Saône",
"Montmirey-la-Ville",
"Montmirey-le-Château",
"Montmoreau-Saint-Cybard",
"Montmorency-Beaufort",
"Montmort-Lucy",
"Montoir-de-Bretagne",
"Montoire-sur-le-Loir",
"Montois-la-Montagne",
"Montot-sur-Rognon",
"Montoulieu-Saint-Bernard",
"Montoy-Flanville",
"Montpellier-de-Médillan",
"Montpezat-de-Quercy",
"Montpezat-sous-Bauzon",
"Montpon-Ménestérol",
"Montpont-en-Bresse",
"Montréal-la-Cluse",
"Montréal-les-Sources",
"montréalo-centrisme",
"montre-bracelet",
"montre-chronomètre",
"Montredon-des-Corbières",
"Montredon-Labessonnié",
"montres-bracelets",
"montres-chronomètres",
"Montreuil-au-Houlme",
"Montreuil-aux-Lions",
"Montreuil-Bellay",
"Montreuil-Bonnin",
"Montreuil-des-Landes",
"Montreuil-en-Auge",
"Montreuil-en-Caux",
"Montreuil-en-Touraine",
"Montreuil-Juigné",
"Montreuil-la-Cambe",
"Montreuil-l'Argillé",
"Montreuil-le-Chétif",
"Montreuil-le-Gast",
"Montreuil-le-Henri",
"Montreuil-Poulay",
"Montreuil-sous-Bois",
"Montreuil-sous-Pérouse",
"Montreuil-sur-Barse",
"Montreuil-sur-Blaise",
"Montreuil-sur-Brêche",
"Montreuil-sur-Epte",
"Montreuil-sur-Ille",
"Montreuil-sur-Loir",
"Montreuil-sur-Lozon",
"Montreuil-sur-Maine",
"Montreuil-sur-Thérain",
"Montreuil-sur-Thonnance",
"Montreux-Château",
"Montreux-Jeune",
"Montreux-Vieux",
"Montrevault-sur-Èvre",
"Montrevel-en-Bresse",
"Montrichard-Val-de-Cher",
"Montricher-Albanne",
"Montrieux-en-Sologne",
"Montrœul-au-Bois",
"Montrœul-sur-Haine",
"Montrol-Sénard",
"Montrond-le-Château",
"Montrond-les-Bains",
"Montsauche-les-Settons",
"Montsecret-Clairefougère",
"Montségur-sur-Lauzon",
"Montsinéry-Tonnegrande",
"Montureux-et-Prantigny",
"Montureux-lès-Baulay",
"Montval-sur-Loir",
"Moon-sur-Elle",
"Moorea-Maiao",
"Moor-Rolofshagen",
"moque-dieu",
"Morainville-Jouveaux",
"Morainville-près-Lieurey",
"Morannes-sur-Sarthe",
"Moras-en-Valloire",
"mords-cheval",
"Mörel-Filet",
"Morêtel-de-Mailles",
"Moret-sur-Loing",
"Morey-Saint-Denis",
"Mörfelden-Walldorf",
"Morgenröthe-Rautenkranz",
"Morgny-en-Thiérache",
"Morgny-la-Pommeraye",
"Morières-lès-Avignon",
"Morigny-Champigny",
"Möriken-Wildegg",
"Morlanwelz-Mariemont",
"Morlhon-le-Haut",
"Mormant-sur-Vernisson",
"Mornac-sur-Seudre",
"Mornand-en-Forez",
"Mornay-Berry",
"Mornay-sur-Allier",
"Morne-à-l'Eau",
"morphine-base",
"Morsang-sur-Orge",
"Morsang-sur-Seine",
"Morsbronn-les-Bains",
"Morschwiller-le-Bas",
"Mortagne-au-Perche",
"Mortagne-du-Nord",
"Mortagne-sur-Gironde",
"Mortagne-sur-Sèvre",
"Mortain-Bocage",
"mort-aux-rats",
"mort-bois",
"mort-chien",
"mort-de-chien",
"mort-dieu",
"Morteaux-Couliboeuf",
"Morteaux-Coulibœuf",
"morte-eau",
"Mortefontaine-en-Thelle",
"morte-paye",
"morte-payes",
"Morterolles-sur-Semme",
"morte-saison",
"mortes-eaux",
"Mortes-Frontières",
"mortes-payes",
"mortes-saisons",
"mortes-vivantes",
"morte-vivante",
"mort-né",
"mort-née",
"mort-nées",
"mort-nés",
"mort-plain",
"mort-plains",
"morts-bois",
"morts-chiens",
"morts-flats",
"morts-terrains",
"morts-vivants",
"mort-terrain",
"mort-vivant",
"Morville-en-Beauce",
"Morville-lès-Vic",
"Morvillers-Saint-Saturnin",
"Morville-sur-Andelle",
"Morville-sur-Nied",
"Morville-sur-Seille",
"Mory-Montcrux",
"moteur-fusée",
"moteurs-fusées",
"Motey-Besuche",
"Motey-sur-Saône",
"moto-cross",
"moto-crotte",
"moto-crottes",
"moto-école",
"moto-écoles",
"moto-réducteur",
"moto-réducteurs",
"Mouans-Sartoux",
"mouche-araignée",
"mouche-sans-raison",
"mouche-scorpion",
"mouches-sans-raison",
"mouches-scorpions",
"Mouchy-le-Châtel",
"Mougon-Thorigné",
"mouille-bouche",
"Mouilleron-en-Pareds",
"Mouilleron-le-Captif",
"Mouilleron-Saint-Germain",
"moule-bite",
"moule-burnes",
"moule-fesses",
"moules-burnes",
"Moulès-et-Baucels",
"Moulézan-et-Montagnac",
"Mouliets-et-Villemartin",
"moulin-à-vent",
"Moulin-l'Évêque",
"Moulin-Mage",
"moulin-mageois",
"Moulin-Mageois",
"moulin-mageoise",
"Moulin-Mageoise",
"moulin-mageoises",
"Moulin-Mageoises",
"Moulin-Neuf",
"moulins-à-vent",
"Moulins-Engilbert",
"Moulins-en-Tonnerrois",
"Moulins-la-Marche",
"Moulins-le-Carbonnel",
"Moulins-lès-Metz",
"Moulin-sous-Touvent",
"Moulins-Saint-Hubert",
"Moulins-sous-Fléron",
"Moulins-sur-Céphons",
"Moulins-sur-Orne",
"Moulins-sur-Ouanne",
"Moulins-sur-Yèvre",
"Moulis-en-Médoc",
"Moult-Chicheboville",
"Mounes-Prohencoux",
"Mourioux-Vieilleville",
"Mourmelon-le-Grand",
"Mourmelon-le-Petit",
"Mournans-Charbonny",
"Mouron-sur-Yonne",
"Mours-Saint-Eusèbe",
"Mourvilles-Basses",
"Mourvilles-Hautes",
"Mousseaux-lès-Bray",
"Mousseaux-Neuville",
"Mousseaux-sur-Seine",
"Moussy-le-Neuf",
"Moussy-le-Vieux",
"Moussy-Verneuil",
"Moustier-en-Fagne",
"Moustiers-Sainte-Marie",
"Moustier-Ventadour",
"moustiques-tigres",
"moustique-tigre",
"Moustoir-Ac",
"Moustoir-Remungol",
"Moutaine-Aresches",
"Mouterre-Silly",
"Mouterre-sur-Blourde",
"Mouthier-en-Bresse",
"Mouthier-Haute-Pierre",
"Mouthiers-sur-Boëme",
"Moutier-d'Ahun",
"Moutier-Malcard",
"Moutier-Rozeille",
"Moutiers-au-Perche",
"Moutiers-en-Puisaye",
"Moutiers-les-Mauxfaits",
"Moutiers-Saint-Jean",
"Moutiers-sous-Argenton",
"Moutiers-sous-Chantemerle",
"Moutiers-sur-le-Lay",
"mouton-noirisa",
"mouton-noirisai",
"mouton-noirisaient",
"mouton-noirisais",
"mouton-noirisait",
"mouton-noirisâmes",
"mouton-noirisant",
"mouton-noirisas",
"mouton-noirisasse",
"mouton-noirisassent",
"mouton-noirisasses",
"mouton-noirisassiez",
"mouton-noirisassions",
"mouton-noirisât",
"mouton-noirisâtes",
"mouton-noirise",
"mouton-noirisé",
"mouton-noirisée",
"mouton-noirisées",
"mouton-noirisent",
"mouton-noiriser",
"mouton-noirisera",
"mouton-noiriserai",
"mouton-noiriseraient",
"mouton-noiriserais",
"mouton-noiriserait",
"mouton-noiriseras",
"mouton-noirisèrent",
"mouton-noiriserez",
"mouton-noiriseriez",
"mouton-noiriserions",
"mouton-noiriserons",
"mouton-noiriseront",
"mouton-noirises",
"mouton-noirisés",
"mouton-noirisez",
"mouton-noirisiez",
"mouton-noirisions",
"mouton-noirisons",
"mouve-chaux",
"Moux-en-Morvan",
"Mouy-sur-Seine",
"Mouzeuil-Saint-Martin",
"Mouzieys-Panens",
"Mouzieys-Teulet",
"Moÿ-de-l'Aisne",
"Moyencourt-lès-Poix",
"Moyenne-Franconie",
"moyens-ducs",
"Moyeuvre-Grande",
"Moyeuvre-Petite",
"Mozé-sur-Louet",
"m-paiement",
"m-paiements",
"m'sieur",
"M'Tsangamouji",
"Muad-Dib",
"muco-pus",
"mud-minnow",
"Muespach-le-Haut",
"Muhlbach-sur-Bruche",
"Muhlbach-sur-Munster",
"Mühlhausen-Ehingen",
"Muides-sur-Loire",
"Muille-Villette",
"mule-jenny",
"Mülheim-Kärlich",
"mull-jenny",
"multiplate-forme",
"multiplates-formes",
"mu-métal",
"Mümliswil-Ramiswil",
"Muncq-Nieurlet",
"Muneville-le-Bingard",
"Muneville-sur-Mer",
"Münster-Geschinen",
"Münster-Sarmsheim",
"Murat-le-Quaire",
"Murat-sur-Vèbre",
"Mur-de-Barrez",
"Mûr-de-Bretagne",
"Mur-de-Sologne",
"Muret-et-Crouttes",
"Muret-le-Château",
"murnau-werdenfels",
"mur-rideau",
"Mûrs-Erigné",
"Mûrs-Érigné",
"Murs-et-Gélignieux",
"murs-rideaux",
"Murtin-Bogny",
"Murtin-et-Bogny",
"Murtin-et-le-Châtelet",
"Murviel-lès-Béziers",
"Murviel-lès-Montpellier",
"musculo-cutané",
"musettes-repas",
"music-hall",
"music-hallesque",
"music-hallesques",
"music-halls",
"Mussey-sur-Marne",
"Mussy-la-Fosse",
"Mussy-la-Ville",
"Mussy-sous-Dun",
"Mussy-sur-Seine",
"mu'ugalavyáni",
"n-3",
"N-(4-hydroxyphényl)éthanamide",
"n-6",
"n-9",
"N-acétylcystéine",
"Nachrodt-Wiblingwerde",
"Nadaillac-de-Rouge",
"na-dené",
"na-déné",
"Nagel-Séez-Mesnil",
"Nages-et-Solorgues",
"Nagorno-Karabakh",
"Nagorny-Karabagh",
"Nagorny-Karabakh",
"Nago-Torbole",
"Nahetal-Waldau",
"Nainville-les-Roches",
"n-aire",
"n-aires",
"Naisey-les-Granges",
"Naives-en-Blois",
"Naives-Rosières",
"Naix-aux-Forges",
"name-dropping",
"nam-nam",
"nam-nams",
"Nampcelles-la-Cour",
"Namps-au-Mont",
"Namps-Maisnil",
"Nampteuil-sous-Muret",
"Nanc-lès-Saint-Amour",
"Nançois-le-Grand",
"Nançois-sur-Ornain",
"Nancray-sur-Rimarde",
"Nancy-sur-Cluses",
"Nandin-sur-Aisne",
"nano-ohm",
"nano-ohms",
"Nans-les-Pins",
"Nan-sous-Thil",
"Nans-sous-Sainte-Anne",
"Nanteau-sur-Essonne",
"Nanteau-sur-Lunain",
"Nantes-en-Ratier",
"Nanteuil-Auriac-de-Bourzac",
"Nanteuil-en-Vallée",
"Nanteuil-la-Forêt",
"Nanteuil-la-Fosse",
"Nanteuil-le-Haudouin",
"Nanteuil-lès-Meaux",
"Nanteuil-Notre-Dame",
"Nanteuil-sur-Aisne",
"Nanteuil-sur-Marne",
"Nant-le-Grand",
"Nant-le-Petit",
"naphtoxy-2-acétamide",
"Napoléon-Vendée",
"narco-État",
"narco-États",
"narco-guérilla",
"narco-guérillas",
"narcotico-âcre",
"narco-trafiquant",
"narco-trafiquants",
"naso-génien",
"naso-lobaire",
"naso-lobaires",
"naso-oculaire",
"naso-palatin",
"naso-palpébral",
"naso-sourcilier",
"naso-transversal",
"Nassandres-sur-Risle",
"nat-gadaw",
"nat-gadaws",
"nationale-socialiste",
"nationales-socialistes",
"national-socialisme",
"national-socialiste",
"nationaux-socialistes",
"nat-kadaw",
"nat-kadaws",
"natro-feldspat",
"natro-feldspats",
"natu-majorité",
"Naujac-sur-Mer",
"Naujan-et-Postiac",
"Naussac-Fontanes",
"nautico-estival",
"Navailles-Angos",
"navarro-aragonais",
"navarro-labourdin",
"Nâves-Parmelan",
"navire-citerne",
"navire-école",
"navire-mère",
"navires-citernes",
"navires-écoles",
"navires-mères",
"navire-usine",
"Nay-Bourdettes",
"Nayemont-les-Fosses",
"Nazelles-Négron",
"Naz-Sciaves",
"n-boule",
"n-boules",
"n-butane",
"n-butanes",
"n-butyle",
"n-cube",
"n-cubes",
"N.-D.",
"n'dama",
"n'damas",
"N-déméthyla",
"N-déméthylai",
"N-déméthylaient",
"N-déméthylais",
"N-déméthylait",
"N-déméthylâmes",
"N-déméthylant",
"N-déméthylas",
"N-déméthylasse",
"N-déméthylassent",
"N-déméthylasses",
"N-déméthylassiez",
"N-déméthylassions",
"N-déméthylât",
"N-déméthylâtes",
"N-déméthyle",
"N-déméthylé",
"N-déméthylée",
"N-déméthylées",
"N-déméthylent",
"N-déméthyler",
"N-déméthylera",
"N-déméthylerai",
"N-déméthyleraient",
"N-déméthylerais",
"N-déméthylerait",
"N-déméthyleras",
"N-déméthylèrent",
"N-déméthylerez",
"N-déméthyleriez",
"N-déméthylerions",
"N-déméthylerons",
"N-déméthyleront",
"N-déméthyles",
"N-déméthylés",
"N-déméthylez",
"N-déméthyliez",
"N-déméthylions",
"N-déméthylons",
"n-dimensionnel",
"N'Djamena",
"N'Djaména",
"NDM-1",
"N'dorola",
"Néant-sur-Yvel",
"Neaufles-Auvergny",
"Neaufles-Saint-Martin",
"Neaufles-sur-Risle",
"Neauphe-sous-Essai",
"Neauphe-sur-Dive",
"Neauphle-le-Château",
"Neauphle-le-Vieux",
"Nebra-sur-Unstrut",
"Neckar-Odenwald",
"Neder-Betuwe",
"Neder-Hardinxveld",
"Nederhemert-Noord",
"Nederhemert-Zuid",
"Neder-over-Heembeek",
"Neder-Over-Heembeek",
"Nederweert-Eind",
"Nederzwalm-Hermelgem",
"Neewiller-près-Lauterbourg",
"néfaste-food",
"néfaste-foods",
"nègre-soie",
"nègres-soies",
"negro-spiritual",
"negro-spirituals",
"nègue-chien",
"nègue-fol",
"Nehwiller-près-Wœrth",
"Neige-Côtier",
"Neiße-Malxetal",
"ne-m'oubliez-pas",
"Nempont-Saint-Firmin",
"Nemsdorf-Göhrendorf",
"Néons-sur-Creuse",
"néphro-angiosclérose",
"néphro-angioscléroses",
"néphro-gastrique",
"néphro-urétérectomie",
"néphro-urétérectomies",
"neptuno-plutonien",
"neptuno-plutonienne",
"neptuno-plutoniens",
"nerf-ferrure",
"nerf-férure",
"Néris-les-Bains",
"Néronde-sur-Dore",
"Nerville-la-Forêt",
"Nesle-et-Massoult",
"Nesle-Hodeng",
"Nesle-la-Reposte",
"Nesle-le-Repons",
"Nesle-l'Hôpital",
"Nesle-Normandeuse",
"Nesles-la-Gilberde",
"Nesles-la-Montagne",
"Nesles-la-Vallée",
"net-citoyen",
"net-citoyens",
"N-éthyléthanamine",
"nettoie-pipe",
"Neu-Anspach",
"Neu-Bamberg",
"Neublans-Abergement",
"Neubourg-sur-le-Danube",
"Neuburg-Schrobenhausen",
"Neuchâtel-Urtière",
"Neudorf-Bornstein",
"Neu-Eichenberg",
"Neuendorf-Sachsenbande",
"Neuenkirchen-Vörden",
"Neuf-Berquin",
"neuf-berquinois",
"Neuf-Berquinois",
"neuf-berquinoise",
"Neuf-Berquinoise",
"neuf-berquinoises",
"Neuf-Berquinoises",
"Neuf-Brisach",
"neuf-cents",
"Neufchâtel-en-Bray",
"Neufchâtel-en-Saosnois",
"Neufchâtel-Hardelot",
"Neufchâtel-sur-Aisne",
"Neuf-Eglise",
"Neuf-Église",
"Neuf-Marché",
"Neuf-Mesnil",
"Neufmoutiers-en-Brie",
"Neufvy-sur-Aronde",
"Neugartheim-Ittlenheim",
"Neuhaus-Schierschnitz",
"Neuillay-les-Bois",
"Neuillé-le-Lierre",
"Neuillé-Pont-Pierre",
"Neuilly-en-Donjon",
"Neuilly-en-Dun",
"Neuilly-en-Sancerre",
"Neuilly-en-Thelle",
"Neuilly-en-Vexin",
"Neuilly-la-Forêt",
"Neuilly-le-Bisson",
"Neuilly-le-Brignon",
"Neuilly-le-Dien",
"Neuilly-le-Malherbe",
"Neuilly-le-Réal",
"Neuilly-lès-Dijon",
"Neuilly-le-Vendin",
"Neuilly-l'Evêque",
"Neuilly-l'Évêque",
"Neuilly-l'Hôpital",
"Neuilly-Plaisance",
"Neuilly-Saint-Front",
"Neuilly-sous-Clermont",
"Neuilly-sur-Eure",
"Neuilly-sur-Marne",
"Neuilly-sur-Seine",
"Neuilly-sur-Suize",
"Neu-Isenburg",
"Neukirchen-Balbini",
"Neukirchen-Vluyn",
"Neumagen-Dhron",
"Neu-Moresnet",
"Neung-sur-Beuvron",
"Neunkirchen-lès-Bouzonville",
"Neunkirchen-Seelscheid",
"Neunkirch-lès-Sarreguemines",
"Neurey-en-Vaux",
"Neurey-lès-la-Demie",
"neuro-acoustique",
"neuro-acoustiques",
"neuro-anatomie",
"neuro-anatomies",
"neuro-humoral",
"neuro-humorale",
"neuro-humorales",
"neuro-humoraux",
"neuro-imagerie",
"neuro-imageries",
"neuro-linguistique",
"neuro-linguistiques",
"neuro-musculaire",
"neuro-musculaires",
"neuro-stimulation",
"neuro-végétatif",
"neuro-végétatifs",
"neuro-végétative",
"neuro-végétatives",
"Neusalza-Spremberg",
"Neu-Seeland",
"Neussargues-Moissac",
"Neustadt-Glewe",
"neutro-alcalin",
"Neu-Ulm",
"Neuve-Chapelle",
"neuve-chapellois",
"Neuve-Chapellois",
"neuve-chapelloise",
"Neuve-Chapelloise",
"neuve-chapelloises",
"Neuve-Chapelloises",
"Neuve-Eglise",
"Neuve-Église",
"Neuvéglise-sur-Truyère",
"neuve-grangeais",
"Neuve-Grangeais",
"neuve-grangeaise",
"Neuve-Grangeaise",
"neuve-grangeaises",
"Neuve-Grangeaises",
"Neuvelle-lès-Champlitte",
"Neuvelle-lès-Cromary",
"Neuvelle-lès-Grancey",
"Neuvelle-lès-la-Charité",
"Neuvelle-lès-Voisey",
"Neuve-Maison",
"Neuves-Maisons",
"Neuvic-Entier",
"Neuvicq-le-Château",
"Neuvicq-Montguyon",
"Neuville-au-Bois",
"Neuville-au-Cornet",
"Neuville-au-Plain",
"Neuville-aux-Bois",
"Neuville-Bosc",
"neuville-boscien",
"Neuville-Boscien",
"neuville-boscienne",
"Neuville-Boscienne",
"neuville-bosciennes",
"Neuville-Bosciennes",
"neuville-bosciens",
"Neuville-Bosciens",
"Neuville-Bourjonval",
"Neuville-Coppegueule",
"Neuville-Day",
"Neuville-de-Poitou",
"Neuville-en-Avesnois",
"Neuville-en-Beaumont",
"Neuville-en-Condroz",
"Neuville-en-Ferrain",
"Neuville-en-Verdunois",
"Neuville-Ferrières",
"Neuville-les-Dames",
"Neuville-lès-Decize",
"Neuville-lès-Dieppe",
"Neuville-lès-Lœuilly",
"Neuville-lès-This",
"Neuville-lès-Vaucouleurs",
"Neuville-lez-Beaulieu",
"Neuville-près-Sées",
"Neuviller-la-Roche",
"Neuviller-lès-Badonviller",
"Neuvillers-sur-Fave",
"Neuviller-sur-Moselle",
"Neuville-Saint-Amand",
"Neuville-Saint-Rémy",
"Neuville-Saint-Vaast",
"Neuville-sous-Arzillières",
"Neuville-sous-Montreuil",
"Neuville-sur-Ailette",
"Neuville-sur-Ain",
"Neuville-sur-Authou",
"Neuville-sur-Brenne",
"Neuville-sur-Escaut",
"Neuville-sur-Margival",
"Neuville-sur-Oise",
"Neuville-sur-Ornain",
"Neuville-sur-Saône",
"Neuville-sur-Sarthe",
"Neuville-sur-Seine",
"Neuville-sur-Touques",
"Neuville-sur-Vanne",
"Neuville-sur-Vannes",
"Neuvillette-en-Charnie",
"Neuville-Vitasse",
"Neuvilly-en-Argonne",
"Neuvy-au-Houlme",
"Neuvy-Bouin",
"Neuvy-Deux-Clochers",
"Neuvy-en-Beauce",
"Neuvy-en-Champagne",
"Neuvy-en-Dunois",
"Neuvy-en-Mauges",
"Neuvy-en-Sullias",
"Neuvy-Grandchamp",
"Neuvy-le-Barrois",
"Neuvy-le-Roi",
"Neuvy-Pailloux",
"Neuvy-Saint-Sépulchre",
"Neuvy-Sautour",
"neuvy-sautourien",
"Neuvy-Sautourien",
"neuvy-sautourienne",
"Neuvy-Sautourienne",
"neuvy-sautouriennes",
"Neuvy-Sautouriennes",
"neuvy-sautouriens",
"Neuvy-Sautouriens",
"Neuvy-sur-Barangeon",
"Neuvy-sur-Loire",
"Neuwiller-lès-Saverne",
"Nevi'im",
"Néville-sur-Mer",
"névro-mimosie",
"névro-mimosies",
"Nevy-lès-Dole",
"Nevy-sur-Seille",
"Newcastle-under-Lyme",
"New-Glasgois",
"Newton-in-Makerfield",
"Newton-le-Willows",
"newton-mètre",
"newtons-mètres",
"New-York",
"new-yorkais",
"New-Yorkais",
"new-yorkaise",
"New-Yorkaise",
"new-yorkaises",
"New-Yorkaises",
"new-yorkisa",
"new-yorkisai",
"new-yorkisaient",
"new-yorkisais",
"new-yorkisait",
"new-yorkisâmes",
"new-yorkisant",
"new-yorkisas",
"new-yorkisasse",
"new-yorkisassent",
"new-yorkisasses",
"new-yorkisassiez",
"new-yorkisassions",
"new-yorkisât",
"new-yorkisâtes",
"new-yorkise",
"new-yorkisé",
"new-yorkisée",
"new-yorkisées",
"new-yorkisent",
"new-yorkiser",
"new-yorkisera",
"new-yorkiserai",
"new-yorkiseraient",
"new-yorkiserais",
"new-yorkiserait",
"new-yorkiseras",
"new-yorkisèrent",
"new-yorkiserez",
"new-yorkiseriez",
"new-yorkiserions",
"new-yorkiserons",
"new-yorkiseront",
"new-yorkises",
"new-yorkisés",
"new-yorkisez",
"new-yorkisiez",
"new-yorkisions",
"new-yorkisons",
"nez-en-cœur",
"Nézignan-l'Evêque",
"Nézignan-l'Évêque",
"nez-percé",
"ngaï-ngaï",
"ngaï-ngaïs",
"n-gone",
"n-gones",
"n-gramme",
"n-grammes",
"nian-nian",
"Nicey-sur-Aire",
"niche-crédence",
"nickel-ankérite",
"nickel-ankérites",
"nickel-magnésite",
"nickel-magnésites",
"nickel-skuttérudite",
"nickel-skuttérudites",
"Nicolétain-du-Sud",
"nid-de-poule",
"Niederbronn-les-Bains",
"Nieder-Hilbersheim",
"Nieder-Olm",
"Nieder-Wiesen",
"Niefern-Öschelbronn",
"Niel-bij-As",
"Niel-bij-Sint-Truiden",
"Nielles-lès-Ardres",
"Nielles-lès-Bléquin",
"Nielles-lès-Calais",
"n-ième",
"n-ièmes",
"Nieuil-l'Espoir",
"Nieul-le-Dolent",
"Nieul-lès-Saintes",
"Nieulle-sur-Seudre",
"Nieul-le-Virouil",
"Nieul-sur-l'Autise",
"Nieul-sur-Mer",
"Nieuw-Amsterdam",
"Nieuw-Annerveen",
"Nieuw-Balinge",
"Nieuw-Beerta",
"Nieuw-Beijerland",
"Nieuw-Bergen",
"Nieuw-Buinen",
"Nieuw-Dijk",
"Nieuw-Dordrecht",
"Nieuwer-Amstel",
"Nieuwe-Tonge",
"Nieuw-Ginneken",
"Nieuw-Heeten",
"Nieuw-Helvoet",
"Nieuwkerken-Waas",
"Nieuw-Loosdrecht",
"Nieuw-Milligen",
"Nieuw-Namen",
"Nieuwolda-Oost",
"Nieuw-Reemst",
"Nieuw-Roden",
"Nieuw-Scheemda",
"Nieuw-Schoonebeek",
"Nieuw-Strijen",
"Nieuw-Vennep",
"Nieuw-Vossemeer",
"Nieuw-Weerdinge",
"Nieuw-Wehl",
"Niger-Congo",
"nigéro-congolais",
"night-club",
"night-clubbing",
"night-clubs",
"Nijni-Taguil",
"nilo-saharien",
"nilo-saharienne",
"nilo-sahariennes",
"nilo-sahariens",
"Nil-Saint-Martin",
"Nil-Saint-Vincent",
"Nil-Saint-Vincent-Saint-Martin",
"ni-ni",
"nin-nin",
"Niort-de-Sault",
"Niort-la-Fontaine",
"nippo-américain",
"nippo-américaine",
"nippo-américaines",
"nippo-américains",
"nique-douille",
"nique-douilles",
"Ni-Skutterudites",
"Nissan-lez-Enserune",
"Nister-Möhrendorf",
"Nistos-Haut-et-Bas",
"nitro-cellulose",
"nitro-celluloses",
"nitro-hydrochlorique",
"nitro-hydrochloriques",
"nitrotal-isopropyl",
"niuafo'ou",
"niuafo'ous",
"Nivigne-et-Suran",
"nivo-glaciaire",
"nivo-glaciaires",
"Nivolas-Vermelle",
"Nivollet-Montgriffon",
"nivo-pluvial",
"Nixéville-Blercourt",
"Nizan-Gesse",
"Nizy-le-Comte",
"Nlle-Calédonie",
"Nlle-Écosse",
"Nlle-Zélande",
"N-méthyla",
"N-méthylai",
"N-méthylaient",
"N-méthylais",
"N-méthylait",
"N-méthylâmes",
"N-méthylant",
"N-méthylas",
"N-méthylasse",
"N-méthylassent",
"N-méthylasses",
"N-méthylassiez",
"N-méthylassions",
"N-méthylât",
"N-méthylâtes",
"N-méthyle",
"N-méthylé",
"N-méthylée",
"N-méthylées",
"N-méthylent",
"N-méthyler",
"N-méthylera",
"N-méthylerai",
"N-méthyleraient",
"N-méthylerais",
"N-méthylerait",
"N-méthyleras",
"N-méthylèrent",
"N-méthylerez",
"N-méthyleriez",
"N-méthylerions",
"N-méthylerons",
"N-méthyleront",
"N-méthyles",
"N-méthylés",
"N-méthylez",
"N-méthyliez",
"N-méthylions",
"N-méthylons",
"N,N-dinitronitramide",
"n-octaèdre",
"n-octaèdres",
"Nod-sur-Seine",
"Noël-Cerneux",
"Noé-les-Mallets",
"Noë-les-Mallets",
"nœud-nœud",
"nœuds-nœuds",
"Nœux-lès-Auxi",
"Nœux-les-Mines",
"Nogent-en-Othe",
"Nogent-l'Abbesse",
"Nogent-l'Artaud",
"Nogent-le-Bernard",
"Nogent-le-Phaye",
"Nogent-le-Roi",
"Nogent-le-Rotrou",
"Nogent-le-Sec",
"Nogent-lès-Montbard",
"Nogent-sur-Aube",
"Nogent-sur-Eure",
"Nogent-sur-Loir",
"Nogent-sur-Marne",
"Nogent-sur-Oise",
"Nogent-sur-Seine",
"Nogent-sur-Vernisson",
"Nohant-en-Goût",
"Nohant-en-Graçay",
"Nohant-Vic",
"Noidans-le-Ferroux",
"Noidans-lès-Vesoul",
"Noidant-Chatenoy",
"Noidant-le-Rocheux",
"noie-chien",
"Noirmoutier-en-l'Île",
"Noiron-sous-Gevrey",
"Noiron-sur-Bèze",
"Noiron-sur-Seine",
"noir-pie",
"noir-pioche",
"noir-pioches",
"noir-ployant",
"Noisy-le-Grand",
"Noisy-le-Roi",
"Noisy-le-Sec",
"Noisy-Rudignon",
"noisy-rudignonais",
"Noisy-Rudignonais",
"noisy-rudignonaise",
"Noisy-Rudignonaise",
"noisy-rudignonaises",
"Noisy-Rudignonaises",
"Noisy-sur-Ecole",
"Noisy-sur-École",
"Noisy-sur-Oise",
"Nojals-et-Clotte",
"Nojeon-en-Vexin",
"Nojeon-le-Sec",
"no-kill",
"no-kills",
"noli-me-tangere",
"nonante-cinq",
"nonante-deux",
"nonante-et-un",
"nonante-huit",
"nonante-neuf",
"nonante-quatre",
"nonante-sept",
"nonante-six",
"nonante-trois",
"Nonant-le-Pin",
"Noncourt-sur-le-Rongeant",
"Nonette-Orsonnette",
"Nonsard-Lamarche",
"Nonvilliers-Grandhoux",
"Noorder-Koggenland",
"Noord-Polsbroek",
"Noord-Scharwoude",
"Noord-Sleen",
"Noord-Spierdijk",
"Noord-Stroe",
"Noord-Waddinxveen",
"Noordwijk-Binnen",
"Noordwolde-Zuid",
"no-poo",
"Norges-la-Ville",
"Noron-l'Abbaye",
"Noron-la-Poterie",
"Noroy-le-Bourg",
"Noroy-lès-Jussey",
"Noroy-sur-Ourcq",
"Norrent-Fontes",
"Norrey-en-Auge",
"Norrey-en-Bessin",
"Norroy-le-Sec",
"Norroy-lès-Pont-à-Mousson",
"Norroy-le-Veneur",
"Nörten-Hardenberg",
"Nort-Leulinghem",
"nort-leulinghemois",
"Nort-Leulinghemois",
"nort-leulinghemoise",
"Nort-Leulinghemoise",
"nort-leulinghemoises",
"Nort-Leulinghemoises",
"Nort-sur-Erdre",
"Norwich-terrier",
"Nossage-et-Bénévent",
"Nouaillé-Maupertuis",
"Nouan-le-Fuzelier",
"Nouans-les-Fontaines",
"Nouan-sur-Loire",
"Noues-de-Sienne",
"Nourard-le-Franc",
"nous-même",
"nous-mêmes",
"Nousseviller-lès-Bitche",
"Nousseviller-lès-Puttelange",
"Nousseviller-Saint-Nabor",
"Nouveau-Brunswick",
"Nouveau-Connecticut",
"Nouveau-Continent",
"Nouveau-Cornouaille",
"Nouveau-Cornouailles",
"Nouveau-Cornwall",
"nouveau-gallois",
"Nouveau-Hanovre",
"Nouveau-Léon",
"Nouveau-Mexique",
"Nouveau-Monde",
"nouveau-né",
"nouveau-née",
"nouveau-nées",
"nouveau-nés",
"Nouveau-Norfolk",
"Nouveau-Santander",
"Nouveau-Shetland",
"nouveau-venu",
"nouveaux-nés",
"Nouveaux-Pays-Bas",
"nouveaux-venus",
"Nouvel-Âge",
"nouvel-âgeuse",
"nouvel-âgeuses",
"nouvel-âgeux",
"Nouvelle-Albion",
"Nouvelle-Amsterdam",
"Nouvelle-Andalousie",
"Nouvelle-Angleterre",
"Nouvelle-Aquitaine",
"Nouvelle-Bretagne",
"Nouvelle-Calédonie",
"Nouvelle-Cornouaille",
"Nouvelle-Cornouailles",
"Nouvelle-Cythère",
"Nouvelle-Écosse",
"Nouvelle-Eglise",
"Nouvelle-Église",
"Nouvelle-Espagne",
"Nouvelle-France",
"Nouvelle-Galles",
"Nouvelle-Géorgie",
"Nouvelle-Grenade",
"Nouvelle-Guinée",
"Nouvelle-Hanovre",
"Nouvelle-Hollande",
"Nouvelle-Irlande",
"nouvelle-née",
"Nouvelle-Néerlande",
"Nouvelle-Norfolk",
"Nouvelle-Orléans",
"Nouvelle-Poméranie",
"Nouvelles-Hébrides",
"Nouvelle-Sibérie",
"nouvelles-nées",
"nouvelles-venues",
"nouvelle-venue",
"Nouvelle-Zamble",
"Nouvelle-Zélande",
"Nouvelle-Zemble",
"Nouvion-et-Catillon",
"Nouvion-le-Comte",
"Nouvion-le-Vineux",
"Nouvion-sur-Meuse",
"Nouvron-Vingré",
"Novéant-sur-Moselle",
"Noviant-aux-Prés",
"Noville-les-Bois",
"Noville-sur-Mehaigne",
"Novion-Porcien",
"Novy-Chevrières",
"Noyal-Châtillon-sur-Seiche",
"Noyal-Muzillac",
"Noyal-Pontivy",
"Noyal-sous-Bazouges",
"Noyal-sur-Brutz",
"Noyal-sur-Vilaine",
"Noyant-d'Allier",
"Noyant-de-Touraine",
"Noyant-et-Aconin",
"Noyant-la-Gravoyère",
"Noyant-la-Plaine",
"noyé-d'eau",
"Noyelles-en-Chaussée",
"Noyelles-Godault",
"Noyelles-lès-Humières",
"Noyelles-lès-Seclin",
"Noyelles-lès-Vermelles",
"Noyelles-sous-Bellonne",
"Noyelles-sous-Lens",
"Noyelles-sur-Escaut",
"Noyelles-sur-l'Escaut",
"Noyelles-sur-Mer",
"Noyelles-sur-Sambre",
"Noyelles-sur-Selle",
"Noyelle-Vion",
"Noyen-sur-Sarthe",
"Noyen-sur-Seine",
"Noyers-Auzécourt",
"Noyers-Bocage",
"Noyers-Missy",
"Noyers-Pont-Maugis",
"Noyers-Saint-Martin",
"Noyers-sur-Cher",
"Noyers-sur-Jabron",
"Noyers-Thélonne",
"n-polytope",
"n-polytopes",
"n-simplexe",
"n-simplexes",
"n-sphère",
"n-sphères",
"n'srani",
"N'Tcham",
"Nuaillé-d'Aunis",
"Nuaillé-sur-Boutonne",
"Nueil-les-Aubiers",
"Nueil-sous-Faye",
"Nueil-sous-les-Aubiers",
"Nueil-sur-Layon",
"nue-propriétaire",
"nue-propriété",
"nuer-dinka",
"nues-propriétaires",
"nues-propriétés",
"Nuillé-le-Jalais",
"Nuillé-sur-Ouette",
"Nuillé-sur-Vicoin",
"Nuisement-aux-Bois",
"Nuisement-sur-Coole",
"nuit-deboutiste",
"nuit-deboutistes",
"Nuits-Saint-Georges",
"Nuka-Hiva",
"Nuku-Hiva",
"Nuncq-Hautecôte",
"nuoc-mam",
"nuoc-mâm",
"nu-pied",
"nu-pieds",
"n-uple",
"n-uples",
"n-uplet",
"n-uplets",
"nu-propriétaire",
"Nuret-le-Ferron",
"Nurieux-Volognat",
"nus-propriétaires",
"nu-tête",
"Nuthe-Urstromtal",
"N.-W.",
"Oberdorf-Spachbach",
"Oberehe-Stroheich",
"Ober-Flörsheim",
"Oberhausen-Rheinhausen",
"Ober-Hilbersheim",
"Oberhoffen-lès-Wissembourg",
"Oberhoffen-sur-Moder",
"Oberhonnefeld-Gierend",
"Obermaßfeld-Grimmenthal",
"Obermodern-Zutzendorf",
"Ober-Mörlen",
"Obernheim-Kirchenarnbach",
"Ober-Olm",
"Ober-Ramstadt",
"Oberweiler-Tiefenbach",
"Oberwil-Lieli",
"occipito-atloïdien",
"occipito-atloïdienne",
"occipito-atloïdiennes",
"occipito-atloïdiens",
"occipito-axoïdien",
"occipito-axoïdienne",
"occipito-axoïdiennes",
"occipito-axoïdiens",
"occipito-cotyloïdien",
"occipito-cotyloïdienne",
"occipito-cotyloïdiennes",
"occipito-cotyloïdiens",
"occipito-frontal",
"occipito-méningien",
"occipito-pariétal",
"occipito-pétreuse",
"occipito-pétreuses",
"occipito-pétreux",
"occipito-sacré",
"occipito-sacro-iliaque",
"occitano-roman",
"octante-deux",
"octante-et-un",
"octante-neuf",
"Octeville-l'Avenel",
"Octeville-la-Venelle",
"Octeville-sur-Mer",
"octo-core",
"octo-cores",
"octo-rotor",
"octo-rotors",
"oculo-motricité",
"oculo-motricités",
"oculo-musculaire",
"oculo-musculaires",
"oculo-zygomatique",
"Odeillo-Via",
"O-déméthyla",
"O-déméthylai",
"O-déméthylaient",
"O-déméthylais",
"O-déméthylait",
"O-déméthylâmes",
"O-déméthylant",
"O-déméthylas",
"O-déméthylasse",
"O-déméthylassent",
"O-déméthylasses",
"O-déméthylassiez",
"O-déméthylassions",
"O-déméthylât",
"O-déméthylâtes",
"O-déméthyle",
"O-déméthylé",
"O-déméthylée",
"O-déméthylées",
"O-déméthylent",
"O-déméthyler",
"O-déméthylera",
"O-déméthylerai",
"O-déméthyleraient",
"O-déméthylerais",
"O-déméthylerait",
"O-déméthyleras",
"O-déméthylèrent",
"O-déméthylerez",
"O-déméthyleriez",
"O-déméthylerions",
"O-déméthylerons",
"O-déméthyleront",
"O-déméthyles",
"O-déméthylés",
"O-déméthylez",
"O-déméthyliez",
"O-déméthylions",
"O-déméthylons",
"Oder-Spree",
"O-desvenlafaxine",
"odonto-stomatologie",
"Oebisfelde-Weferlingen",
"oeil-de-boeuf",
"œil-de-bœuf",
"oeil-de-chat",
"œil-de-chat",
"oeil-de-lièvre",
"oeil-de-paon",
"oeil-de-perdrix",
"œil-de-perdrix",
"oeil-de-pie",
"œil-de-pie",
"oeil-de-serpent",
"œil-de-serpent",
"oeil-de-tigre",
"œil-de-tigre",
"oeil-du-soleil",
"œil-du-soleil",
"oeils-de-boeuf",
"œils-de-bœuf",
"oeils-de-chat",
"oeils-de-lièvre",
"oeils-de-paon",
"oeils-de-perdrix",
"oeils-de-pie",
"œils-de-pie",
"oeils-de-serpent",
"œils-de-serpent",
"oeils-de-tigre",
"œils-de-tigre",
"Oer-Erkenschwick",
"oesophago-gastro-duodénoscopie",
"œsophago-gastro-duodénoscopie",
"oesophago-gastro-duodénoscopies",
"œsophago-gastro-duodénoscopies",
"Oestrich-Winkel",
"œuf-coque",
"Œuf-en-Ternois",
"œufs-coque",
"Offenbach-Hundheim",
"Offenbach-sur-le-Main",
"off-market",
"off-shore",
"Ogenne-Camptort",
"Ogeu-les-Bains",
"ogivo-cylindrique",
"Ogooué-Maritime",
"Ogy-Montoy-Flanville",
"ohm-mètre",
"ohms-mètres",
"oie-cygne",
"Oignies-en-Thiérache",
"Oigny-en-Valois",
"Oinville-Saint-Liphard",
"Oinville-sous-Auneau",
"Oinville-sur-Montcient",
"oiseau-chameau",
"oiseau-cloche",
"oiseau-éléphant",
"oiseau-lyre",
"oiseau-mouche",
"oiseau-papillon",
"oiseau-tonnerre",
"oiseau-trompette",
"oiseaux-chameaux",
"oiseaux-cloches",
"oiseaux-lyres",
"oiseaux-mouches",
"oiseaux-papillons",
"oiseaux-tonnerres",
"oiseaux-trompettes",
"Oiselay-et-Grachaux",
"Oisseau-le-Petit",
"Oisy-le-Verger",
"Ojos-Albos",
"Olbia-Tempio",
"Ölbronn-Dürrn",
"old-ice",
"old-ices",
"Oléac-Debat",
"Oléac-Dessus",
"oléo-calcaire",
"oléo-calcaires",
"olé-olé",
"oligo-élément",
"oligo-éléments",
"Olizy-Primat",
"Olizy-sur-Chiers",
"olla-podrida",
"Olloy-sur-Viroin",
"Olmeta-di-Capocorso",
"Olmeta-di-Tuda",
"Olmet-et-Villecun",
"Olmi-Cappella",
"Olonne-sur-Mer",
"Oloron-Sainte-Marie",
"Oloron-Sainte-Marie",
"Ols-et-Rinhodes",
"Olst-Wijhe",
"omaha-ponca",
"omaha-poncas",
"omble-chevalier",
"ombre-chevalier",
"Ombret-Rawsa",
"ombro-thermique",
"ombro-thermiques",
"oméga-3",
"oméga-6",
"oméga-9",
"O-méthyla",
"O-méthylai",
"O-méthylaient",
"O-méthylais",
"O-méthylait",
"O-méthylâmes",
"O-méthylant",
"O-méthylas",
"O-méthylasse",
"O-méthylassent",
"O-méthylasses",
"O-méthylassiez",
"O-méthylassions",
"O-méthylât",
"O-méthylâtes",
"O-méthyle",
"O-méthylé",
"O-méthylée",
"O-méthylées",
"O-méthylent",
"O-méthyler",
"O-méthylera",
"O-méthylerai",
"O-méthyleraient",
"O-méthylerais",
"O-méthylerait",
"O-méthyleras",
"O-méthylèrent",
"O-méthylerez",
"O-méthyleriez",
"O-méthylerions",
"O-méthylerons",
"O-méthyleront",
"O-méthyles",
"O-méthylés",
"O-méthylez",
"O-méthyliez",
"O-méthylions",
"O-méthylons",
"Omonville-la-Petite",
"Omonville-la-Rogue",
"omphalo-mésentérique",
"omphalo-mésentériques",
"omphalo-phlébite",
"omphalo-phlébites",
"Oncy-sur-Ecole",
"Oncy-sur-École",
"on-dit",
"Ondreville-sur-Essonne",
"one-man-show",
"one-shot",
"Onesse-et-Laharie",
"Onesse-Laharie",
"one-step",
"one-steps",
"Onet-le-Château",
"one-woman-show",
"Ons-en-Bray",
"Onze-Lieve-Vrouw-Waver",
"Oost-Barendrecht",
"Oost-Cappel",
"oost-cappelois",
"Oost-Cappelois",
"oost-cappeloise",
"Oost-Cappeloise",
"oost-cappeloises",
"Oost-Cappeloises",
"Ooster-Dalfsen",
"Oosterzee-Buren",
"Oost-Graftdijk",
"Oost-Maarland",
"Oost-Souburg",
"Oost-Vlieland",
"opal-AN",
"open-source",
"open-space",
"open-spaces",
"opéra-comique",
"Opéra-Comique",
"opéras-comiques",
"Ophain-Bois-Seigneur-Isaac",
"Opoul-Périllos",
"opt-in",
"opto-strié",
"opt-out",
"Oradour-Fanais",
"Oradour-Saint-Genest",
"Oradour-sur-Glane",
"Oradour-sur-Vayres",
"orang-outan",
"orang-outang",
"orangs-outangs",
"orangs-outans",
"Oranienbaum-Wörlitz",
"Orbais-l'Abbaye",
"Orbigny-au-Mont",
"Orbigny-au-Val",
"orbito-nasal",
"orbito-palpébral",
"Or-Blanois",
"Orchamps-Vennes",
"Ordan-Larroque",
"Orée-d'Anjou",
"oreille-d'abbé",
"oreille-d'âne",
"oreille-de-lièvre",
"oreille-de-loup",
"oreille-de-mer",
"oreille-de-souris",
"oreille-d'ours",
"oreilles-d'âne",
"oreilles-de-mer",
"oreilles-de-souris",
"oreilles-d'ours",
"organo-calcaire",
"organo-calcaires",
"organo-chloré",
"organo-chlorée",
"organo-chlorées",
"organo-chlorés",
"organo-halogéné",
"organo-halogénée",
"organo-halogénées",
"organo-halogénés",
"organo-phosphoré",
"organo-phosphorée",
"organo-phosphorées",
"organo-phosphorés",
"Orgeans-Blanchefontaine",
"Orgères-en-Beauce",
"Orgères-la-Roche",
"Orgnac-l'Aven",
"Orgnac-sur-Vézère",
"orienté-objet",
"orienteur-marqueur",
"Origny-en-Thiérache",
"Origny-le-Butin",
"Origny-le-Roux",
"Origny-le-Sec",
"Origny-Sainte-Benoite",
"o-ring",
"o-rings",
"Oriol-en-Royans",
"Oris-en-Rattier",
"Orliac-de-Bar",
"Orly-sur-Morin",
"Ormes-et-Ville",
"Ormesson-sur-Marne",
"Ormont-Dessous",
"Ormont-Dessus",
"Ormoy-la-Rivière",
"Ormoy-le-Davien",
"Ormoy-lès-Sexfontaines",
"Ormoy-sur-Aube",
"Ormoy-Villers",
"Ornolac-Ussat-les-Bains",
"Oroz-Betelu",
"Orp-Jauche",
"orp-jauchois",
"Orp-Jauchois",
"Orp-Jauchoise",
"Orp-le-Grand",
"Orry-la-Ville",
"Orsingen-Nenzingen",
"Orsmaal-Gussenhoven",
"or-sol",
"ortho-sympathique",
"ortho-sympathiques",
"Orthoux-Sérignac-Quilhan",
"Orveau-Bellesauve",
"Orvillers-Sorel",
"Orvilliers-Saint-Julien",
"Osann-Monzel",
"Osly-Courtil",
"Os-Marsillon",
"Osmoy-Saint-Valery",
"Osne-le-Val",
"Ossas-Suhare",
"ossau-iraty",
"ossau-iratys",
"Osse-en-Aspe",
"Osselle-Routelle",
"Osserain-Rivareyte",
"Ossétie-du-Nord-Alanie",
"Ossey-les-Trois-Maisons",
"Ossun-ez-Angles",
"Ostabat-Asme",
"ostéo-arthrite",
"ostéo-arthrites",
"Osterholz-Scharmbeck",
"Oster-Ohrstedt",
"Osthausen-Wülfershausen",
"ôte-agrafes",
"oto-rhino",
"oto-rhino-laryngologie",
"oto-rhino-laryngologies",
"oto-rhino-laryngologiste",
"oto-rhino-laryngologistes",
"oto-rhinos",
"Ottendorf-Okrilla",
"Ottignies-Louvain-la-Neuve",
"ouaf-ouaf",
"Oud-Aa",
"Oud-Alblas",
"Oud-Annerveen",
"Oud-Beijerland",
"Oud-Bodegraven",
"Oud-Dijk",
"Oud-Drimmelen",
"Oud-Empel",
"Ouder-Amstel",
"Ouderkerk-sur-l'Amstel",
"Oude-Tonge",
"Oud-Gastel",
"Oud-Heverlee",
"Oud-Kamerik",
"Oud-Leusden",
"Oud-Loosdrecht",
"Oud-Maarsseveen",
"Oud-Reemst",
"Oud-Reeuwijk",
"Oud-Sabbinge",
"Oud-Turnhout",
"Oud-Valkenburg",
"Oud-Vossemeer",
"Oud-Vroenhoven",
"Oud-Wulven",
"Oud-Zuilen",
"ouèche-ouèche",
"ouèches-ouèches",
"Ougney-Douvot",
"oui-da",
"ouï-dire",
"Ouilly-du-Houley",
"Ouilly-le-Basset",
"Ouilly-le-Tesson",
"Ouilly-le-Vicomte",
"oui-non-bof",
"Oui-Oui",
"ouïr-dire",
"Oulan-Bator",
"Oulches-la-Vallée-Foulon",
"Oulchy-la-Ville",
"Oulchy-le-Château",
"Oulens-sous-Échallens",
"ouralo-altaïque",
"ouralo-altaïques",
"Ourcel-Maison",
"Ourches-sur-Meuse",
"Ourdis-Cotdoussan",
"Ourgou-Manega",
"Ourouer-les-Bourdelins",
"Ouroux-en-Morvan",
"Ouroux-sous-le-Bois-Sainte-Marie",
"Ouroux-sur-Saône",
"Oursel-Maison",
"ours-garou",
"ours-garous",
"Ours-Mons",
"Ourville-en-Caux",
"Ousse-et-Suzan",
"Ousse-Suzan",
"Ousson-sur-Loire",
"Oussoy-en-Gâtinais",
"Oust-Marest",
"Ouve-Wirquin",
"ouve-wirquinois",
"Ouve-Wirquinois",
"ouve-wirquinoise",
"Ouve-Wirquinoise",
"ouve-wirquinoises",
"Ouve-Wirquinoises",
"Ouville-l'Abbaye",
"Ouville-la-Bien-Tournée",
"Ouville-la-Rivière",
"Ouvrouer-les-Champs",
"Ouwster-Nijega",
"Ouzilly-Vignolles",
"Ouzouer-des-Champs",
"Ouzouer-le-Doyen",
"Ouzouer-le-Marché",
"Ouzouer-sous-Bellegarde",
"Ouzouer-sur-Loire",
"Ouzouer-sur-Trézée",
"Ouzoun-Ada",
"Over-Diemen",
"Ovillers-la-Boisselle",
"ovo-lacto-végétarisme",
"ovo-lacto-végétarismes",
"ovo-urinaire",
"ovo-végétarisme",
"ovo-végétarismes",
"oxidéméton-méthyl",
"oxo-biodégradable",
"oxo-biodégradables",
"oxo-dégradable",
"oxo-dégradables",
"oxydéméton-méthyl",
"oxydo-réduction",
"oxydo-réductions",
"oxy-iodure",
"oxy-iodures",
"Oye-et-Pallet",
"Oye-Plage",
"Oy-Mittelberg",
"Oyón-Oion",
"Oytier-Saint-Oblas",
"Oza-Cesuras",
"Ozenx-Montestrucq",
"Ozoir-la-Ferrière",
"Ozoir-le-Breuil",
"Ozouer-le-Repos",
"Ozouer-le-Voulgis",
"Ozouër-le-Voulgis",
"pa'anga",
"p-acétylaminophénol",
"package-deal",
"package-deals",
"pack-ice",
"pack-ices",
"Pacy-sur-Armançon",
"Pacy-sur-Eure",
"p-adique",
"p-adiques",
"pagano-chrétien",
"page-turner",
"Pagney-derrière-Barine",
"Pagny-la-Blanche-Côte",
"Pagny-la-Ville",
"Pagny-le-Château",
"Pagny-lès-Goin",
"Pagny-sur-Meuse",
"Pagny-sur-Moselle",
"paille-en-cul",
"paille-en-queue",
"pailles-en-cul",
"pailles-en-queue",
"pail-mail",
"pain-beurre",
"pain-d'épicier",
"pain-d'épicière",
"pain-d'épicières",
"pain-d'épiciers",
"pain-de-pourceau",
"pains-de-pourceau",
"pair-à-pair",
"Pair-et-Grandrupt",
"pair-programma",
"pair-programmai",
"pair-programmaient",
"pair-programmais",
"pair-programmait",
"pair-programmâmes",
"pair-programmant",
"pair-programmas",
"pair-programmasse",
"pair-programmassent",
"pair-programmasses",
"pair-programmassiez",
"pair-programmassions",
"pair-programmât",
"pair-programmâtes",
"pair-programme",
"pair-programmé",
"pair-programment",
"pair-programmer",
"pair-programmera",
"pair-programmerai",
"pair-programmeraient",
"pair-programmerais",
"pair-programmerait",
"pair-programmeras",
"pair-programmèrent",
"pair-programmerez",
"pair-programmeriez",
"pair-programmerions",
"pair-programmerons",
"pair-programmeront",
"pair-programmes",
"pair-programmez",
"pair-programmiez",
"pair-programmions",
"pair-programmons",
"Paisy-Cosdon",
"Paizay-le-Chapt",
"Paizay-le-Sec",
"Paizay-le-Tort",
"Paizay-Naudouin-Embourie",
"Palais-Bourbon",
"Palatinat-Sud-Ouest",
"palato-labial",
"palato-labiale",
"palato-pharyngien",
"palato-pharyngite",
"palato-pharyngites",
"palato-salpingien",
"palato-staphylin",
"palato-staphylins",
"Palau-de-Cerdagne",
"Palau-del-Vidre",
"Palau-sator",
"Palau-saverdera",
"Palavas-les-Flots",
"paléo-continental",
"paléo-lac",
"paléo-lacs",
"paléo-reconstruction",
"paléo-reconstructions",
"pal-fer",
"palladico-potassique",
"Palluau-sur-Indre",
"palmier-chanvre",
"palmier-dattier",
"palmiers-chanvre",
"palmiers-dattiers",
"palpe-mâchoire",
"palu'e",
"palu'es",
"pama-nyungan",
"panchen-lama",
"pancréatico-duodénal",
"Pancy-Courtecon",
"pan-européen",
"pan-européenne",
"pan-européennes",
"pan-européens",
"panier-repas",
"paniers-repas",
"pan-lucanisme",
"pan-mandingue",
"pan-mandingues",
"panpan-cucul",
"Panschwitz-Kuckau",
"panthère-garou",
"panthères-garous",
"Pant'ruche",
"Pa-O",
"papa-gâteau",
"papas-gâteaux",
"papier-caillou-ciseaux",
"papier-calque",
"papier-cul",
"papier-filtre",
"papier-monnaie",
"papiers-calque",
"Papouasie-Nouvelle-Guinée",
"papy-boom",
"papy-boomer",
"papy-boomers",
"papy-boomeur",
"papy-boomeurs",
"paquet-cadeau",
"paquets-cadeaux",
"para-acétyl-amino-phénol",
"parachute-frein",
"parachutes-freins",
"para-continental",
"para-dichlorobenzène",
"para-légal",
"para-légale",
"para-légales",
"para-légaux",
"parathion-éthyl",
"parathion-méthyl",
"Paray-Douaville",
"Paray-le-Frésil",
"Paray-le-Monial",
"Paray-sous-Briailles",
"Paray-Vieille-Poste",
"Parçay-les-Pins",
"Parçay-Meslay",
"Parçay-sur-Vienne",
"Parc-d'Anxtot",
"parc-d'anxtotais",
"Parc-d'Anxtotais",
"parc-d'anxtotaise",
"Parc-d'Anxtotaise",
"parc-d'anxtotaises",
"Parc-d'Anxtotaises",
"Parcé-sur-Sarthe",
"par-cœur",
"Parcoul-Chenaud",
"Parcy-et-Tigny",
"par-dehors",
"par-delà",
"par-derrière",
"par-dessous",
"par-dessus",
"par-devant",
"par-devers",
"Pardies-Piétat",
"Parentis-en-Born",
"Parey-Saint-Césaire",
"Parey-sous-Montfort",
"Parfouru-l'Éclin",
"Parfouru-sur-Odon",
"Pargny-Filain",
"Pargny-la-Dhuys",
"Pargny-les-Bois",
"Pargny-lès-Reims",
"Pargny-Resson",
"Pargny-sous-Mureau",
"Pargny-sur-Saulx",
"Parigné-le-Pôlin",
"Parigné-l'Evêque",
"Parigné-l'Évêque",
"Parigné-sur-Braye",
"Parigny-la-Rose",
"Parigny-les-Vaux",
"Paris-Brest",
"Paris-l'Hôpital",
"parking-relais",
"parler-pour-ne-rien-dire",
"Parné-sur-Roc",
"Parnoy-en-Bassigny",
"parotido-auriculaire",
"parotido-auriculaires",
"Paroy-en-Othe",
"Paroy-sur-Saulx",
"Paroy-sur-Tholon",
"Parsac-Rimondeix",
"Pars-lès-Chavanges",
"Pars-lès-Romilly",
"Parthenay-de-Bretagne",
"participation-pari",
"particule-dieu",
"particules-dieu",
"parti-pris",
"parva-pétricien",
"Parva-Pétricien",
"parva-pétricienne",
"Parva-Pétricienne",
"parva-pétriciennes",
"Parva-Pétriciennes",
"parva-pétriciens",
"Parva-Pétriciens",
"Parves-et-Nattages",
"Parvillers-le-Quesnoy",
"pas-à-pas",
"pascal-seconde",
"pascals-secondes",
"pas-d'âne",
"Pas-de-Calais",
"Pas-de-Jeu",
"pas-de-porte",
"Pas-en-Artois",
"paso-doble",
"paso-dobles",
"Passavant-en-Argonne",
"Passavant-la-Rochère",
"Passavant-sur-Layon",
"passif-agressif",
"passifs-agressifs",
"passing-shot",
"passing-shots",
"Passy-en-Valois",
"Passy-Grigny",
"Passy-les-Tours",
"Passy-sur-Marne",
"Passy-sur-Seine",
"P-ATA",
"pâtissier-chocolatier",
"Pätow-Steegen",
"patronnière-gradeuse",
"patronnières-gradeuses",
"patronnier-gradeur",
"patronniers-gradeurs",
"patte-de-lièvre",
"patte-d'oie",
"patte-pelu",
"patte-pelus",
"pattes-de-lièvre",
"pattes-d'oie",
"pauci-relationnel",
"pauci-relationnelle",
"pauci-relationnelles",
"pauci-relationnels",
"pauci-spécifique",
"pauci-spécifiques",
"Paulhac-en-Margeride",
"Paul-Olivier",
"pause-café",
"pause-carrière",
"pause-santé",
"pauses-café",
"pauses-carrière",
"pauses-santé",
"Paussac-et-Saint-Vivien",
"Pautaines-Augeville",
"payé-emporté",
"pay-per-view",
"Payra-sur-l'Hers",
"Payré-sur-Vendée",
"Payrin-Augmontel",
"Payros-Cazautets",
"pays-bas",
"Pays-Bas",
"Pays-d'Altenbourg",
"Pays-de-Berchtesgaden",
"Pays-de-Jerichow",
"Pays-d'Enhaut",
"Pays-de-Nuremberg",
"pay-to-win",
"Payzac-de-Lanouaille",
"pc-banking",
"P-DG",
"P.-D.G.",
"p.-ê.",
"peau-bleue",
"peau-de-chienna",
"peau-de-chiennai",
"peau-de-chiennaient",
"peau-de-chiennais",
"peau-de-chiennait",
"peau-de-chiennâmes",
"peau-de-chiennant",
"peau-de-chiennas",
"peau-de-chiennasse",
"peau-de-chiennassent",
"peau-de-chiennasses",
"peau-de-chiennassiez",
"peau-de-chiennassions",
"peau-de-chiennât",
"peau-de-chiennâtes",
"peau-de-chienne",
"peau-de-chienné",
"peau-de-chiennée",
"peau-de-chiennées",
"peau-de-chiennent",
"peau-de-chienner",
"peau-de-chiennera",
"peau-de-chiennerai",
"peau-de-chienneraient",
"peau-de-chiennerais",
"peau-de-chiennerait",
"peau-de-chienneras",
"peau-de-chiennèrent",
"peau-de-chiennerez",
"peau-de-chienneriez",
"peau-de-chiennerions",
"peau-de-chiennerons",
"peau-de-chienneront",
"peau-de-chiennes",
"peau-de-chiennés",
"peau-de-chiennez",
"peau-de-chienniez",
"peau-de-chiennions",
"peau-de-chiennons",
"peau-rouge",
"Peau-Rouge",
"Peau-Verte",
"peaux-rouges",
"Peaux-Rouges",
"Peaux-Vertes",
"Pécharic-et-le-Py",
"pêche-bernard",
"pêche-bernards",
"Pech-Luna",
"pédal'eau",
"pédicure-podologue",
"pédicures-podologues",
"Pedro-Rodríguez",
"peer-to-peer",
"Pégairolles-de-Buèges",
"Pégairolles-de-l'Escalette",
"peigne-cul",
"peigne-culs",
"peigne-zizi",
"peine-à-jouir",
"peis-coua",
"Peisey-Nancroix",
"pele-ata",
"Pel-et-Der",
"pelle-à-cul",
"pelle-pioche",
"pelles-à-cul",
"pelles-bêches",
"pelles-pioches",
"Pellouailles-les-Vignes",
"pelure-d'oignon",
"pelvi-crural",
"pelvi-trochantérien",
"pelvi-trochantérienne",
"pelvi-trochantériennes",
"pelvi-trochantériens",
"Peñacerrada-Urizaharra",
"Peñarroya-Pueblonuevo",
"pencak-silat",
"pénicillino-résistance",
"pénicillino-résistances",
"pénicillino-sensibilité",
"pénicillino-sensibilités",
"Penne-d'Agenais",
"Pennes-le-Sec",
"penn-ty",
"pense-bête",
"pense-bêtes",
"Penta-Acquatella",
"penta-cœur",
"penta-cœurs",
"penta-continental",
"penta-core",
"penta-cores",
"Penta-di-Casinca",
"pen-testeur",
"pen-testeurs",
"pen-testeuse",
"pen-testeuses",
"pen-ty",
"people-isa",
"people-isai",
"people-isaient",
"people-isais",
"people-isait",
"people-isâmes",
"people-isant",
"people-isas",
"people-isasse",
"people-isassent",
"people-isasses",
"people-isassiez",
"people-isassions",
"people-isât",
"people-isâtes",
"people-ise",
"people-isé",
"people-isée",
"people-isées",
"people-isent",
"people-iser",
"people-isera",
"people-iserai",
"people-iseraient",
"people-iserais",
"people-iserait",
"people-iseras",
"people-isèrent",
"people-iserez",
"people-iseriez",
"people-iserions",
"people-iserons",
"people-iseront",
"people-ises",
"people-isés",
"people-isez",
"people-isiez",
"people-isions",
"people-isons",
"Percey-le-Grand",
"Percey-le-Pautel",
"Percey-sous-Montormentier",
"perche-brochet",
"Perche-en-Nocé",
"perche-soleil",
"Percy-en-Auge",
"Percy-en-Normandie",
"perdante-perdante",
"perdantes-perdantes",
"perdant-perdant",
"perdants-perdants",
"perd-sa-queue",
"perd-tout",
"père-la-pudeur",
"Père-la-pudeur",
"pères-la-pudeur",
"Péret-Bel-Air",
"perfo-vérif",
"Pergain-Taillac",
"Périers-en-Auge",
"Périers-sur-le-Dan",
"Pérignat-ès-Allier",
"Pérignat-lès-Sarliève",
"Pérignat-sur-Allier",
"Périgny-la-Rose",
"Perles-et-Castelet",
"Perly-Certoux",
"Pernand-Vergelesses",
"Pernes-lès-Boulogne",
"Pernes-les-Fontaines",
"Pero-Casevecchie",
"Pérols-sur-Vézère",
"péronéo-calcanéen",
"péronéo-malléolaire",
"péronéo-malléolaires",
"péronéo-phalangien",
"péronéo-tibial",
"Péronne-en-Mélantois",
"Péronnes-lez-Antoing",
"Péroy-les-Gombries",
"Perpète-la-ouf",
"Perpète-les-Alouettes",
"Perpète-les-Oies",
"Perpète-lès-Oies",
"Perpète-les-Olivettes",
"Perpette-les-Oies",
"Perpezac-le-Blanc",
"Perpezac-le-Noir",
"Perrancey-les-Vieux-Moulins",
"Perrecy-les-Forges",
"Perriers-en-Beauficel",
"Perriers-la-Campagne",
"Perriers-sur-Andelle",
"Perrigny-lès-Dijon",
"Perrigny-sur-Armançon",
"Perrigny-sur-l'Ognon",
"Perrigny-sur-Loire",
"Perrogney-les-Fontaines",
"perroquet-hibou",
"perroquets-hiboux",
"Perros-Guirec",
"perruche-moineau",
"perruches-moineaux",
"Pers-en-Gâtinais",
"Pers-Jussy",
"Perthes-lès-Brienne",
"Perthes-lès-Hurlus",
"Pertheville-Ners",
"pesco-végétarien",
"pèse-acide",
"pèse-acides",
"pèse-alcool",
"pèse-alcools",
"pèse-bébé",
"pèse-bébés",
"pèse-esprit",
"pèse-esprits",
"pèse-lait",
"pèse-laits",
"pèse-lettre",
"pèse-lettres",
"pèse-liqueur",
"pèse-liqueurs",
"pèse-mout",
"pèse-moût",
"pèse-mouts",
"pèse-moûts",
"pèse-nitre",
"pèse-nitres",
"pèse-personne",
"pèse-personnes",
"pèse-sel",
"pèse-sels",
"pèse-sirop",
"pèse-sirops",
"pèse-vernis",
"Pessac-sur-Dordogne",
"Pessa'h",
"Pessat-Villeneuve",
"péta-ampère",
"péta-ampères",
"péta-électron-volt",
"pétaélectron-volt",
"péta-électron-volts",
"pétaélectron-volts",
"pet'che",
"pet-d'âne",
"pet-de-loup",
"pet-de-nonne",
"pet-de-soeur",
"pet-de-sœur",
"Petegem-aan-de-Leie",
"Petegem-aan-de-Schelde",
"pet-en-l'air",
"Peterswald-Löffelscheid",
"pète-sec",
"pète-sèche",
"pète-sèches",
"pète-secs",
"petites-bourgeoises",
"petites-bourgeoisies",
"petites-filles",
"petites-mains",
"petites-maîtresses",
"petites-nièces",
"petites-russes",
"petits-beurre",
"petits-bourgeois",
"petits-chênes",
"petits-déjeuners",
"petits-ducs",
"petits-enfants",
"petits-fils",
"petits-fours",
"petits-gris",
"petits-laits",
"petits-maîtres",
"petits-neveux",
"petits-russes",
"petits-suisses",
"petits-trains",
"Petreto-Bicchisano",
"pétrolier-minéralier",
"pétro-monarchie",
"pétro-monarchies",
"pétro-occipital",
"pétro-salpingo-staphylin",
"pétro-salpingo-staphylins",
"pétro-staphylin",
"pétrus-colien",
"Pétrus-Colien",
"pétrus-colienne",
"Pétrus-Colienne",
"pétrus-coliennes",
"Pétrus-Coliennes",
"pétrus-coliens",
"Pétrus-Coliens",
"pets-de-loup",
"pets-de-nonne",
"peul-peul",
"Peumerit-Quintin",
"peut-être",
"Peux-et-Couffouleux",
"Peypin-d'Aigues",
"Peyrat-de-Bellac",
"Peyrat-la-Nonière",
"Peyrat-le-Château",
"Peyre-en-Aubrac",
"Peyrefitte-du-Razès",
"Peyrefitte-sur-l'Hers",
"Peyrelongue-Abos",
"Peyret-Saint-André",
"Peyriac-de-Mer",
"Peyriac-Minervois",
"Peyrillac-et-Millac",
"Peyrolles-en-Provence",
"Peyrusse-Grande",
"Peyrusse-le-Roc",
"Peyrusse-Massas",
"Peyrusse-Vieille",
"Peyzac-le-Moustier",
"Peyzieux-sur-Saône",
"Pezé-le-Robert",
"Pézènes-les-Mines",
"Pézilla-de-Conflent",
"Pézilla-la-Rivière",
"Pfaffen-Schwabenheim",
"P-frame",
"p-graphe",
"p-graphes",
"pharyngo-laryngite",
"pharyngo-laryngites",
"pharyngo-staphylin",
"phénico-punique",
"phénico-puniques",
"philosopho-théologique",
"philosopho-théologiques",
"pH-mètre",
"phonético-symbolique",
"phoque-garou",
"phoque-léopard",
"phoques-garous",
"phoséthyl-Al",
"phosétyl-Al",
"phosphate-allophane",
"phosphate-allophanes",
"photos-finish",
"phragmito-scirpaie",
"phragmito-scirpaies",
"phrase-clé",
"phrases-clés",
"phréno-glottisme",
"phréno-glottismes",
"physico-chimie",
"physico-chimies",
"physico-chimique",
"physico-chimiques",
"physico-mathématique",
"physico-mathématiques",
"physio-pathologie",
"physio-pathologies",
"piane-piane",
"piano-bar",
"piano-bars",
"piano-forte",
"piano-fortes",
"piano-manivelle",
"Pianotolli-Caldarello",
"Pianottoli-Caldarello",
"pian's",
"pichot-chêne",
"pichots-chênes",
"pick-up",
"pick-ups",
"pico-condensateur",
"pico-condensateurs",
"pico-ohm",
"pico-ohms",
"pics-verts",
"Picto-Charentais",
"pic-vert",
"pic-verts",
"pidgin-english",
"pièces-au-cul",
"pied-à-terre",
"pied-bot",
"pied-d'alouette",
"pied-de-banc",
"pied-de-biche",
"pied-de-boeuf",
"pied-de-bœuf",
"Pied-de-Borne",
"pied-de-chat",
"pied-de-cheval",
"pied-de-chèvre",
"pied-de-coq",
"pied-de-corbeau",
"pied-de-griffon",
"pied-de-lion",
"pied-de-loup",
"pied-de-mouche",
"pied-de-mouton",
"pied-de-pélican",
"pied-de-pigeon",
"pied-de-poule",
"pied-d'étape",
"pied-de-veau",
"pied-d'oiseau",
"pied-droit",
"pié-de-lion",
"pied-fort",
"Piedicorte-di-Gaggio",
"pied-noir",
"pied-noire",
"pied-noirisa",
"pied-noirisai",
"pied-noirisaient",
"pied-noirisais",
"pied-noirisait",
"pied-noirisâmes",
"pied-noirisant",
"pied-noirisas",
"pied-noirisasse",
"pied-noirisassent",
"pied-noirisasses",
"pied-noirisassiez",
"pied-noirisassions",
"pied-noirisât",
"pied-noirisâtes",
"pied-noirise",
"pied-noirisé",
"pied-noirisée",
"pied-noirisées",
"pied-noirisent",
"pied-noiriser",
"pied-noirisera",
"pied-noiriserai",
"pied-noiriseraient",
"pied-noiriserais",
"pied-noiriserait",
"pied-noiriseras",
"pied-noirisèrent",
"pied-noiriserez",
"pied-noiriseriez",
"pied-noiriserions",
"pied-noiriserons",
"pied-noiriseront",
"pied-noirises",
"pied-noirisés",
"pied-noirisez",
"pied-noirisiez",
"pied-noirisions",
"pied-noirisons",
"Pie-d'Orezza",
"pied-plat",
"pied-rouge",
"pieds-bots",
"pieds-d'alouette",
"pieds-de-biche",
"pieds-de-boeuf",
"pieds-de-bœuf",
"pieds-de-chat",
"pieds-de-chèvre",
"pieds-de-coq",
"pieds-de-corbeau",
"pieds-de-griffon",
"pieds-de-lion",
"pieds-de-mouche",
"pieds-de-mouton",
"pieds-de-veau",
"pieds-d'oiseau",
"pieds-droits",
"pieds-forts",
"pieds-noires",
"pieds-noirs",
"pieds-paquets",
"pieds-plats",
"pieds-tendres",
"pied-tendre",
"pied-vert",
"piège-à-cons",
"pièges-à-cons",
"pie-grièche",
"Piégros-la-Clastre",
"Piégut-Pluviers",
"pie-mère",
"Piennes-Onvillers",
"pie-noir",
"pie-noire",
"pie-noires",
"pie-noirs",
"pie-rouge",
"pierre-bénitain",
"Pierre-Bénitain",
"pierre-bénitaine",
"Pierre-Bénitaine",
"pierre-bénitaines",
"Pierre-Bénitaines",
"pierre-bénitains",
"Pierre-Bénitains",
"Pierre-Bénite",
"Pierre-Buffière",
"pierre-buffiérois",
"Pierre-Buffiérois",
"pierre-buffiéroise",
"Pierre-Buffiéroise",
"pierre-buffiéroises",
"Pierre-Buffiéroises",
"Pierre-Chanel",
"Pierre-Châtel",
"pierre-châtelois",
"Pierre-Châtelois",
"pierre-châteloise",
"Pierre-Châteloise",
"pierre-châteloises",
"Pierre-Châteloises",
"Pierre-de-Bresse",
"Pierrefeu-du-Var",
"pierre-feuille-ciseaux",
"Pierrefitte-en-Auge",
"Pierrefitte-en-Beauvaisis",
"Pierrefitte-en-Cinglais",
"Pierrefitte-ès-Bois",
"Pierrefitte-Nestalas",
"Pierrefitte-sur-Aire",
"Pierrefitte-sur-Loire",
"Pierrefitte-sur-Sauldre",
"Pierrefitte-sur-Seine",
"Pierrefontaine-lès-Blamont",
"Pierrefontaine-les-Varans",
"Pierre-la-Treiche",
"Pierre-Levée",
"pierre-levéen",
"Pierre-Levéen",
"pierre-levéenne",
"Pierre-Levéenne",
"pierre-levéennes",
"Pierre-Levéennes",
"pierre-levéens",
"Pierre-Levéens",
"Pierre-Louis",
"Pierre-Marie",
"pierre-montois",
"Pierre-Montois",
"pierre-montoise",
"Pierre-Montoise",
"pierre-montoises",
"Pierre-Montoises",
"Pierremont-sur-Amance",
"Pierre-Morains",
"Pierre-Olivier",
"pierre-papier-ciseaux",
"Pierre-Percée",
"Pierre-Perthuis",
"Pierrepont-sur-Avre",
"Pierrepont-sur-l'Arentèle",
"pierre-qui-vire",
"pierres-qui-virent",
"Pierre-Yves",
"piés-de-lion",
"pies-grièches",
"pies-mères",
"piétin-échaudage",
"piétin-verse",
"Pietra-di-Verde",
"Piets-Plasence-Moustrou",
"piézo-électricité",
"piézo-électricités",
"piézo-électrique",
"piézo-électriques",
"Pihen-lès-Guînes",
"Pijnacker-Nootdorp",
"Pila-Canale",
"pile-poil",
"pilo-sébacé",
"Pin-Balma",
"pince-balle",
"pince-balles",
"pince-érigne",
"pince-érignes",
"pince-fesse",
"pince-fesses",
"pince-lisière",
"pince-maille",
"pince-mailles",
"pince-monseigneur",
"pince-nez",
"pince-notes",
"pince-oreille",
"pince-oreilles",
"pince-sans-rire",
"pinces-monseigneur",
"Pinel-Hauterive",
"ping-pong",
"ping-pongs",
"Pin-Moriès",
"pino-balméen",
"Pino-Balméen",
"pino-balméenne",
"Pino-Balméenne",
"pino-balméennes",
"Pino-Balméennes",
"pino-balméens",
"Pino-Balméens",
"pin-pon",
"pin's",
"Pin-Saint-Denis",
"Pins-Justaret",
"pins-justarétois",
"Pins-Justarétois",
"pins-justarétoise",
"Pins-Justarétoise",
"pins-justarétoises",
"Pins-Justarétoises",
"Piñuécar-Gandullas",
"pin-up",
"piou-piou",
"piou-pious",
"pipe-line",
"pipe-lines",
"piqueur-suceur",
"Pi-Ramsès",
"Piré-sur-Seiche",
"Piriac-sur-Mer",
"pirimiphos-éthyl",
"pirimiphos-méthyl",
"pis-aller",
"pis-allers",
"pisse-au-lit",
"pisse-chien",
"pisse-chiens",
"pisse-copie",
"pisse-copies",
"pisse-debout",
"pisse-froid",
"pisse-mémé",
"pisse-mémère",
"pisse-sang",
"pisse-trois-gouttes",
"pisse-vinaigre",
"pisse-vinaigres",
"pisse-z-yeux",
"pissy-pôvillais",
"Pissy-Pôvillais",
"pissy-pôvillaise",
"Pissy-Pôvillaise",
"pissy-pôvillaises",
"Pissy-Pôvillaises",
"Pissy-Pôville",
"pistillo-staminé",
"pistolet-mitrailleur",
"pistolets-mitrailleurs",
"pit-bulls",
"Pithiviers-le-Vieil",
"pixie-bob",
"Plachy-Buyon",
"plachy-buyonnais",
"Plachy-Buyonnais",
"plachy-buyonnaise",
"Plachy-Buyonnaise",
"plachy-buyonnaises",
"Plachy-Buyonnaises",
"Placy-Montaigu",
"Plaimbois-du-Miroir",
"Plaimbois-Vennes",
"Plaimpied-Givaudins",
"plain-chant",
"Plain-de-Corravillers",
"Plaine-de-Walsch",
"Plaine-Haute",
"Plaines-Saint-Lange",
"plain-pied",
"plains-chants",
"plains-pieds",
"Plaisance-du-Touch",
"plaît-il",
"Plancher-Bas",
"Plancher-les-Mines",
"planches-contacts",
"Plancy-l'Abbaye",
"Plan-d'Aups",
"Plan-d'Aups-Sainte-Baume",
"Plan-de-Baix",
"Plan-de-Cuques",
"Plan-de-la-Tour",
"Plan-d'Orgon",
"Plan-les-Ouates",
"plan-masse",
"plan-plan",
"plan-planisme",
"plan-planismes",
"plan-séquence",
"plan-séquences",
"plans-masses",
"plan-socialisa",
"plan-socialisai",
"plan-socialisaient",
"plan-socialisais",
"plan-socialisait",
"plan-socialisâmes",
"plan-socialisant",
"plan-socialisas",
"plan-socialisasse",
"plan-socialisassent",
"plan-socialisasses",
"plan-socialisassiez",
"plan-socialisassions",
"plan-socialisât",
"plan-socialisâtes",
"plan-socialise",
"plan-socialisé",
"plan-socialisée",
"plan-socialisées",
"plan-socialisent",
"plan-socialiser",
"plan-socialisera",
"plan-socialiserai",
"plan-socialiseraient",
"plan-socialiserais",
"plan-socialiserait",
"plan-socialiseras",
"plan-socialisèrent",
"plan-socialiserez",
"plan-socialiseriez",
"plan-socialiserions",
"plan-socialiserons",
"plan-socialiseront",
"plan-socialises",
"plan-socialisés",
"plan-socialisez",
"plan-socialisiez",
"plan-socialisions",
"plan-socialisons",
"plans-séquences",
"plante-crayon",
"plante-éponge",
"plantes-crayons",
"plaque-bière",
"plaque-tonnerre",
"Plassac-Rouffiac",
"plat-bord",
"plat-cul",
"plat-culs",
"plat-de-bierre",
"Plateau-Central",
"plateau-repas",
"plateaux-repas",
"plate-bande",
"plate-bière",
"plate-face",
"plate-forme",
"plate-longe",
"plates-bandes",
"plates-formes",
"plates-longes",
"platinico-ammonique",
"plats-bords",
"play-back",
"play-backs",
"play-boy",
"play-boys",
"play-off",
"play-offs",
"plein-cintre",
"pleine-fougerais",
"Pleine-Fougerais",
"pleine-fougeraise",
"Pleine-Fougeraise",
"pleine-fougeraises",
"Pleine-Fougeraises",
"Pleine-Fougères",
"plein-emploi",
"Pleine-Selve",
"Pleine-Sève",
"Pleines-Œuvres",
"pleins-cintres",
"Pleisweiler-Oberhofen",
"Plélan-le-Grand",
"Plélan-le-Petit",
"Plénée-Jugon",
"Pléneuf-Val-André",
"Pleslin-Trigavou",
"plessis-ansoldien",
"Plessis-Ansoldien",
"plessis-ansoldienne",
"Plessis-Ansoldienne",
"plessis-ansoldiennes",
"Plessis-Ansoldiennes",
"plessis-ansoldiens",
"Plessis-Ansoldiens",
"Plessis-Barbuise",
"plessis-brionnais",
"Plessis-Brionnais",
"plessis-brionnaise",
"Plessis-Brionnaise",
"plessis-brionnaises",
"Plessis-Brionnaises",
"plessis-bucardésien",
"Plessis-Bucardésien",
"plessis-bucardésienne",
"Plessis-Bucardésienne",
"plessis-bucardésiennes",
"Plessis-Bucardésiennes",
"plessis-bucardésiens",
"Plessis-Bucardésiens",
"Plessis-de-Roye",
"Plessis-du-Mée",
"plessis-episcopien",
"Plessis-Episcopien",
"plessis-épiscopien",
"Plessis-Épiscopien",
"plessis-episcopienne",
"Plessis-Episcopienne",
"plessis-épiscopienne",
"Plessis-Épiscopienne",
"plessis-episcopiennes",
"Plessis-Episcopiennes",
"plessis-épiscopiennes",
"Plessis-Épiscopiennes",
"plessis-episcopiens",
"Plessis-Episcopiens",
"plessis-épiscopiens",
"Plessis-Épiscopiens",
"Plessis-Gatebled",
"plessis-grammoirien",
"Plessis-Grammoirien",
"plessis-grammoirienne",
"Plessis-Grammoirienne",
"plessis-grammoiriennes",
"Plessis-Grammoiriennes",
"plessis-grammoiriens",
"Plessis-Grammoiriens",
"plessis-luzarchois",
"Plessis-Luzarchois",
"plessis-luzarchoise",
"Plessis-Luzarchoise",
"plessis-luzarchoises",
"Plessis-Luzarchoises",
"plessis-macéen",
"Plessis-Macéen",
"plessis-macéenne",
"Plessis-Macéenne",
"plessis-macéennes",
"Plessis-Macéennes",
"plessis-macéens",
"Plessis-Macéens",
"Plessis-Saint-Benoist",
"Plessis-Saint-Jean",
"Plessix-Balisson",
"Plestin-les-Grèves",
"Pleudihen-sur-Rance",
"Pleumeur-Bodou",
"Pleumeur-Gautier",
"pleu-pleu",
"pleure-misère",
"pleure-misères",
"pleuronecte-guitare",
"pleuro-péricardite",
"Pleyber-Christ",
"plieuse-inséreuse",
"plieuses-inséreuses",
"Plobannalec-Lesconil",
"Plœuc-L'Hermitage",
"Plœuc-sur-Lié",
"Plogastel-Saint-Germain",
"Plombières-les-Bains",
"Plombières-lès-Dijon",
"Plonéour-Lanvern",
"Plonévez-du-Faou",
"Plonévez-Porzay",
"plongée-spéléo",
"plongées-spéléo",
"Plorec-sur-Arguenon",
"Plouëc-du-Trieux",
"Plouégat-Guérand",
"Plouégat-Moysan",
"Plouër-sur-Rance",
"Plouezoc'h",
"plouezoc'hois",
"Plouezoc'hois",
"plouezoc'hoise",
"Plouezoc'hoise",
"plouezoc'hoises",
"Plouezoc'hoises",
"Plougastel-Daoulas",
"Ploulec'h",
"ploulec'hois",
"Ploulec'hois",
"ploulec'hoise",
"Ploulec'hoise",
"ploulec'hoises",
"Ploulec'hoises",
"Ploumanac'h",
"Plounéour-Brignogan-Plages",
"Plounéour-Ménez",
"Plounéour-Trez",
"plounéour-trezien",
"Plounéour-Trezien",
"plounéour-trezienne",
"Plounéour-Trezienne",
"plounéour-treziennes",
"Plounéour-Treziennes",
"plounéour-treziens",
"Plounéour-Treziens",
"Plounévez-Lochrist",
"Plounévez-Moëdec",
"Plounévez-Quintin",
"Plourac'h",
"Plourin-lès-Morlaix",
"Ployart-et-Vaurseine",
"ploye-ressort",
"plui-plui",
"plumbo-aragonite",
"plumbo-aragonites",
"plum-cake",
"plum-cakes",
"plume-couteau",
"plumes-couteaux",
"plum-pudding",
"plû-part",
"pluri-continental",
"pluri-interprétable",
"pluri-interprétables",
"pluri-journalier",
"pluri-modal",
"pluri-national",
"pluri-nationale",
"pluri-nationales",
"pluri-nationaux",
"plus-d'atouts",
"plus-disant",
"plus-part",
"plus-payé",
"plus-pétition",
"plus-produit",
"plus-produits",
"plus-que-parfait",
"plus-que-parfaits",
"plus-value",
"plus-values",
"pluto-neptunien",
"pluvier-hirondelle",
"Pobé-Mengao",
"Pocé-les-Bois",
"Pocé-sur-Cisse",
"poche-cuiller",
"poche-revolver",
"poches-revolver",
"pochettes-surprise",
"pochettes-surprises",
"pochette-surprise",
"podio-régalien",
"Podio-Régalien",
"podio-régalienne",
"Podio-Régalienne",
"podio-régaliennes",
"Podio-Régaliennes",
"podio-régaliens",
"Podio-Régaliens",
"podo-orthésiste",
"podo-orthésistes",
"poët-lavalien",
"Poët-Lavalien",
"poët-lavalienne",
"Poët-Lavalienne",
"poët-lavaliennes",
"Poët-Lavaliennes",
"poët-lavaliens",
"Poët-Lavaliens",
"Poey-de-Lescar",
"Poey-d'Oloron",
"Poggio-di-Nazza",
"Poggio-di-Tallano",
"Poggio-di-Venaco",
"Poggio-d'Oletta",
"Poggio-Marinaccio",
"Poggio-Mezzana",
"poggio-mezzanais",
"Poggio-Mezzanais",
"poggio-mezzanaise",
"Poggio-Mezzanaise",
"poggio-mezzanaises",
"Poggio-Mezzanaises",
"pogne-cul",
"pogne-culs",
"Poids-de-Fiole",
"poids-lourd",
"poids-lourds",
"Poigny-la-Forêt",
"Poilcourt-Sydney",
"Poillé-sur-Vègre",
"Poilly-lez-Gien",
"Poilly-sur-Serein",
"Poilly-sur-Tholon",
"Poinçon-lès-Larrey",
"Poinson-lès-Fayl",
"Poinson-lès-Grancey",
"Poinson-lès-Nogent",
"point-arrière",
"point-col",
"Pointe-à-Pitre",
"Pointe-Claire",
"pointe-de-coeur",
"pointe-de-cœur",
"pointe-de-diamant",
"Pointe-du-Laquois",
"Pointe-Fortunais",
"Pointe-Fortunien",
"pointe-noirais",
"Pointe-Noirais",
"pointe-noiraise",
"Pointe-Noiraise",
"pointe-noiraises",
"Pointe-Noiraises",
"Pointe-Noire",
"pointer-et-cliquer",
"pointes-de-coeur",
"pointes-de-cœur",
"pointes-de-diamant",
"Pointis-de-Rivière",
"Pointis-Inard",
"point-milieu",
"point-selle",
"points-virgules",
"points-voyelles",
"point-virgule",
"point-voyelle",
"Poiseul-la-Grange",
"Poiseul-la-Ville-et-Laperrière",
"Poiseul-lès-Saulx",
"poissonnier-écailler",
"poitevin-saintongeais",
"Poitou-Charentes",
"poivre-sel",
"Poix-de-Picardie",
"Poix-du-Nord",
"poix-résine",
"Poix-Terron",
"poka-yoké",
"Polaincourt-et-Clairefontaine",
"Poleymieux-au-Mont-d'Or",
"Poliez-Pittet",
"politico-économique",
"politico-économiques",
"politico-idéologique",
"politico-idéologiques",
"politico-médiatique",
"politico-religieuse",
"politico-religieuses",
"politico-religieux",
"pollueur-payeur",
"pollueurs-payeurs",
"poly-articulaire",
"poly-articulaires",
"polychlorodibenzo-p-dioxine",
"polychlorodibenzo-p-dioxines",
"poly-insaturé",
"poly-insaturée",
"poly-insaturées",
"poly-insaturés",
"poly-sexuel",
"poly-sexuelle",
"Poméranie-Occidentale-de-l'Est",
"Poméranie-Occidentale-du-Nord",
"pomme-de-pin",
"pomme-grenade",
"Pommerit-Jaudy",
"Pommerit-le-Vicomte",
"pommes-de-pin",
"Pommier-de-Beaurepaire",
"Pommiers-la-Placette",
"Pommiers-Moulons",
"pompages-turbinages",
"pompage-turbinage",
"Pompierre-sur-Doubs",
"Poncé-sur-le-Loir",
"Poncey-lès-Athée",
"Poncey-sur-l'Ignon",
"Ponches-Estruval",
"Ponet-et-Saint-Auban",
"Ponlat-Taillebourg",
"Ponsan-Soubiran",
"Ponson-Debat-Pouts",
"Ponson-Dessus",
"Pontailler-sur-Saône",
"Pontamafrey-Montpascal",
"Pontault-Combault",
"Pontcharra-sur-Turdine",
"Ponte-Avancé",
"Ponteils-et-Brésis",
"Pontenx-les-Forges",
"Pontfaverger-Moronvilliers",
"Pontiacq-Viellepinte",
"Pontoise-lès-Noyon",
"Pontonx-sur-l'Adour",
"ponts-bascules",
"ponts-canaux",
"ponts-de-céais",
"Ponts-de-Céais",
"ponts-de-céaise",
"Ponts-de-Céaise",
"ponts-de-céaises",
"Ponts-de-Céaises",
"Ponts-et-Marais",
"ponts-levis",
"ponts-neufs",
"popa'a",
"pop-corn",
"pop-in",
"pop-ins",
"pop-punk",
"pop-up",
"pop-ups",
"porc-épic",
"Porcieu-Amblagnieu",
"porcs-épics",
"Portel-des-Corbières",
"Porté-Puymorens",
"Portes-en-Valdaine",
"portes-fenêtres",
"Portes-lès-Valence",
"portes-tambour",
"Portet-d'Aspet",
"Portet-de-Luchon",
"Portet-sur-Garonne",
"porteur-de-peau",
"Porto-Novo",
"Porto-Ricain",
"Porto-Ricaine",
"Porto-Ricaines",
"Porto-Ricains",
"Porto-Rico",
"porto-vecchiais",
"Porto-Vecchiais",
"porto-vecchiaise",
"Porto-Vecchiaise",
"porto-vecchiaises",
"Porto-Vecchiaises",
"Porto-Vecchio",
"portrait-charge",
"portrait-robot",
"portraits-charges",
"portraits-robots",
"posé-décollé",
"posé-décollés",
"pose-tubes",
"post-11-Septembre",
"Postbauer-Heng",
"potassico-ammonique",
"potassico-mercureux",
"pot-au-feu",
"pot-au-noir",
"pot-beurrier",
"pot-bouille",
"pot-de-vin",
"pot-en-tête",
"poto-poto",
"pot-pourri",
"potron-jacquet",
"potron-minet",
"Potsdam-Mittelmark",
"pots-de-vin",
"pots-pourris",
"Pouan-les-Vallées",
"pouce-pied",
"pouces-pieds",
"pou-de-soie",
"poudre-éclair",
"poudres-éclair",
"poudres-éclairs",
"pouët-pouët",
"Pougne-Hérisson",
"Pougues-les-Eaux",
"Pouillé-les-Côteaux",
"Pouilley-Français",
"Pouilley-les-Vignes",
"Pouilly-en-Auxois",
"Pouilly-le-Monial",
"Pouilly-lès-Feurs",
"Pouilly-les-Nonains",
"Pouilly-sous-Charlieu",
"Pouilly-sur-Loire",
"Pouilly-sur-Meuse",
"Pouilly-sur-Saône",
"Pouilly-sur-Serre",
"Pouilly-sur-Vingeanne",
"Poulan-Pouzols",
"Pouldavid-sur-Mer",
"Poule-les-Echarmeaux",
"Poule-les-Écharmeaux",
"Pouligney-Lusans",
"Pouligny-Notre-Dame",
"Pouligny-Saint-Martin",
"pouligny-saint-pierre",
"Pouligny-Saint-Pierre",
"Poullan-sur-Mer",
"poult-de-soie",
"Poulton-le-Fylde",
"poults-de-soie",
"Pouques-Lormes",
"pour-boire",
"pour-cent",
"Pournoy-la-Chétive",
"Pournoy-la-Grasse",
"pourri-gâté",
"Poursay-Garnaud",
"Poursiugues-Boucoue",
"poursuite-bâillon",
"Pouru-aux-Bois",
"Pouru-Saint-Remy",
"pousse-au-crime",
"pousse-au-jouir",
"pousse-au-vice",
"pousse-broche",
"pousse-broches",
"pousse-café",
"pousse-cafés",
"pousse-caillou",
"pousse-cailloux",
"pousse-cambrure",
"pousse-cambrures",
"pousse-cul",
"pousse-culs",
"pousse-fiche",
"pousse-goupille",
"pousse-mégot",
"pousse-mégots",
"pousse-navette",
"pousse-pied",
"pousse-pieds",
"pousse-pointe",
"pousse-pointes",
"pousse-pousse",
"Poussy-la-Campagne",
"pout-de-soie",
"pouts-de-soie",
"poux-de-soie",
"Pouy-de-Touges",
"Pouy-Loubrin",
"pouy-roquelain",
"Pouy-Roquelain",
"pouy-roquelaine",
"Pouy-Roquelaine",
"pouy-roquelaines",
"Pouy-Roquelaines",
"pouy-roquelains",
"Pouy-Roquelains",
"Pouy-Roquelaure",
"Pouy-sur-Vannes",
"Pouzols-Minervois",
"Pouzy-Mésangy",
"pow-wow",
"pow-wows",
"Pozo-Lorente",
"PPD-T",
"Pradelles-Cabardès",
"Pradelles-en-Val",
"Pradère-les-Bourguets",
"Prades-d'Aubrac",
"Prades-le-Lez",
"Prades-Salars",
"Prades-sur-Vernazobre",
"Prads-Haute-Bléone",
"Pralognan-la-Vanoise",
"Prat-Bonrepaux",
"Prat-et-Bonrepaux",
"Prato-di-Giovellina",
"Prats-de-Carlux",
"Prats-de-Mollo",
"Prats-de-Mollo-la-Preste",
"Prats-de-Sournia",
"Prats-du-Périgord",
"Praz-sur-Arly",
"Préaux-Bocage",
"Préaux-du-Perche",
"Préaux-Saint-Sébastien",
"Préchacq-Josbaig",
"Préchacq-les-Bains",
"Préchacq-Navarrenx",
"Préchac-sur-Adour",
"Précy-le-Sec",
"Précy-Notre-Dame",
"Précy-Saint-Martin",
"Précy-sous-Thil",
"Précy-sur-Marne",
"Précy-sur-Oise",
"Précy-sur-Vrin",
"Pregny-Chambésy",
"Premeaux-Prissey",
"premier-ministra",
"Premier-ministrable",
"Premier-ministrables",
"premier-ministrai",
"premier-ministraient",
"premier-ministrais",
"premier-ministrait",
"premier-ministrâmes",
"premier-ministrant",
"premier-ministras",
"premier-ministrasse",
"premier-ministrassent",
"premier-ministrasses",
"premier-ministrassiez",
"premier-ministrassions",
"premier-ministrât",
"premier-ministrâtes",
"premier-ministre",
"premier-ministré",
"premier-ministrée",
"premier-ministrées",
"premier-ministrent",
"premier-ministrer",
"premier-ministrera",
"premier-ministrerai",
"premier-ministreraient",
"premier-ministrerais",
"premier-ministrerait",
"premier-ministreras",
"premier-ministrèrent",
"premier-ministrerez",
"premier-ministreriez",
"premier-ministrerions",
"premier-ministrerons",
"premier-ministreront",
"premier-ministres",
"premier-ministrés",
"premier-ministrez",
"premier-ministriez",
"premier-ministrions",
"premier-ministrons",
"premier-né",
"premiers-nés",
"Premosello-Chiovenda",
"prés-bois",
"président-candidat",
"présidente-candidate",
"présidentes-candidates",
"présidents-candidats",
"présidents-directeurs",
"Presles-en-Brie",
"Presles-et-Boves",
"Presles-et-Thierny",
"presqu'accident",
"presqu'accidents",
"presqu'ile",
"presqu'île",
"presqu'iles",
"presqu'îles",
"Pressagny-l'Orgueilleux",
"prés-salés",
"press-book",
"press-books",
"presse-agrume",
"presse-agrumes",
"presse-ail",
"presse-artère",
"presse-artères",
"presse-citron",
"presse-citrons",
"presse-étoffe",
"presse-étoffes",
"presse-étoupe",
"presse-étoupes",
"presse-fruits",
"presse-légumes",
"presse-papier",
"presse-papiers",
"presse-purée",
"presse-purées",
"presse-urètre",
"presse-urètres",
"pressignaco-vicois",
"Pressignaco-Vicois",
"pressignaco-vicoise",
"Pressignaco-Vicoise",
"pressignaco-vicoises",
"Pressignaco-Vicoises",
"Pressignac-Vicq",
"Pressigny-les-Pins",
"Pressy-sous-Dondin",
"prés-vergers",
"prêt-à-monter",
"prêt-à-penser",
"prêt-à-porter",
"prêt-à-poster",
"prête-nom",
"prête-noms",
"Prétot-Sainte-Suzanne",
"Prétot-Vicquemare",
"prêtres-ouvriers",
"prêts-à-penser",
"prêts-à-porter",
"Pretz-en-Argonne",
"Preuilly-la-Ville",
"Preuilly-sur-Claise",
"Preutin-Higny",
"Preux-au-Bois",
"Preux-au-Sart",
"preux-romanien",
"Preux-Romanien",
"preux-romanienne",
"Preux-Romanienne",
"preux-romaniennes",
"Preux-Romaniennes",
"preux-romaniens",
"Preux-Romaniens",
"Prévessin-Moëns",
"Preyssac-d'Excideuil",
"Prez-sous-Lafauche",
"Prez-sur-Marne",
"Prez-vers-Noréaz",
"prie-Dieu",
"Prignac-en-Médoc",
"Prignac-et-Marcamps",
"Prignitz-de-l'Est-Ruppin",
"prima-mensis",
"prime-sautier",
"prim'holstein",
"prince-édouardien",
"Prince-Édouardien",
"prince-édouardienne",
"Prince-Édouardienne",
"prince-édouardiennes",
"Prince-Édouardiennes",
"prince-édouardiens",
"Prince-Édouardiens",
"prince-électeur",
"prince-président",
"prince-sans-rire",
"princes-électeurs",
"princes-présidents",
"Principauté-Ultérieure",
"Prin-Deyrançon",
"Prinsuéjols-Malbouzon",
"prisons-écoles",
"Prissé-la-Charrière",
"privat-docent",
"privat-docentisme",
"privat-docentismes",
"prix-choc",
"prix-chocs",
"Prix-lès-Mézières",
"p'rlotte",
"Proche-Orient",
"Proença-a-Nova",
"programme-cadre",
"programmes-cadres",
"prohexadione-calcium",
"promène-couillon",
"promène-couillons",
"promis-juré",
"promis-jurée",
"promis-jurées",
"promis-jurés",
"prône-misère",
"pronom-adjectif",
"pronoms-adjectifs",
"propre-à-rien",
"propres-à-rien",
"prostato-péritonéal",
"prostato-péritonéale",
"prostato-péritonéales",
"prostato-péritonéaux",
"protège-cahier",
"protège-cahiers",
"protège-dent",
"protège-dents",
"protège-mamelon",
"protège-mamelons",
"protège-oreille",
"protège-oreilles",
"protège-slip",
"protège-slips",
"protège-tibia",
"protège-tibias",
"prout-prout",
"prout-proute",
"prout-proutes",
"prout-prouts",
"Provence-Alpes-Côte-d'Azur",
"Provenchères-et-Colroy",
"Provenchères-lès-Darney",
"Provenchères-sur-Fave",
"Provenchères-sur-Marne",
"Provenchères-sur-Meuse",
"Provinces-Unies",
"Proviseux-et-Plesnoy",
"prud'homal",
"prud'homale",
"prud'homales",
"prud'homaux",
"prud'homie",
"prud'homies",
"Pruillé-le-Chétif",
"Pruillé-l'Eguillé",
"Pruillé-l'Éguillé",
"Prunay-Belleville",
"Prunay-Cassereau",
"Prunay-en-Yvelines",
"Prunay-le-Gillon",
"Prunay-le-Temple",
"Prunay-sur-Essonne",
"Prunelli-di-Casacconi",
"Prunelli-di-Fiumorbo",
"Prunet-et-Belpuig",
"prunet-puigois",
"Prunet-Puigois",
"prunet-puigoise",
"Prunet-Puigoise",
"prunet-puigoises",
"Prunet-Puigoises",
"prunier-cerise",
"pruniers-cerises",
"Pruniers-en-Sologne",
"Prusly-sur-Ource",
"Prusse-Orientale",
"pschitt-pschitt",
"psycho-physiologique",
"psycho-physiologiques",
"psycho-physique",
"psycho-physiques",
"psycho-pop",
"p'tain",
"ptérygo-pharyngien",
"p't-être",
"p'tit",
"p'tite",
"p'tites",
"p'tits",
"pub-restaurant",
"pub-restaurants",
"puce-chique",
"puces-chiques",
"Puch-d'Agenais",
"Puech-Cabrier",
"pue-la-sueur",
"Puget-Rostang",
"Puget-sur-Argens",
"Puget-Théniers",
"Puget-Ville",
"Pugny-Chatenod",
"Puig-reig",
"Puilly-et-Charbeaux",
"Puiselet-le-Marais",
"puiset-doréen",
"Puiset-Doréen",
"puiset-doréenne",
"Puiset-Doréenne",
"puiset-doréennes",
"Puiset-Doréennes",
"puiset-doréens",
"Puiset-Doréens",
"Puiseux-en-Bray",
"Puiseux-en-France",
"Puiseux-en-Retz",
"Puiseux-le-Hauberger",
"Puiseux-les-Louvres",
"Puiseux-Pontoise",
"Puisieux-et-Clanlieu",
"puis-je",
"Puits-et-Nuisement",
"Puits-la-Lande",
"Puits-la-Vallée",
"Pujo-le-Plan",
"Pujols-sur-Ciron",
"Puligny-Montrachet",
"pull-buoy",
"pull-buoys",
"pull-over",
"pull-overs",
"pull-up",
"pulmo-aortique",
"pulso-réacteurs",
"pulvérisateur-mélangeur",
"punaise-mouche",
"punaises-mouches",
"punching-ball",
"punching-balls",
"punkah-wallah",
"pure-laine",
"purge-mariage",
"purge-mariages",
"pur-sang",
"pur-sangs",
"purs-sangs",
"push-back",
"push-up",
"Pusy-et-Epenoux",
"Pusy-et-Épenoux",
"Putanges-le-Lac",
"Putanges-Pont-Ecrepin",
"Putanges-Pont-Écrepin",
"putot-bessinois",
"Putot-Bessinois",
"putot-bessinoise",
"Putot-Bessinoise",
"putot-bessinoises",
"Putot-Bessinoises",
"Putot-en-Auge",
"Putot-en-Bessin",
"Puttelange-aux-Lacs",
"Puttelange-lès-Farschviller",
"Puttelange-lès-Thionville",
"Puygaillard-de-Lomagne",
"Puygaillard-de-Quercy",
"Puyol-Cazalet",
"pyraflufen-éthyl",
"Pyrénées-Atlantiques",
"Pyrénées-Orientales",
"pyrimiphos-éthyl",
"pyrimiphos-méthyl",
"pyro-électricité",
"pyro-électricités",
"pyro-électrique",
"pyro-électriques",
"q'anjob'al",
"Qo'noS",
"quad-core",
"quad-cores",
"quadri-accélération",
"quadri-accélérationnellement",
"quadri-ailé",
"quadri-couche",
"quadri-couches",
"quadri-courant",
"quadri-dimensionnel",
"quadri-dimensionnelle",
"quadri-dimensionnelles",
"quadri-dimensionnels",
"quadri-rotor",
"quadri-rotors",
"quadruple-croche",
"quadruples-croches",
"Quaix-en-Chartreuse",
"quant-à-moi",
"quant-à-soi",
"quarante-cinq",
"quarante-deux",
"quarante-douze",
"quarante-et-un",
"quarante-et-une",
"quarante-huit",
"quarante-huitard",
"quarante-huitarde",
"quarante-huitardes",
"quarante-huitards",
"quarante-huitième",
"quarante-huitièmes",
"quarante-langues",
"quarante-neuf",
"quarante-neuvième",
"quarante-neuvièmes",
"quarante-quatre",
"quarante-sept",
"quarante-six",
"quarante-trois",
"quarante-vingt",
"Quarré-les-Tombes",
"quart-arrière",
"quart-biscuité",
"quart-de-cercle",
"quart-de-finaliste",
"quart-de-finalistes",
"quart-de-pouce",
"quart-d'heure",
"quarte-fagot",
"quartier-général",
"quartier-maitre",
"quartier-maître",
"quartier-maitres",
"quartier-mestre",
"quartiers-maîtres",
"quart-monde",
"quarts-arrières",
"quarts-de-cercle",
"quart-temps",
"quatorze-marsiste",
"quatorze-marsistes",
"quatre-cent-vingt-et-un",
"Quatre-Champs",
"quatre-chevaux",
"quatre-cinq-un",
"quatre-cornes",
"quatre-de-chiffre",
"quatre-épées",
"quatre-épices",
"quatre-feuilles",
"quatre-heura",
"quatre-heurai",
"quatre-heuraient",
"quatre-heurais",
"quatre-heurait",
"quatre-heurâmes",
"quatre-heurant",
"quatre-heuras",
"quatre-heurasse",
"quatre-heurassent",
"quatre-heurasses",
"quatre-heurassiez",
"quatre-heurassions",
"quatre-heurât",
"quatre-heurâtes",
"quatre-heure",
"quatre-heuré",
"quatre-heurent",
"quatre-heurer",
"quatre-heurera",
"quatre-heurerai",
"quatre-heureraient",
"quatre-heurerais",
"quatre-heurerait",
"quatre-heureras",
"quatre-heurèrent",
"quatre-heurerez",
"quatre-heureriez",
"quatre-heurerions",
"quatre-heurerons",
"quatre-heureront",
"quatre-heures",
"quatre-heurez",
"quatre-heuriez",
"quatre-heurions",
"quatre-heurons",
"quatre-huit",
"quatre-mâts",
"Quatre-Nations",
"quatre-œil",
"quatre-pieds",
"quatre-quart",
"quatre-quarts",
"quatre-quatre",
"quatre-quatre-deux",
"quatre-quint",
"quatre-quints",
"quatre-quinze",
"quatre-quinzes",
"quatre-routois",
"Quatre-Routois",
"quatre-routoise",
"Quatre-Routoise",
"quatre-routoises",
"Quatre-Routoises",
"quatre-saisons",
"quatres-de-chiffre",
"quatre-temps",
"quatre-trois-trois",
"quatre-vingt",
"quatre-vingtaine",
"quatre-vingtaines",
"quatre-vingt-cinq",
"quatre-vingt-deux",
"quatre-vingt-dix",
"quatre-vingt-dix-huit",
"quatre-vingt-dixième",
"quatre-vingt-dixièmes",
"quatre-vingt-dix-neuf",
"quatre-vingt-dix-neuvième",
"quatre-vingt-dix-neuvièmes",
"quatre-vingt-dix-sept",
"quatre-vingt-dizaine",
"quatre-vingt-dizaines",
"quatre-vingt-douze",
"quatre-vingt-huit",
"quatre-vingtième",
"quatre-vingtièmes",
"quatre-vingt-neuf",
"quatre-vingt-onze",
"quatre-vingt-quatorze",
"quatre-vingt-quatre",
"quatre-vingt-quinze",
"quatre-vingts",
"quatre-vingt-seize",
"quatre-vingt-sept",
"quatre-vingt-six",
"quatre-vingt-treize",
"quatre-vingt-trois",
"quatre-vingt-un",
"quatre-vingt-une",
"quat'z'arts",
"Quelaines-Saint-Gault",
"quelques-unes",
"quelques-uns",
"quelqu'un",
"quelqu'une",
"Quemigny-Poisot",
"Quemigny-sur-Seine",
"Quemper-Guézennec",
"que'ques",
"Quesnay-Guesnon",
"Quesnoy-le-Montant",
"Quesnoy-sur-Airaines",
"Quesnoy-sur-Deûle",
"questche-wasser",
"question-piège",
"questions-pièges",
"questions-réponses",
"questions-tags",
"question-tag",
"Quet-en-Beaumont",
"Quettreville-sur-Sienne",
"queue-d'aronde",
"queue-de-carpe",
"queue-de-chat",
"queue-de-cheval",
"queue-de-cochon",
"queue-de-lion",
"queue-de-loup",
"queue-de-morue",
"queue-de-paon",
"queue-de-pie",
"queue-de-poêle",
"queue-de-poireau",
"queue-de-porc",
"queue-de-pourceau",
"queue-de-rat",
"queue-de-renard",
"queue-de-scorpion",
"queue-de-souris",
"queue-de-vache",
"queue-d'hironde",
"queue-d'oison",
"queue-d'or",
"Queue-du-Bois",
"queue-du-chat",
"queue-fourchue",
"queue-rouge",
"queues-d'aronde",
"queues-de-chat",
"queues-de-cheval",
"queues-de-cochon",
"queues-de-morue",
"queues-de-pie",
"queues-de-poêle",
"queues-de-pourceau",
"queues-de-rat",
"queues-de-renard",
"queues-de-vache",
"queues-d'hironde",
"queues-d'or",
"Quévreville-la-Poterie",
"Quévy-le-Grand",
"Quévy-le-Petit",
"Queyssac-les-Vignes",
"quick-and-dirty",
"Quiers-sur-Bézonde",
"Quiéry-la-Motte",
"Quillebeuf-sur-Seine",
"Quincampoix-Fleuzy",
"Quincié-en-Beaujolais",
"Quincy-Basse",
"Quincy-Landzécourt",
"Quincy-le-Vicomte",
"Quincy-sous-le-Mont",
"Quincy-sous-Sénart",
"Quincy-Voisins",
"qu-in-situ",
"Quint-Fonsegrives",
"quintuple-croche",
"quintuples-croches",
"quinze-vingt",
"quinze-vingts",
"Quiry-le-Sec",
"qui-va-là",
"qui-vive",
"quizalofop-éthyl",
"quizalofop-p-éthyl",
"quizalofop-P-éthyl",
"Quœux-Haut-Maînil",
"quote-part",
"quotes-parts",
"Qwa-Qwa",
"Raa-Besenbek",
"Rabastens-de-Bigorre",
"rabat-eau",
"rabat-eaux",
"rabat-joie",
"rabat-joies",
"Rabat-les-Trois-Seigneurs",
"Rabenkirchen-Faulück",
"rabi'-oul-aououal",
"rabi'-out-tani",
"Rablay-sur-Layon",
"Rachecourt-sur-Marne",
"Rachecourt-Suzémont",
"racine-blanche",
"racines-blanches",
"radars-tronçons",
"radar-tronçon",
"Raddon-et-Chapendu",
"radicale-socialiste",
"radicales-socialistes",
"radical-socialisme",
"radical-socialismes",
"radical-socialiste",
"radicaux-socialistes",
"Radinghem-en-Weppes",
"radio-actinium",
"radio-activité",
"radio-activités",
"radio-amateur",
"radio-amateurs",
"radio-canadien",
"radio-carpien",
"radio-carpienne",
"radio-carpiennes",
"radio-carpiens",
"radio-crochet",
"radio-crochets",
"radio-cubital",
"radio-diffusion",
"radio-étiquette",
"radio-étiquettes",
"radio-gramophone",
"radio-gramophones",
"radio-identification",
"radio-identifications",
"radio-interféromètre",
"radio-interféromètres",
"radio-isotope",
"radio-isotopes",
"radio-opacité",
"radio-opacités",
"radio-palmaire",
"radio-phonographe",
"radio-phonographes",
"radio-réalité",
"radio-réalités",
"radio-réveil",
"radio-taxi",
"radio-télévisé",
"radio-télévisée",
"radio-télévisées",
"radio-télévisés",
"radio-télévision",
"radio-télévisions",
"radio-thorium",
"rad-soc",
"rad'soc",
"rad-socs",
"rad'socs",
"Ragow-Merz",
"rag-time",
"rag-times",
"Raguhn-Jeßnitz",
"rahat-lokoum",
"rahat-lokoums",
"rahat-loukoum",
"rahat-loukoums",
"raid-aventure",
"rai-de-coeur",
"rai-de-cœur",
"raie-aigle",
"raie-guitare",
"raie-papillon",
"raies-aigles",
"raies-papillons",
"Raillencourt-Sainte-Olle",
"rail-road",
"rail-route",
"Raï'n'B",
"rais-de-coeur",
"rais-de-cœur",
"raisin-de-chien",
"raisins-de-chien",
"Raissac-d'Aude",
"Raissac-sur-Lampy",
"Ralbitz-Rosenthal",
"ralé-poussé",
"râlé-poussé",
"Râlé-Poussé",
"rallie-papier",
"rallonge-bouton",
"rallonge-boutons",
"ramasse-bourrier",
"ramasse-bourriers",
"ramasse-couvert",
"ramasse-couverts",
"ramasse-miette",
"ramasse-miettes",
"ramasse-monnaie",
"ramasse-poussière",
"ramasse-poussières",
"ramasse-ton-bras",
"ramasseuse-presse",
"ramasseuses-presses",
"Rambluzin-et-Benoite-Vaux",
"Ramegnies-Chin",
"Ramillies-Offus",
"Ramonville-Saint-Agne",
"(R)-amphétamine",
"Ramstein-Miesenbach",
"Rancourt-sur-Ornain",
"Rang-du-Fliers",
"Ransbach-Baumbach",
"Ranspach-le-Bas",
"Ranspach-le-Haut",
"Ranville-Breuillaud",
"Raon-aux-Bois",
"Raon-lès-Leau",
"Raon-l'Etape",
"Raon-l'Étape",
"Raon-sur-Plaine",
"Rapide-Danseurois",
"Rapperswil-Jona",
"Raschau-Markersbach",
"ras-de-cou",
"rase-motte",
"rase-mottes",
"rase-pet",
"rase-pets",
"ras-la-moule",
"ras-le-bol",
"ras-le-bonbon",
"ras-le-cresson",
"ras-les-fesses",
"rat-baillet",
"rat-bayard",
"rat-de-cave",
"rat-garou",
"ratisse-caisse",
"rats-de-cave",
"rats-garous",
"rat-taupe",
"rat-trompette",
"Raucourt-au-Bois",
"Raucourt-et-Flaba",
"Rauville-la-Bigot",
"Rauville-la-Place",
"Ravel-et-Ferriers",
"Raville-sur-Sânon",
"Raye-sur-Authie",
"ray-grass",
"Rayol-Canadel-sur-Mer",
"Ray-sur-Saône",
"Razac-de-Saussignac",
"Razac-d'Eymet",
"Razac-sur-l'Isle",
"raz-de-marée",
"ready-made",
"reality-show",
"reality-shows",
"réal-politique",
"réal-politiques",
"réarc-bouta",
"réarc-boutai",
"réarc-boutaient",
"réarc-boutais",
"réarc-boutait",
"réarc-boutâmes",
"réarc-boutant",
"réarc-boutas",
"réarc-boutasse",
"réarc-boutassent",
"réarc-boutasses",
"réarc-boutassiez",
"réarc-boutassions",
"réarc-boutât",
"réarc-boutâtes",
"réarc-boute",
"réarc-bouté",
"réarc-boutée",
"réarc-boutées",
"réarc-boutent",
"réarc-bouter",
"réarc-boutera",
"réarc-bouterai",
"réarc-bouteraient",
"réarc-bouterais",
"réarc-bouterait",
"réarc-bouteras",
"réarc-boutèrent",
"réarc-bouterez",
"réarc-bouteriez",
"réarc-bouterions",
"réarc-bouterons",
"réarc-bouteront",
"réarc-boutes",
"réarc-boutés",
"réarc-boutez",
"réarc-boutiez",
"réarc-boutions",
"réarc-boutons",
"Réaup-Lisse",
"Rebecq-Rognon",
"Rebreuve-Ranchicourt",
"Rebreuve-sur-Canche",
"rebrousse-poil",
"réception-cadeaux",
"Recey-sur-Ource",
"Rechenberg-Bienenmühle",
"Réchicourt-la-Petite",
"Réchicourt-le-Château",
"récipient-mesure",
"récipient-mesures",
"Reckange-sur-Mess",
"Reckingen-Gluringen",
"Recologne-lès-Rioz",
"Recoubeau-Jansac",
"Recoules-d'Aubrac",
"Recoules-de-Fumas",
"Recoules-Prévinquières",
"recourbe-cils",
"Récourt-le-Creux",
"Recques-sur-Course",
"Recques-sur-Hem",
"recto-vaginal",
"recto-verso",
"redouble-cliqua",
"redouble-cliquai",
"redouble-cliquaient",
"redouble-cliquais",
"redouble-cliquait",
"redouble-cliquâmes",
"redouble-cliquant",
"redouble-cliquas",
"redouble-cliquasse",
"redouble-cliquassent",
"redouble-cliquasses",
"redouble-cliquassiez",
"redouble-cliquassions",
"redouble-cliquât",
"redouble-cliquâtes",
"redouble-clique",
"redouble-cliqué",
"redouble-cliquent",
"redouble-cliquer",
"redouble-cliquera",
"redouble-cliquerai",
"redouble-cliqueraient",
"redouble-cliquerais",
"redouble-cliquerait",
"redouble-cliqueras",
"redouble-cliquèrent",
"redouble-cliquerez",
"redouble-cliqueriez",
"redouble-cliquerions",
"redouble-cliquerons",
"redouble-cliqueront",
"redouble-cliques",
"redouble-cliquez",
"redouble-cliquiez",
"redouble-cliquions",
"redouble-cliquons",
"redresse-seins",
"re'em",
"re'ems",
"réentr'apercevaient",
"réentr'apercevais",
"réentr'apercevait",
"réentr'apercevant",
"réentr'apercevez",
"réentr'aperceviez",
"réentr'apercevions",
"réentr'apercevoir",
"réentr'apercevons",
"réentr'apercevra",
"réentr'apercevrai",
"réentr'apercevraient",
"réentr'apercevrais",
"réentr'apercevrait",
"réentr'apercevras",
"réentr'apercevrez",
"réentr'apercevriez",
"réentr'apercevrions",
"réentr'apercevrons",
"réentr'apercevront",
"réentr'aperçois",
"réentr'aperçoit",
"réentr'aperçoive",
"réentr'aperçoivent",
"réentr'aperçoives",
"réentr'aperçu",
"réentr'aperçue",
"réentr'aperçues",
"réentr'aperçûmes",
"réentr'aperçurent",
"réentr'aperçus",
"réentr'aperçusse",
"réentr'aperçussent",
"réentr'aperçusses",
"réentr'aperçussiez",
"réentr'aperçussions",
"réentr'aperçut",
"réentr'aperçût",
"réentr'aperçûtes",
"réentr'ouvert",
"réentr'ouverte",
"réentr'ouvertes",
"réentr'ouverts",
"réentr'ouvraient",
"réentr'ouvrais",
"réentr'ouvrait",
"réentr'ouvrant",
"réentr'ouvre",
"réentr'ouvrent",
"réentr'ouvres",
"réentr'ouvrez",
"réentr'ouvriez",
"réentr'ouvrîmes",
"réentr'ouvrions",
"réentr'ouvrir",
"réentr'ouvrira",
"réentr'ouvrirai",
"réentr'ouvriraient",
"réentr'ouvrirais",
"réentr'ouvrirait",
"réentr'ouvriras",
"réentr'ouvrirent",
"réentr'ouvrirez",
"réentr'ouvririez",
"réentr'ouvririons",
"réentr'ouvrirons",
"réentr'ouvriront",
"réentr'ouvris",
"réentr'ouvrisse",
"réentr'ouvrissent",
"réentr'ouvrisses",
"réentr'ouvrissiez",
"réentr'ouvrissions",
"réentr'ouvrit",
"réentr'ouvrît",
"réentr'ouvrîtes",
"réentr'ouvrons",
"Réez-Fosse-Martin",
"refox-trotta",
"refox-trottai",
"refox-trottaient",
"refox-trottais",
"refox-trottait",
"refox-trottâmes",
"refox-trottant",
"refox-trottas",
"refox-trottasse",
"refox-trottassent",
"refox-trottasses",
"refox-trottassiez",
"refox-trottassions",
"refox-trottât",
"refox-trottâtes",
"refox-trotte",
"refox-trotté",
"refox-trottent",
"refox-trotter",
"refox-trottera",
"refox-trotterai",
"refox-trotteraient",
"refox-trotterais",
"refox-trotterait",
"refox-trotteras",
"refox-trottèrent",
"refox-trotterez",
"refox-trotteriez",
"refox-trotterions",
"refox-trotterons",
"refox-trotteront",
"refox-trottes",
"refox-trottez",
"refox-trottiez",
"refox-trottions",
"refox-trottons",
"regardez-moi",
"régis-borgien",
"Régis-Borgien",
"régis-borgienne",
"Régis-Borgienne",
"régis-borgiennes",
"Régis-Borgiennes",
"régis-borgiens",
"Régis-Borgiens",
"Regis-Breitingen",
"Regnéville-sur-Mer",
"Regnéville-sur-Meuse",
"Régnié-Durette",
"Regnière-Ecluse",
"Regnière-Écluse",
"Rehburg-Loccum",
"Rehlingen-Siersburg",
"Rehm-Flehde-Bargen",
"Reichenbach-Steegen",
"Reichenow-Möglin",
"Reignac-sur-Indre",
"Reigneville-Bocage",
"Reignier-Esery",
"Reignier-Ésery",
"Reims-la-Brûlée",
"reine-claude",
"reine-des-bois",
"reine-des-prés",
"reine-marguerite",
"reines-claudes",
"reines-des-bois",
"reines-des-prés",
"reines-marguerites",
"Reinhardtsdorf-Schöna",
"Rejet-de-Beaulieu",
"relève-gravure",
"relève-gravures",
"relève-moustache",
"relève-moustaches",
"relève-quartier",
"relève-quartiers",
"relève-selle",
"relève-selles",
"Rémalard-en-Perche",
"Rembercourt-Sommaisne",
"Rembercourt-sur-Mad",
"Remda-Teichel",
"Rémering-lès-Hargarten",
"Rémering-lès-Puttelange",
"remettez-vous",
"remicro-onda",
"remicro-ondai",
"remicro-ondaient",
"remicro-ondais",
"remicro-ondait",
"remicro-ondâmes",
"remicro-ondant",
"remicro-ondas",
"remicro-ondasse",
"remicro-ondassent",
"remicro-ondasses",
"remicro-ondassiez",
"remicro-ondassions",
"remicro-ondât",
"remicro-ondâtes",
"remicro-onde",
"remicro-ondé",
"remicro-ondée",
"remicro-ondées",
"remicro-ondent",
"remicro-onder",
"remicro-ondera",
"remicro-onderai",
"remicro-onderaient",
"remicro-onderais",
"remicro-onderait",
"remicro-onderas",
"remicro-ondèrent",
"remicro-onderez",
"remicro-onderiez",
"remicro-onderions",
"remicro-onderons",
"remicro-onderont",
"remicro-ondes",
"remicro-ondés",
"remicro-ondez",
"remicro-ondiez",
"remicro-ondions",
"remicro-ondons",
"Remilly-Aillicourt",
"Remilly-en-Montagne",
"Remilly-les-Pothées",
"Remilly-sur-Lozon",
"Remilly-sur-Tille",
"Remilly-Wirquin",
"remilly-wirquinois",
"Remilly-Wirquinois",
"remilly-wirquinoise",
"Remilly-Wirquinoise",
"remilly-wirquinoises",
"Remilly-Wirquinoises",
"Remire-Montjoly",
"Rémondans-Vaivre",
"remonte-pente",
"remonte-pentes",
"Remoray-Boujeons",
"Rems-Murr",
"remue-ménage",
"remue-ménages",
"remue-méninge",
"remue-méninges",
"remue-queue",
"remue-queues",
"rémy-montais",
"Rémy-Montais",
"rémy-montaise",
"Rémy-Montaise",
"rémy-montaises",
"Rémy-Montaises",
"renarde-garou",
"renard-garou",
"rendez-vous",
"r'endormaient",
"r'endormais",
"r'endormait",
"r'endormant",
"r'endorme",
"r'endorment",
"r'endormes",
"r'endormez",
"r'endormi",
"r'endormie",
"r'endormies",
"r'endormiez",
"r'endormîmes",
"r'endormions",
"r'endormir",
"r'endormira",
"r'endormirai",
"r'endormiraient",
"r'endormirais",
"r'endormirait",
"r'endormiras",
"r'endormirent",
"r'endormirez",
"r'endormiriez",
"r'endormirions",
"r'endormirons",
"r'endormiront",
"r'endormis",
"r'endormisse",
"r'endormissent",
"r'endormisses",
"r'endormissiez",
"r'endormissions",
"r'endormit",
"r'endormît",
"r'endormîtes",
"r'endormons",
"r'endors",
"r'endort",
"Rendsburg-Eckernförde",
"Rennes-en-Grenouilles",
"Rennes-le-Château",
"Rennes-les-Bains",
"rennes-robots",
"Rennes-sur-Loue",
"renouée-bambou",
"rentre-dedans",
"rentr'ouvert",
"rentr'ouverte",
"rentr'ouvertes",
"rentr'ouverts",
"rentr'ouvraient",
"rentr'ouvrais",
"rentr'ouvrait",
"rentr'ouvrant",
"rentr'ouvre",
"rentr'ouvrent",
"rentr'ouvres",
"rentr'ouvrez",
"rentr'ouvriez",
"rentr'ouvrîmes",
"rentr'ouvrions",
"rentr'ouvrir",
"rentr'ouvrira",
"rentr'ouvrirai",
"rentr'ouvriraient",
"rentr'ouvrirais",
"rentr'ouvrirait",
"rentr'ouvriras",
"rentr'ouvrirent",
"rentr'ouvrirez",
"rentr'ouvririez",
"rentr'ouvririons",
"rentr'ouvrirons",
"rentr'ouvriront",
"rentr'ouvris",
"rentr'ouvrisse",
"rentr'ouvrissent",
"rentr'ouvrisses",
"rentr'ouvrissiez",
"rentr'ouvrissions",
"rentr'ouvrit",
"rentr'ouvrît",
"rentr'ouvrîtes",
"rentr'ouvrons",
"renvoi-instruire",
"repetit-déjeuna",
"repetit-déjeunai",
"repetit-déjeunaient",
"repetit-déjeunais",
"repetit-déjeunait",
"repetit-déjeunâmes",
"repetit-déjeunant",
"repetit-déjeunas",
"repetit-déjeunasse",
"repetit-déjeunassent",
"repetit-déjeunasses",
"repetit-déjeunassiez",
"repetit-déjeunassions",
"repetit-déjeunât",
"repetit-déjeunâtes",
"repetit-déjeune",
"repetit-déjeuné",
"repetit-déjeunent",
"repetit-déjeuner",
"repetit-déjeunera",
"repetit-déjeunerai",
"repetit-déjeuneraient",
"repetit-déjeunerais",
"repetit-déjeunerait",
"repetit-déjeuneras",
"repetit-déjeunèrent",
"repetit-déjeunerez",
"repetit-déjeuneriez",
"repetit-déjeunerions",
"repetit-déjeunerons",
"repetit-déjeuneront",
"repetit-déjeunes",
"repetit-déjeunez",
"repetit-déjeuniez",
"repetit-déjeunions",
"repetit-déjeunons",
"repique-niqua",
"repique-niquai",
"repique-niquaient",
"repique-niquais",
"repique-niquait",
"repique-niquâmes",
"repique-niquant",
"repique-niquas",
"repique-niquasse",
"repique-niquassent",
"repique-niquasses",
"repique-niquassiez",
"repique-niquassions",
"repique-niquât",
"repique-niquâtes",
"repique-nique",
"repique-niqué",
"repique-niquent",
"repique-niquer",
"repique-niquera",
"repique-niquerai",
"repique-niqueraient",
"repique-niquerais",
"repique-niquerait",
"repique-niqueras",
"repique-niquèrent",
"repique-niquerez",
"repique-niqueriez",
"repique-niquerions",
"repique-niquerons",
"repique-niqueront",
"repique-niques",
"repique-niquez",
"repique-niquiez",
"repique-niquions",
"repique-niquons",
"répondeur-enregistreur",
"répondeur-enregistreurs",
"repose-pied",
"repose-pieds",
"repose-poignet",
"repose-poignets",
"repose-tête",
"repose-têtes",
"requin-baleine",
"requin-chabot",
"requin-chat",
"requin-chats",
"requin-citron",
"requin-corail",
"requin-crocodile",
"requin-garou",
"requin-griset",
"requin-hâ",
"requin-maquereau",
"requin-marteau",
"requin-nourrice",
"requin-renard",
"requins-baleines",
"requins-citrons",
"requins-crocodiles",
"requins-garous",
"requins-hâ",
"requins-marteaux",
"requins-taupes",
"requins-tigres",
"requin-taupe",
"requin-taureau",
"requin-tigre",
"requin-vache",
"requin-zèbre",
"r'es",
"résino-gommeux",
"Ressons-l'Abbaye",
"Ressons-le-Long",
"Ressons-sur-Matz",
"r'est",
"restaurant-bar",
"restaurant-bistro",
"restaurant-brasserie",
"restaurant-pub",
"restaurants-bistros",
"reste-avec",
"resto-bar",
"resto-bistro",
"resto-brasserie",
"rest-o-pack",
"resto-pub",
"r'étaient",
"r'étais",
"r'était",
"r'étant",
"r'été",
"r'êtes",
"r'étiez",
"r'étions",
"retraite-chapeau",
"retraites-chapeaux",
"r'être",
"retroussons-nos-manches",
"Reuil-en-Brie",
"Reuilly-Sauvigny",
"Reuil-sur-Brêche",
"Reulle-Vergy",
"réunion-bilan",
"réunions-bilan",
"rêve-creux",
"réveille-matin",
"réveille-matins",
"réveil-matin",
"Revel-Tourdan",
"revenant-bon",
"revenants-bons",
"revenez-y",
"Reventin-Vaugris",
"Revest-des-Brousses",
"Revest-du-Bion",
"Revest-les-Roches",
"Revest-Saint-Martin",
"Revigny-sur-Ornain",
"Réville-aux-Bois",
"rex-castor",
"rex-castors",
"rez-de-chaussée",
"rez-de-cour",
"rez-de-jardin",
"rez-mur",
"Rheda-Wiedenbrück",
"Rheingau-Taunus",
"Rhêmes-Notre-Dame",
"Rhêmes-Saint-Georges",
"Rhénanie-du-Nord-Westphalie",
"Rhénanie-Palatinat",
"rhéo-épaississant",
"rhéo-épaississante",
"rhéo-épaississantes",
"rhéo-épaississants",
"rhéo-fluidifiant",
"rhéo-fluidifiante",
"rhéo-fluidifiantes",
"rhéo-fluidifiants",
"rhéto-roman",
"rhéto-romane",
"rhéto-romanes",
"rhéto-romans",
"Rhin-Berg",
"Rhin-Erft",
"Rhin-Hunsrück",
"Rhin-Lahn",
"Rhin-Neckar",
"Rhin-Palatinat",
"Rhin-Sieg",
"Rhode-Sainte-Agathe",
"Rhode-Saint-Genèse",
"Rhode-Saint-Pierre",
"rhodesian-ridgeback",
"Rhône-Alpes",
"Rhön-Grabfeld",
"Ria-Sirach",
"ria-sirachois",
"Ria-Sirachois",
"ria-sirachoise",
"Ria-Sirachoise",
"ria-sirachoises",
"Ria-Sirachoises",
"Ribaute-les-Tavernes",
"Ribécourt-Dreslincourt",
"Ribécourt-la-Tour",
"Ribemont-sur-Ancre",
"Ribnitz-Damgarten",
"ric-à-rac",
"Ricarville-du-Val",
"Richebourg-Saint-Vaast",
"Richelieu-Yamaskois",
"rick-rolla",
"rick-rollai",
"rick-rollaient",
"rick-rollais",
"rick-rollait",
"rick-rollâmes",
"rick-rollant",
"rick-rollas",
"rick-rollasse",
"rick-rollassent",
"rick-rollasses",
"rick-rollassiez",
"rick-rollassions",
"rick-rollât",
"rick-rollâtes",
"rick-rolle",
"rick-rollé",
"rick-rollée",
"rick-rollées",
"rick-rollent",
"rick-roller",
"rick-rollera",
"rick-rollerai",
"rick-rolleraient",
"rick-rollerais",
"rick-rollerait",
"rick-rolleras",
"rick-rollèrent",
"rick-rollerez",
"rick-rolleriez",
"rick-rollerions",
"rick-rollerons",
"rick-rolleront",
"rick-rolles",
"rick-rollés",
"rick-rollez",
"rick-rolliez",
"rick-rollions",
"rick-rollons",
"ric-rac",
"Riec-sur-Bélon",
"Ried-Brig",
"Rielasingen-Worblingen",
"Riel-les-Eaux",
"Riencourt-lès-Bapaume",
"Riencourt-lès-Cagnicourt",
"Rieschweiler-Mühlbach",
"Rietheim-Weilheim",
"Rietz-Neuendorf",
"Rietzneuendorf-Staakow",
"Rieutort-de-Randon",
"Rieux-de-Pelleport",
"Rieux-en-Cambrésis",
"Rieux-en-Val",
"rieux-en-valois",
"Rieux-en-Valois",
"rieux-en-valoise",
"Rieux-en-Valoise",
"rieux-en-valoises",
"Rieux-en-Valoises",
"Rieux-Minervois",
"Rieux-Volvestre",
"rigaud-montain",
"Rigaud-Montain",
"rigaud-montaine",
"Rigaud-Montaine",
"rigaud-montaines",
"Rigaud-Montaines",
"rigaud-montains",
"Rigaud-Montains",
"Rigil-K",
"Rignieux-le-Franc",
"Rigny-la-Nonneuse",
"Rigny-la-Salle",
"Rigny-le-Ferron",
"Rigny-Saint-Martin",
"Rigny-sur-Arroux",
"Rigny-Ussé",
"rigny-usséen",
"Rigny-Usséen",
"rigny-usséenne",
"Rigny-Usséenne",
"rigny-usséennes",
"Rigny-Usséennes",
"rigny-usséens",
"Rigny-Usséens",
"Rig-Véda",
"Rijssen-Holten",
"Rilhac-Lastours",
"Rilhac-Rancon",
"Rilhac-Treignac",
"Rilhac-Xaintrie",
"Rilland-Bath",
"Rillieux-la-Pape",
"Rilly-aux-Oies",
"Rilly-la-Montagne",
"Rilly-Sainte-Syre",
"Rilly-sur-Aisne",
"Rilly-sur-Loire",
"Rilly-sur-Vienne",
"Rimbach-près-Guebwiller",
"Rimbach-près-Masevaux",
"Rimbez-et-Baudiets",
"Rimon-et-Savel",
"rince-bouche",
"rince-bouches",
"rince-bouteille",
"rince-bouteilles",
"rince-doigt",
"rince-doigts",
"Riom-ès-Montagnes",
"Riom-Parsonz",
"Rion-des-Landes",
"Rioux-Martin",
"Risch-Rotkreuz",
"Ris-Orangis",
"risque-tout",
"Risum-Lindholm",
"Rivas-Vaciamadrid",
"Rive-de-Gier",
"Rivedoux-Plage",
"Rive-Nord",
"Rives-en-Seine",
"Rive-Sud",
"Rive-Sudois",
"Riviera-Pays-d'Enhaut",
"Rivière-Devant",
"Rivière-du-Loup",
"Rivière-les-Fosses",
"Rivière-Pilote",
"Rivière-Saas-et-Gourby",
"Rivière-Salée",
"Rivières-le-Bois",
"Rivière-sur-Tarn",
"Rizaucourt-Buchey",
"riz-pain-sel",
"R'n'B",
"road-book",
"road-books",
"Roannes-Saint-Mary",
"roast-beef",
"roast-beefs",
"robe-chandail",
"robe-housse",
"Robert-Espagne",
"robert-le-diable",
"Robert-Magny",
"robert-messin",
"Robert-Messin",
"robert-messine",
"Robert-Messine",
"robert-messines",
"Robert-Messines",
"robert-messins",
"Robert-Messins",
"robes-chandails",
"robes-housses",
"Robiac-Rochessadoule",
"Robleda-Cervantes",
"robot-chien",
"robots-chiens",
"roche-blanchais",
"Roche-Blanchais",
"roche-blanchaise",
"Roche-Blanchaise",
"roche-blanchaises",
"Roche-Blanchaises",
"Roche-Charles-la-Mayrand",
"Roche-d'Agoux",
"Roche-en-Régnier",
"Roche-et-Méry",
"Roche-et-Raucourt",
"Rochefort-du-Gard",
"Rochefort-en-Terre",
"Rochefort-en-Valdaine",
"Rochefort-en-Yvelines",
"Rochefort-Montagne",
"Rochefort-Samson",
"Rochefort-sur-Brévon",
"Rochefort-sur-la-Côte",
"Rochefort-sur-Loire",
"Rochefort-sur-Nenon",
"Roche-la-Molière",
"Roche-le-Peyroux",
"Roche-lès-Clerval",
"Roche-lez-Beaupré",
"roche-mère",
"roche-papier-ciseaux",
"Roche-Saint-Secret-Béconne",
"Roches-Bettaincourt",
"Roches-lès-Blamont",
"roches-mères",
"Roches-Prémarie-Andillé",
"Roches-sur-Marne",
"Roches-sur-Rognon",
"Roche-sur-Linotte-et-Sorans-les-Cordiers",
"Rochetaillée-sur-Saône",
"Rochy-Condé",
"rock-a-billy",
"rocking-chair",
"rocking-chairs",
"rock'n'roll",
"Roclenge-Looz",
"Roclenge-sur-Geer",
"Roc-Libre",
"Rocourt-Saint-Martin",
"Rocquigny-la-Hardoye",
"Rodengo-Saiano",
"Rödersheim-Gronau",
"Roesbrugge-Haringe",
"Roézé-sur-Sarthe",
"roge-bougeron",
"Roge-Bougeron",
"roge-bougeronne",
"Roge-Bougeronne",
"roge-bougeronnes",
"Roge-Bougeronnes",
"roge-bougerons",
"Roge-Bougerons",
"roger-bontemps",
"rogne-cul",
"rogne-pied",
"rogne-pieds",
"rogne-salaires",
"Rogny-les-Sept-Ecluses",
"Rogny-les-Sept-Écluses",
"Rohrbach-lès-Bitche",
"roi-de-rats",
"Roinville-sous-Auneau",
"rois-de-rats",
"Roi-Soleil",
"Roissy-en-Brie",
"Roissy-en-France",
"Rollegem-Kapelle",
"Rolleghem-Cappelle",
"roller-derby",
"roller-derbys",
"roll-out",
"roll-outs",
"Romagne-sous-les-Côtes",
"Romagne-sous-Montfaucon",
"Romagny-Fontenay",
"Romagny-sous-Rougemont",
"Romain-aux-Bois",
"Romainmôtier-Envy",
"Romain-sur-Meuse",
"Romanèche-Thorins",
"Romanel-sur-Lausanne",
"Romanel-sur-Morges",
"roman-feuilleton",
"roman-fleuve",
"roman-photo",
"roman-photos",
"romans-feuilletons",
"romans-fleuves",
"romans-photos",
"Romans-sur-Isère",
"Rombach-le-Franc",
"Rombies-et-Marchipont",
"Romeny-sur-Marne",
"Romilly-la-Puthenaye",
"Romilly-sur-Aigre",
"Romilly-sur-Andelle",
"Romilly-sur-Seine",
"Romorantin-Lanthenay",
"rompt-pierre",
"rompt-pierres",
"Roncherolles-en-Bray",
"Roncherolles-sur-le-Vivier",
"rond-de-cuir",
"ronde-bosse",
"ronde-bosses",
"rondes-bosses",
"rond-point",
"rond-ponna",
"rond-ponnai",
"rond-ponnaient",
"rond-ponnais",
"rond-ponnait",
"rond-ponnâmes",
"rond-ponnant",
"rond-ponnas",
"rond-ponnasse",
"rond-ponnassent",
"rond-ponnasses",
"rond-ponnassiez",
"rond-ponnassions",
"rond-ponnât",
"rond-ponnâtes",
"rond-ponne",
"rond-ponné",
"rond-ponnent",
"rond-ponner",
"rond-ponnera",
"rond-ponnerai",
"rond-ponneraient",
"rond-ponnerais",
"rond-ponnerait",
"rond-ponneras",
"rond-ponnèrent",
"rond-ponnerez",
"rond-ponneriez",
"rond-ponnerions",
"rond-ponnerons",
"rond-ponneront",
"rond-ponnes",
"rond-ponnez",
"rond-ponniez",
"rond-ponnions",
"rond-ponnons",
"ronds-de-cuir",
"ronds-points",
"ronge-bois",
"ronge-maille",
"rongo-rongo",
"ron-ron",
"Ronzo-Chienis",
"Roôcourt-la-Côte",
"Roodt-sur-Eisch",
"Roodt-sur-Syre",
"Roost-Warendin",
"roost-warendinois",
"Roost-Warendinois",
"roost-warendinoise",
"Roost-Warendinoise",
"roost-warendinoises",
"Roost-Warendinoises",
"Roquebrune-Cap-Martin",
"Roquebrune-sur-Argens",
"Roquecourbe-Minervois",
"Roquefort-de-Sault",
"Roquefort-des-Corbières",
"Roquefort-la-Bédoule",
"Roquefort-les-Cascades",
"Roquefort-les-Pins",
"Roquefort-sur-Garonne",
"Roquefort-sur-Soulzon",
"Roquelaure-Saint-Aubin",
"Roquestéron-Grasse",
"Rorbach-lès-Dieuze",
"Ro-Ro",
"Ro-Ros",
"Rosay-sur-Lieure",
"rose-croix",
"rose-de-mer",
"Rose-Marie",
"rose-marine",
"Rosenthal-Bielatal",
"roses-marines",
"Roset-Fluans",
"Rosières-aux-Salines",
"Rosières-devant-Bar",
"Rosières-en-Blois",
"Rosières-en-Haye",
"Rosières-en-Santerre",
"Rosières-près-Troyes",
"Rosières-sur-Barbèche",
"Rosières-sur-Mance",
"Rosiers-d'Egletons",
"Rosiers-d'Égletons",
"Rosiers-de-Juillac",
"Rosnay-l'Hôpital",
"Rosny-sous-Bois",
"Rosny-sur-Seine",
"Rosoux-Crenwick",
"Rosoy-en-Multien",
"Rosoy-le-Vieil",
"Rosoy-sur-Amance",
"rosti-montois",
"Rosti-Montois",
"rosti-montoise",
"Rosti-Montoise",
"rosti-montoises",
"Rosti-Montoises",
"Rotheux-Rimière",
"Rötsweiler-Nockenthal",
"Rottach-Egern",
"Rottal-Inn",
"Rouessé-Fontaine",
"Rouessé-Vassé",
"Rouffiac-d'Aude",
"Rouffiac-des-Corbières",
"Rouffiac-Tolosan",
"Rouffignac-de-Sigoulès",
"Rouffignac-Saint-Cernin-de-Reilhac",
"rouge-aile",
"rouge-bord",
"rouge-brun",
"rouge-flasher",
"rouge-gorge",
"rouge-herbe",
"rouge-herbes",
"Rougemont-le-Château",
"rouge-noir",
"Rouge-Perriers",
"rouge-pie",
"rouge-queue",
"rouges-ailes",
"rouges-gorges",
"rouges-queues",
"rouget-barbet",
"rouget-grondin",
"Rouilly-Sacey",
"Rouilly-Saint-Loup",
"roulage-décollage",
"roulé-boulé",
"roule-goupille",
"roule-goupilles",
"rouler-bouler",
"roulé-saucisse",
"roulés-boulés",
"roule-ta-bosse",
"Roullet-Saint-Estèphe",
"roullet-stéphanois",
"Roullet-Stéphanois",
"roullet-stéphanoise",
"Roullet-Stéphanoise",
"roullet-stéphanoises",
"Roullet-Stéphanoises",
"roul-sa-bosse",
"Rou-Marson",
"Roumazières-Loubert",
"Rouperroux-le-Coquet",
"Rousseau-esque",
"Rousseau-esques",
"rousses-têtes",
"rousse-tête",
"Rousset-les-Vignes",
"Roussillon-en-Morvan",
"Roussy-le-Village",
"r'ouvert",
"r'ouverte",
"r'ouvertes",
"r'ouverts",
"r'ouvraient",
"r'ouvrais",
"r'ouvrait",
"r'ouvrant",
"Rouvray-Catillon",
"Rouvray-Saint-Denis",
"Rouvray-Sainte-Croix",
"Rouvray-Saint-Florentin",
"r'ouvre",
"r'ouvrent",
"r'ouvres",
"Rouvres-en-Multien",
"Rouvres-en-Plaine",
"Rouvres-en-Woëvre",
"Rouvres-en-Xaintois",
"Rouvres-la-Chétive",
"Rouvres-les-Bois",
"Rouvres-les-Vignes",
"Rouvres-Saint-Jean",
"Rouvres-sous-Meilly",
"Rouvres-sur-Aube",
"r'ouvrez",
"r'ouvriez",
"r'ouvrîmes",
"r'ouvrions",
"r'ouvrir",
"r'ouvrira",
"r'ouvrirai",
"r'ouvriraient",
"r'ouvrirais",
"r'ouvrirait",
"r'ouvriras",
"r'ouvrirent",
"r'ouvrirez",
"r'ouvririez",
"r'ouvririons",
"r'ouvrirons",
"r'ouvriront",
"r'ouvris",
"r'ouvrisse",
"r'ouvrissent",
"r'ouvrisses",
"r'ouvrissiez",
"r'ouvrissions",
"r'ouvrit",
"r'ouvrît",
"r'ouvrîtes",
"Rouvrois-sur-Meuse",
"Rouvrois-sur-Othain",
"r'ouvrons",
"Rouvroy-en-Santerre",
"Rouvroy-les-Merles",
"Rouvroy-les-Pothées",
"Rouvroy-Ripont",
"Rouvroy-sur-Audry",
"Rouvroy-sur-Marne",
"Rouvroy-sur-Serre",
"Rouxmesnil-Bouteilles",
"roux-mirien",
"Roux-Mirien",
"Roux-Mirienne",
"Roux-Miroir",
"Rouy-le-Grand",
"Rouy-le-Petit",
"Rouziers-de-Touraine",
"Roville-aux-Chênes",
"Roville-devant-Bayon",
"Royaucourt-et-Chailvet",
"Royaume-Uni",
"Roy-Boissy",
"Royère-de-Vassivière",
"Roye-sur-Matz",
"Rozay-en-Brie",
"Rozet-Saint-Albin",
"Rozier-Côtes-d'Aurec",
"Rozier-en-Donzy",
"Rozières-en-Beauce",
"Rozières-sur-Crise",
"Rozières-sur-Mouzon",
"Roziers-Saint-Georges",
"Roz-Landrieux",
"Rozoy-Bellevalle",
"Rozoy-le-Vieil",
"Rozoy-sur-Serre",
"Roz-sur-Couesnon",
"RS-232",
"Ruan-sur-Egvonne",
"Rubécourt-et-Lamécourt",
"Rudeau-Ladosse",
"Rudolfstetten-Friedlisberg",
"Rüdtligen-Alchenflüh",
"Rueil-la-Gadelière",
"Rueil-Malmaison",
"Ruelle-sur-Touvre",
"Rueyres-les-Prés",
"Ruffey-le-Château",
"Ruffey-lès-Beaune",
"Ruffey-lès-Echirey",
"Ruffey-lès-Échirey",
"Ruffey-sur-Seille",
"rufino-sulfurique",
"rufino-sulfuriques",
"Ruillé-en-Champagne",
"Ruillé-Froid-Fonds",
"Ruillé-le-Gravelais",
"Ruillé-sur-Loir",
"ruine-babine",
"ruine-babines",
"Rullac-Saint-Cirq",
"Rumersheim-le-Haut",
"Rumilly-en-Cambrésis",
"Rumilly-lès-Vaudes",
"Ruppach-Goldhausen",
"Rupt-aux-Nonains",
"Rupt-devant-Saint-Mihiel",
"Rupt-en-Woëvre",
"Rupt-sur-Moselle",
"Rupt-sur-Othain",
"Rupt-sur-Saône",
"Rurange-lès-Thionville",
"russo-allemand",
"russo-allemande",
"russo-allemandes",
"russo-allemands",
"russo-américain",
"russo-japonaise",
"russo-polonaise",
"Russy-Bémont",
"Ruttersdorf-Lotschen",
"rü'üsá",
"Ruynes-en-Margeride",
"R.-V.",
"S-6-verbénol",
"Saâcy-sur-Marne",
"Saalburg-Ebersdorf",
"Saaldorf-Surheim",
"Saale-Holzland",
"Saale-Orla",
"Saalfeld-Rudolstadt",
"Saâne-Saint-Just",
"Saar-Mark",
"Saas-Almagell",
"Saas-Balen",
"Saas-Fee",
"Saas-Grund",
"Sabadel-Latronquière",
"Sabadel-Lauzès",
"sa'ban",
"Sablé-sur-Sarthe",
"Sablons-sur-Huisne",
"sabre-peuple",
"saccharo-glycose",
"Saceda-Trasierra",
"Sacierges-Saint-Martin",
"sac-jacking",
"Saconin-et-Breuil",
"sac-poubelle",
"sacré-coeur",
"sacré-cœur",
"Sacré-Cœur",
"Sacré-Cœurin",
"Sacré-Cœurois",
"sacro-iliaques",
"sacro-lombaire",
"sacro-saint",
"sacro-sainte",
"sacro-saintement",
"sacro-saintes",
"sacro-saints",
"sacro-vertébral",
"sacs-poubelle",
"sacs-poubelles",
"Sacy-le-Grand",
"Sacy-le-Petit",
"sado-maso",
"sado-masochisme",
"sado-masochiste",
"sado-masochistes",
"safari-parc",
"safari-parcs",
"sage-femme",
"sage-homme",
"sages-femmes",
"Sagnes-et-Goudoulet",
"Saguenay-Jeannois",
"Saguenay-Lac-Saint-Jean",
"sahélo-saharien",
"sahélo-saharienne",
"sahélo-sahariennes",
"sahélo-sahariens",
"saigne-nez",
"Saillat-sur-Vienne",
"Sail-les-Bains",
"Sailly-Achâtel",
"Sailly-au-Bois",
"Sailly-en-Ostrevent",
"Sailly-Flibeaucourt",
"Sailly-Labourse",
"Sailly-Laurette",
"Sailly-le-Sec",
"Sailly-lez-Cambrai",
"Sailly-lez-Lannoy",
"Sailly-Saillisel",
"Sailly-sur-la-Lys",
"Sail-sous-Couzan",
"Sain-Bel",
"sain-belois",
"Sain-Belois",
"sain-beloise",
"Sain-Beloise",
"sain-beloises",
"Sain-Beloises",
"sain-bois",
"Saincaize-Meauce",
"sain-foin",
"Sainghin-en-Mélantois",
"Sainghin-en-Weppes",
"Sains-du-Nord",
"Sains-en-Amiénois",
"Sains-en-Gohelle",
"Sains-lès-Fressin",
"Sains-lès-Marquion",
"Sains-lès-Pernes",
"Sains-Morainvillers",
"Sains-Richaumont",
"Saintry-sur-Seine",
"Saires-la-Verrerie",
"saisie-arrêt",
"saisie-attribution",
"saisie-brandon",
"saisie-exécution",
"saisie-gagerie",
"saisie-revendication",
"saisies-arrêts",
"saisies-attributions",
"saisies-brandons",
"saisies-exécutions",
"saisies-gageries",
"saisies-revendications",
"saisir-arrêter",
"saisir-brandonner",
"saisir-exécuter",
"saisir-gager",
"saisir-revendiquer",
"salafo-sioniste",
"salaire-coût",
"salaire-coûts",
"Salaise-sur-Sanne",
"salamandre-tigre",
"Salies-de-Béarn",
"Salies-du-Salat",
"Salignac-de-Mirambeau",
"Salignac-de-Pons",
"Salignac-Eyvignes",
"Salignac-Eyvigues",
"Salignac-sur-Charente",
"Saligny-le-Vif",
"Saligny-sur-Roudon",
"Salins-Fontaine",
"Salins-les-Bains",
"Salins-les-Thermes",
"Sallèles-Cabardès",
"Sallèles-d'Aude",
"salle-prunetais",
"Salle-Prunetais",
"salle-prunetaise",
"Salle-Prunetaise",
"salle-prunetaises",
"Salle-Prunetaises",
"Salles-Adour",
"Salles-Arbuissonnas-en-Beaujolais",
"Salles-Courbatiès",
"Salles-Curan",
"Salles-d'Angles",
"Salles-d'Armagnac",
"Salles-d'Aude",
"Salles-de-Barbezieux",
"Salles-de-Belvès",
"Salles-de-Villefagnan",
"Salles-en-Toulon",
"Salles-et-Pratviel",
"Salles-la-Source",
"Salles-Lavalette",
"Salles-lès-Aulnay",
"Salles-Mongiscard",
"salles-sourçois",
"Salles-Sourçois",
"salles-sourçoise",
"Salles-Sourçoise",
"salles-sourçoises",
"Salles-Sourçoises",
"Salles-sous-Bois",
"Salles-sur-Garonne",
"Salles-sur-l'Hers",
"Salles-sur-Mer",
"Salm-en-Vosges",
"Salm-Salm",
"Salon-de-Provence",
"Salon-la-Tour",
"Salornay-sur-Guye",
"salpingo-pharyngien",
"Salses-le-Château",
"Salt-en-Donzy",
"Salvagnac-Cajarc",
"Salvatierra-Agurain",
"salve-d'honneur",
"salves-d'honneur",
"Samois-sur-Seine",
"(S)-amphétamine",
"Sampigny-lès-Maranges",
"Samsons-Lion",
"sam'suffit",
"sam'suffits",
"Sana'a",
"Sanary-sur-Mer",
"san-benito",
"san-bérinois",
"San-Bérinois",
"san-bérinoise",
"San-Bérinoise",
"san-bérinoises",
"San-Bérinoises",
"Sancey-le-Grand",
"Sancey-le-Long",
"san-claudien",
"San-Claudien",
"San-Crucien",
"Sancti-Spíritus",
"sancto-bénédictin",
"Sancto-Bénédictin",
"sancto-bénédictine",
"Sancto-Bénédictine",
"sancto-bénédictines",
"Sancto-Bénédictines",
"sancto-bénédictins",
"Sancto-Bénédictins",
"sancto-julianais",
"Sancto-Julianais",
"sancto-julianaise",
"Sancto-Julianaise",
"sancto-julianaises",
"Sancto-Julianaises",
"sancto-prixin",
"Sancto-Prixin",
"sancto-prixine",
"Sancto-Prixine",
"sancto-prixines",
"Sancto-Prixines",
"sancto-prixins",
"Sancto-Prixins",
"Sancy-les-Cheminots",
"Sancy-lès-Provins",
"san-damianais",
"San-Damianais",
"san-damianaise",
"San-Damianaise",
"san-damianaises",
"San-Damianaises",
"San-Damiano",
"san-denien",
"San-Denien",
"san-denienne",
"San-Denienne",
"san-deniennes",
"San-Deniennes",
"san-deniens",
"San-Deniens",
"Sandersdorf-Brehna",
"san-desiderois",
"San-Desiderois",
"san-desideroise",
"San-Desideroise",
"san-desideroises",
"San-Desideroises",
"san-farcios",
"San-Farcios",
"san-farciose",
"San-Farciose",
"san-farcioses",
"San-Farcioses",
"san-ferrois",
"San-Ferrois",
"san-ferroise",
"San-Ferroise",
"san-ferroises",
"San-Ferroises",
"San-Gavino-d'Ampugnani",
"San-Gavino-di-Carbini",
"San-Gavino-di-Fiumorbo",
"San-Gavino-di-Tenda",
"sang-de-bourbe",
"sang-de-dragon",
"san-genestois",
"San-Genestois",
"san-genestoise",
"San-Genestoise",
"san-genestoises",
"San-Genestoises",
"san-germinois",
"San-Germinois",
"san-germinoise",
"San-Germinoise",
"san-germinoises",
"San-Germinoises",
"sang-froid",
"sang-gris",
"San-Giovanni-di-Moriani",
"San-Giuliano",
"sang-mêlé",
"Sang-mêlé",
"sang-mêlés",
"Sang-mêlés",
"Sanilhac-Sagriès",
"sankaku-jime",
"san-lagiron",
"San-Lagiron",
"san-lagirone",
"San-Lagirone",
"san-lagirones",
"San-Lagirones",
"san-lagirons",
"San-Lagirons",
"San-Lorenzo",
"San-Martino-di-Lota",
"san-martinois",
"San-Martinois",
"san-martinoise",
"San-Martinoise",
"san-martinoises",
"San-Martinoises",
"san-miardère",
"San-Miardère",
"san-miardères",
"San-Miardères",
"San-Nicolao",
"san-palous",
"San-Palous",
"san-palouse",
"San-Palouse",
"san-palouses",
"San-Palouses",
"san-pétri-montin",
"San-Pétri-Montin",
"san-pétri-montine",
"San-Pétri-Montine",
"san-pétri-montines",
"San-Pétri-Montines",
"san-pétri-montins",
"San-Pétri-Montins",
"san-pierran",
"San-Pierran",
"san-pierrane",
"San-Pierrane",
"san-pierranes",
"San-Pierranes",
"san-pierrans",
"San-Pierrans",
"San-Priode",
"san-priot",
"San-Priot",
"san-priote",
"san-priotes",
"San-Priotes",
"san-priots",
"San-Priots",
"san-rémois",
"San-Rémois",
"san-rémoise",
"San-Rémoise",
"san-rémoises",
"San-Rémoises",
"Sanry-lès-Vigy",
"Sanry-sur-Nied",
"Sansac-de-Marmiesse",
"Sansac-Veinazès",
"san-salvatorien",
"San-Salvatorien",
"san-salvatorienne",
"San-Salvatorienne",
"san-salvatoriennes",
"San-Salvatoriennes",
"san-salvatoriens",
"San-Salvatoriens",
"Sanssac-l'Eglise",
"Sanssac-l'Église",
"Sant'Agapito",
"Sant'Agnello",
"Sant'Agostino",
"Sant'Alfio",
"Santa-Lucia-di-Mercurio",
"Santa-Lucia-di-Moriani",
"Santa-Maria-di-Lota",
"Santa-Maria-Figaniella",
"Santa-Maria-Poggio",
"Santa-Maria-Siché",
"Sant'Anastasia",
"Sant'Andréa-di-Bozio",
"Sant'Andréa-di-Cotone",
"Sant'Andréa-di-Tallano",
"Sant'Andréa-d'Orcino",
"Sant'Antimo",
"Sant'Antioco",
"Sant'Antonino",
"Sant'Antonio",
"Sant'Apollinare",
"Sant'Arcangelo",
"Santa-Reparata-di-Balagna",
"Santa-Reparata-di-Moriani",
"Sant'Arpino",
"Sant'Arsenio",
"Sant'Elena",
"Santiago-Pontones",
"santi-johanien",
"Santi-Johanien",
"santi-johanienne",
"Santi-Johanienne",
"santi-johaniennes",
"Santi-Johaniennes",
"santi-johaniens",
"Santi-Johaniens",
"Sant'Ippolito",
"Sant'Olcese",
"santoline-cyprès",
"Sant'Omero",
"Sant'Onofrio",
"Santo-Pietro-di-Tenda",
"Santo-Pietro-di-Venaco",
"Sant'Oreste",
"Santpoort-Noord",
"Santpoort-Zuid",
"Sant'Urbano",
"Sanvignes-les-Mines",
"san-vitournaire",
"San-Vitournaire",
"san-vitournaires",
"San-Vitournaires",
"Saône-et-Loire",
"Sap-en-Auge",
"sapeur-pompier",
"sapeurs-pompiers",
"sapeuse-pompière",
"sapeuses-pompières",
"Sapogne-et-Feuchères",
"Sapogne-Feuchères",
"Sapogne-sur-Marche",
"sarclo-buttage",
"sarclo-buttages",
"sarco-épiplocèle",
"sarco-épiplomphale",
"sarco-épiplomphales",
"sarco-hydrocèle",
"sarco-hydrocèles",
"Sardy-lès-Epiry",
"Sardy-lès-Épiry",
"Sargé-lès-le-Mans",
"Sargé-sur-Braye",
"Sariac-Magnoac",
"Sari-di-Porto-Vecchio",
"Sari-d'Orcino",
"Sari-Solenzara",
"Sarlat-la-Canéda",
"Sarliac-sur-l'Isle",
"Saron-sur-Aube",
"Sarre-Palatinat",
"Sarre-Union",
"sarre-unionnais",
"Sarre-Unionnais",
"sarre-unionnaise",
"Sarre-Unionnaise",
"sarre-unionnaises",
"Sarre-Unionnaises",
"Sarriac-Bigorre",
"Sarrola-Carcopino",
"Sarroux-Saint-Julien",
"Sars-et-Rosières",
"Sars-la-Bruyère",
"Sars-la-Buissière",
"Sars-le-Bois",
"Sars-Poteries",
"Sart-Bernard",
"Sart-Custinne",
"Sart-Dames-Avelines",
"sart-dames-avelinois",
"Sart-Dames-Avelinois",
"Sart-Dames-Avelinoise",
"Sart-en-Fagne",
"Sart-Eustache",
"sart-eustachois",
"Sart-Eustachois",
"Sart-Eustachoise",
"Sartilly-Baie-Bocage",
"Sart-Messire-Guillaume",
"Sart-Risbart",
"sart-risbartois",
"Sart-Risbartois",
"Sart-Risbartoise",
"Sart-Saint-Laurent",
"Sas-de-Gand",
"Sassen-Trantow",
"Sassetot-le-Malgardé",
"Sassetot-le-Mauconduit",
"Sassey-sur-Meuse",
"Sassierges-Saint-Germain",
"satellites-espions",
"Sathonay-Camp",
"Sathonay-Village",
"sati-drap",
"Satolas-et-Bonce",
"Sauchy-Cauchy",
"Sauchy-Lestrée",
"Saucourt-sur-Rognon",
"sauf-conduit",
"sauf-conduits",
"Saugnac-et-Cambran",
"saugnac-et-muretois",
"Saugnac-et-Muretois",
"saugnac-et-muretoise",
"Saugnac-et-Muretoise",
"saugnac-et-muretoises",
"Saugnac-et-Muretoises",
"Saugnacq-et-Muret",
"Sauguis-Saint-Etienne",
"Sauguis-Saint-Étienne",
"Saulces-aux-Bois",
"Saulces-Champenoises",
"Saulces-Monclin",
"Saulce-sur-Rhône",
"Saulces-Vieille",
"Saulchoy-sous-Poix",
"Saulchoy-sur-Davenescourt",
"Saulcy-sur-Meurthe",
"Saulgé-l'Hôpital",
"Sauliac-sur-Célé",
"Saulmory-et-Villefranche",
"Saulon-la-Chapelle",
"Saulon-la-Rue",
"Sault-Brénaz",
"Saultchevreuil-du-Tronchet",
"Sault-de-Navailles",
"Sault-lès-Rethel",
"sault-rethelois",
"Sault-Rethelois",
"sault-retheloise",
"Sault-Retheloise",
"sault-retheloises",
"Sault-Retheloises",
"Sault-Saint-Remy",
"Saulx-le-Duc",
"Saulx-lès-Champlon",
"Saulx-les-Chartreux",
"Saulx-Marchais",
"Saulxures-lès-Bulgnéville",
"Saulxures-lès-Nancy",
"Saulxures-lès-Vannes",
"Saulxures-sur-Moselotte",
"Saulzais-le-Potier",
"Saulzet-le-Froid",
"Saumane-de-Vaucluse",
"Saumont-la-Poterie",
"Sauqueuse-Saint-Lucien",
"Sauret-Besserve",
"Saussay-la-Campagne",
"Sausset-les-Pins",
"Sausseuzemare-en-Caux",
"saut-de-lit",
"saut-de-lits",
"saut-de-loup",
"saut-de-mouton",
"saute-au-paf",
"saute-bouchon",
"saute-bouchons",
"saute-en-barque",
"saute-en-bas",
"saute-mouton",
"saute-moutons",
"saute-ruisseau",
"saute-ruisseaux",
"sauts-de-lit",
"sauts-de-mouton",
"Sauvage-Magny",
"Sauvagnat-Sainte-Marthe",
"sauve-l'honneur",
"sauve-qui-peut",
"sauve-rabans",
"Sauveterre-de-Béarn",
"Sauveterre-de-Comminges",
"Sauveterre-de-Guyenne",
"Sauveterre-de-Rouergue",
"Sauveterre-la-Lémance",
"Sauveterre-Saint-Denis",
"sauve-vie",
"Sauviat-sur-Vige",
"Sauvigney-lès-Gray",
"Sauvigney-lès-Pesmes",
"Sauvigny-le-Beuréal",
"Sauvigny-le-Bois",
"Sauvigny-les-Bois",
"Sauvillers-Mongival",
"Saux-et-Pomarède",
"Sauzé-Vaussais",
"Savas-Mépin",
"savez-vous",
"Savignac-de-Duras",
"Savignac-de-l'Isle",
"Savignac-de-Miremont",
"Savignac-de-Nontron",
"Savignac-Lédrier",
"Savignac-les-Eglises",
"Savignac-les-Églises",
"Savignac-les-Ormeaux",
"Savignac-Mona",
"Savignac-sur-Leyze",
"Savigné-l'Evêque",
"Savigné-l'Évêque",
"Savigné-sous-le-Lude",
"Savigné-sur-Lathan",
"Savigny-en-Revermont",
"Savigny-en-Sancerre",
"Savigny-en-Septaine",
"Savigny-en-Terre-Plaine",
"Savigny-en-Véron",
"Savigny-lès-Beaune",
"Savigny-le-Sec",
"Savigny-le-Temple",
"Savigny-Lévescault",
"Savigny-le-Vieux",
"Savigny-Poil-Fol",
"Savigny-sous-Faye",
"Savigny-sous-Mâlain",
"Savigny-sur-Aisne",
"Savigny-sur-Ardres",
"Savigny-sur-Braye",
"Savigny-sur-Clairis",
"Savigny-sur-Grosne",
"Savigny-sur-Orge",
"Savigny-sur-Seille",
"Savines-le-Lac",
"savoir-faire",
"savoir-vivre",
"Savonnières-devant-Bar",
"Savonnières-en-Perthois",
"Savonnières-en-Woëvre",
"Savy-Berlette",
"Saxe-Anhalt",
"Saxe-du-Nord",
"Saxi-Bourdon",
"Saxon-Sion",
"scale-out",
"scale-up",
"scaphoïdo-astragalien",
"scaphoïdo-cuboïdien",
"sceau-cylindre",
"sceau-de-Notre-Dame",
"sceau-de-salomon",
"Sceau-Saint-Angel",
"sceaux-cylindres",
"Sceaux-d'Anjou",
"sceaux-de-Notre-Dame",
"Sceaux-du-Gâtinais",
"Sceaux-sur-Huisne",
"scènes-clés",
"Scey-Maisières",
"Scey-sur-Saône",
"Scey-sur-Saône-et-Saint-Albin",
"Schacht-Audorf",
"Schaffhouse-près-Seltz",
"Schaffhouse-sur-Zorn",
"S-chanf",
"Scharrachbergheim-Irmstett",
"Scheibe-Alsbach",
"Schieder-Schwalenberg",
"Schinznach-Bad",
"Schiphol-Oost",
"Schiphol-Rijk",
"schiste-carton",
"schistes-carton",
"Schlatt-Haslen",
"Schleswig-Flensburg",
"Schleswig-Holstein",
"Schmalkalden-Meiningen",
"Schmogrow-Fehrow",
"Schmölln-Putzkau",
"Schnarup-Thumby",
"Schönau-Berzdorf",
"Schönenberg-Kübelberg",
"Schönwalde-Glien",
"Schouwen-Duiveland",
"Schwalm-Eder",
"Schweigen-Rechtenbach",
"Schweighouse-sur-Moder",
"Schweighouse-Thann",
"scie-cloche",
"science-fictif",
"science-fiction",
"science-fictions",
"sciences-fiction",
"sciences-fictions",
"scies-cloches",
"Scieurac-et-Flourès",
"scirpo-phragmitaie",
"scirpo-phragmitaies",
"Scorbé-Clairvaux",
"scottish-terrier",
"scuto-sternal",
"Scy-Chazelles",
"S.-E.",
"Sealyham-terrier",
"Sébazac-Concourès",
"sèche-cheveu",
"sèche-cheveux",
"sèche-linge",
"séchoir-atomiseur",
"séchoir-atomiseurs",
"seconde-lumière",
"secondes-lumière",
"Secondigné-sur-Belle",
"Secqueville-en-Bessin",
"Sedze-Maubecq",
"See-Gaster",
"Seeheim-Jugenheim",
"Seeon-Seebruck",
"Seeth-Ekholt",
"Séez-Mesnil",
"Ségrie-Fontaine",
"Ségur-le-Château",
"Ségur-les-Villas",
"Seiches-sur-le-Loir",
"Seillons-Source-d'Argens",
"seine-et-marnais",
"Seine-et-Marnais",
"seine-et-marnaise",
"Seine-et-Marnaise",
"seine-et-marnaises",
"Seine-et-Marnaises",
"Seine-et-Marne",
"Seine-et-Oise",
"Seine-Inférieure",
"Seine-Maritime",
"Seine-Port",
"seine-portais",
"Seine-Portais",
"seine-portaise",
"Seine-Portaise",
"seine-portaises",
"Seine-Portaises",
"Seine-Saint-Denis",
"Seitingen-Oberflacht",
"self-control",
"self-défense",
"self-government",
"self-governments",
"self-made-man",
"self-made-mans",
"self-made-men",
"self-made-woman",
"self-made-womans",
"self-made-women",
"self-service",
"self-services",
"Selke-Aue",
"selk'nam",
"Selk'nam",
"Selles-Saint-Denis",
"selles-sur-cher",
"Selles-sur-Cher",
"Selles-sur-Nahon",
"Selon-Jean",
"Selon-Luc",
"Selon-Marc",
"Selon-Matthieu",
"semaine-lumière",
"semaines-lumière",
"Séméacq-Blachon",
"semen-contra",
"Sémézies-Cachan",
"Semoutiers-Montsaon",
"semper-virens",
"Semur-en-Auxois",
"Semur-en-Brionnais",
"Semur-en-Vallon",
"Sénaillac-Latronquière",
"Sénaillac-Lauzès",
"Senargent-Mignafans",
"sénateur-maire",
"sénatus-consulte",
"sénatus-consultes",
"Sencenac-Puy-de-Fourches",
"Senesse-de-Senabugue",
"Senillé-Saint-Sauveur",
"Senlis-le-Sec",
"Sennecé-lès-Mâcon",
"Sennecey-le-Grand",
"Sennecey-lès-Dijon",
"Senneville-sur-Fécamp",
"Sennevoy-le-Bas",
"Sennevoy-le-Haut",
"Senoncourt-les-Maujouy",
"Sens-Beaujeu",
"Sens-de-Bretagne",
"sensori-moteur",
"sensori-moteurs",
"sensori-motrice",
"sensori-motrices",
"sensori-motricité",
"Sens-sur-Seille",
"sent-bon",
"Sentenac-de-Sérou",
"Sentenac-d'Oust",
"Senven-Léhart",
"Seo-yeon",
"Seppois-le-Bas",
"Seppois-le-Haut",
"septante-cinq",
"septante-deux",
"septante-et-un",
"septante-huit",
"septante-neuf",
"septante-quatre",
"septante-sept",
"septante-six",
"septante-trois",
"Septèmes-les-Vallons",
"sept-en-gueule",
"sept-en-huit",
"septentrio-occidental",
"septentrio-occidentale",
"septentrio-occidentales",
"septentrio-occidentaux",
"sept-et-le-va",
"Sept-Forges",
"Sept-Frères",
"sept-frèrien",
"Sept-Frèrien",
"sept-frèrienne",
"Sept-Frèrienne",
"sept-frèriennes",
"Sept-Frèriennes",
"sept-frèriens",
"Sept-Frèriens",
"Sept-Îles",
"Sept-Ilien",
"Sept-Îlien",
"Sept-Îlois",
"Sept-Insulaire",
"Sept-Insulaires",
"Sept-Lacquois",
"sept-mâts",
"Sept-Meules",
"sept-meulois",
"Sept-Meulois",
"sept-meuloise",
"Sept-Meuloise",
"sept-meuloises",
"Sept-Meuloises",
"sept-oeil",
"sept-œil",
"sept-oeils",
"sept-œils",
"Sept-Saulx",
"sept-sortais",
"Sept-Sortais",
"sept-sortaise",
"Sept-Sortaise",
"sept-sortaises",
"Sept-Sortaises",
"Sept-Sorts",
"sept-ventais",
"Sept-Ventais",
"sept-ventaise",
"Sept-Ventaise",
"sept-ventaises",
"Sept-Ventaises",
"Sept-Vents",
"Sepulcro-Hilario",
"Séquano-Dionysien",
"Seraing-le-Château",
"Séranvillers-Forenville",
"Seraucourt-le-Grand",
"Serbie-et-Monténégro",
"serbo-croate",
"Sère-en-Lavedan",
"Sère-Lanso",
"Serémange-Erzange",
"Sère-Rustaing",
"Sérézin-de-la-Tour",
"Sérézin-du-Rhône",
"sergent-chef",
"sergent-major",
"sergents-chefs",
"sergents-majors",
"Sérignac-Péboudou",
"Sérignac-sur-Garonne",
"Sérignan-du-Comtat",
"Seringes-et-Nesles",
"Sermaize-les-Bains",
"Sermoise-sur-Loire",
"séro-sanguin",
"séro-sanguine",
"séro-sanguines",
"séro-sanguins",
"Serra-di-Ferro",
"Serra-di-Fiumorbo",
"Serra-di-Scopamène",
"serre-bauquière",
"serre-bosse",
"serre-bosses",
"serre-bras",
"serre-ciseau",
"serre-ciseaux",
"serre-cou",
"serre-cous",
"serre-feu",
"serre-feux",
"serre-fil",
"serre-file",
"serre-files",
"serre-fils",
"serre-fine",
"serre-frein",
"serre-joint",
"serre-joints",
"Serre-les-Moulières",
"Serre-les-Sapins",
"serre-livre",
"serre-livres",
"serre-malice",
"Serre-Nerpol",
"serre-nerpolain",
"Serre-Nerpolain",
"serre-nerpolaine",
"Serre-Nerpolaine",
"serre-nerpolaines",
"Serre-Nerpolaines",
"serre-nerpolains",
"Serre-Nerpolains",
"serre-nez",
"serre-noeud",
"serre-nœud",
"serre-nœuds",
"serre-papier",
"serre-papiers",
"serre-pédicule",
"serre-pédicules",
"serre-point",
"serre-points",
"serre-rails",
"Serres-Castet",
"Serres-et-Montguyard",
"serres-fines",
"Serres-Gaston",
"serres-gastonnais",
"Serres-Gastonnais",
"serres-gastonnaise",
"Serres-Gastonnaise",
"serres-gastonnaises",
"Serres-Gastonnaises",
"Serreslous-et-Arribans",
"Serres-Morlaàs",
"serres-morlanais",
"Serres-Morlanais",
"serres-morlanaise",
"Serres-Morlanaise",
"serres-morlanaises",
"Serres-Morlanaises",
"Serres-Sainte-Marie",
"Serres-sur-Arget",
"serre-taille",
"serre-tailles",
"serre-tête",
"serre-têtes",
"serre-tube",
"serre-tubes",
"Serrières-de-Briord",
"Serrières-en-Chautagne",
"Serrières-sur-Ain",
"Serrigny-en-Bresse",
"serri-sapinois",
"Serri-Sapinois",
"serri-sapinoise",
"Serri-Sapinoise",
"serri-sapinoises",
"Serri-Sapinoises",
"Servance-Miellin",
"Servaville-Salmonville",
"Serves-sur-Rhône",
"services-volées",
"service-volée",
"Servières-le-Château",
"Serviers-et-Labaume",
"Serviès-en-Val",
"serviette-éponge",
"serviettes-éponges",
"Servigny-lès-Raville",
"Servigny-lès-Sainte-Barbe",
"servo-direction",
"servo-directions",
"servo-frein",
"servo-freins",
"servo-moteur",
"Servon-Melzicourt",
"Servon-sur-Vilaine",
"Séry-lès-Mézières",
"Séry-Magneval",
"Serzy-et-Prin",
"Seuil-d'Argonne",
"seule-en-scène",
"seul-en-scène",
"Sévérac-d'Aveyron",
"Sévérac-le-Château",
"Sévérac-l'Eglise",
"Sévérac-l'Église",
"Sévignacq-Meyracq",
"Sévignacq-Thèze",
"Sévigny-la-Forêt",
"Sévigny-Waleppe",
"Sèvres-Anxaumont",
"sex-appeal",
"sex-digital",
"sex-digitisme",
"sex-digitismes",
"sexe-ratio",
"Sexey-aux-Forges",
"Sexey-les-Bois",
"sex-ratio",
"sex-ratios",
"sex-shop",
"sex-shops",
"sex-symbol",
"sex-symbols",
"sex-toy",
"sex-toys",
"Seysses-Savès",
"Seyssinet-Pariset",
"shabu-shabu",
"Shai-hulud",
"Shang-Haï",
"shar-peï",
"shar-peïs",
"shift-cliqua",
"shift-cliquai",
"shift-cliquaient",
"shift-cliquais",
"shift-cliquait",
"shift-cliquâmes",
"shift-cliquant",
"shift-cliquas",
"shift-cliquasse",
"shift-cliquassent",
"shift-cliquasses",
"shift-cliquassiez",
"shift-cliquassions",
"shift-cliquât",
"shift-cliquâtes",
"shift-clique",
"shift-cliqué",
"shift-cliquée",
"shift-cliquées",
"shift-cliquent",
"shift-cliquer",
"shift-cliquera",
"shift-cliquerai",
"shift-cliqueraient",
"shift-cliquerais",
"shift-cliquerait",
"shift-cliqueras",
"shift-cliquèrent",
"shift-cliquerez",
"shift-cliqueriez",
"shift-cliquerions",
"shift-cliquerons",
"shift-cliqueront",
"shift-cliques",
"shift-cliqués",
"shift-cliquez",
"shift-cliquiez",
"shift-cliquions",
"shift-cliquons",
"shikoku-inu",
"shipibo-conibo",
"shoot-'em-up",
"Shoreham-by-Sea",
"short-culotte",
"short-culottes",
"short-track",
"short-tracks",
"show-biz",
"show-business",
"Siaugues-Sainte-Marie",
"Siccieu-Saint-Julien-et-Carisieu",
"sicilio-sarde",
"side-car",
"side-cariste",
"side-caristes",
"side-cars",
"siècle-lumière",
"siècles-lumière",
"Siegen-Wittgenstein",
"Sierck-les-Bains",
"sierra-léonais",
"Sierra-Léonais",
"sierra-léonaise",
"Sierra-Léonaise",
"sierra-léonaises",
"Sierra-Léonaises",
"Sierra-Léonien",
"Sieversdorf-Hohenofen",
"sigma-additif",
"sigma-additivité",
"sigma-additivités",
"Signy-Avenex",
"Signy-l'Abbaye",
"Signy-le-Grand",
"Signy-le-Petit",
"Signy-Librecy",
"Signy-Montlibert",
"Signy-Signets",
"Sigy-en-Bray",
"Sigy-le-Châtel",
"silicico-aluminique",
"silicico-aluminiques",
"silicico-cuivreux",
"Sillans-la-Cascade",
"Sillé-le-Guillaume",
"Sillé-le-Philippe",
"Silley-Amancey",
"Silley-Bléfond",
"Silly-en-Gouffern",
"Silly-en-Saulnois",
"Silly-la-Poterie",
"Silly-le-Long",
"Silly-sur-Nied",
"Silly-Tillard",
"silure-spatule",
"Simandre-sur-Suran",
"Simiane-Collongue",
"Simiane-la-Rotonde",
"simili-cuir",
"simili-cuirs",
"Simon-la-Vineuse",
"Sincey-lès-Rouvray",
"singe-araignée",
"singe-chouette",
"singe-écureuil",
"singe-lion",
"singes-araignées",
"singes-chouettes",
"singes-écureuils",
"singes-lions",
"Sin-le-Noble",
"sino-américain",
"sino-américaine",
"sino-américaines",
"sino-américains",
"sino-australien",
"sino-australienne",
"sino-australiennes",
"sino-australiens",
"sino-canadien",
"sino-colombien",
"sino-colombienne",
"sino-colombiennes",
"sino-colombiens",
"sino-congolais",
"sino-continental",
"sino-coréen",
"sino-égyptien",
"sino-égyptienne",
"sino-égyptiennes",
"sino-égyptiens",
"sino-européen",
"sino-japonais",
"sino-japonaise",
"sino-japonaises",
"sino-québécois",
"sino-taïwanais",
"sino-tibétain",
"sino-vietnamien",
"sino-vietnamienne",
"sino-vietnamiennes",
"sino-vietnamiens",
"Sint-Amands",
"Sint-Andries",
"Sint-Annaland",
"Sint-Annaparochie",
"Sint-Annen",
"Sint-Antonius",
"Sint-Baafs-Vijve",
"Sint-Denijs",
"Sint-Eloois-Winkel",
"Sint-Gillis-Waas",
"Sint-Goriks-Oudenhove",
"Sint-Huibrechts-Hern",
"Sint-Huibrechts-Lille",
"Sint-Jacobiparochie",
"Sint-Jacobskapelle",
"Sint-Jan",
"Sint-Jan-in-Eremo",
"Sint-Job-in-'t-Goor",
"Sint-Joris",
"Sint-Joris-Weert",
"Sint-Joris-Winge",
"Sint-Jozef",
"Sint-Jozefparochie",
"Sint-Katelijne-Waver",
"Sint-Katherina-Lombeek",
"Sint-Kruis",
"Sint-Kwintens-Lennik",
"Sint-Laureins",
"Sint-Laureins-Berchem",
"Sint-Maartensdijk",
"Sint-Margriete",
"Sint-Maria-Latem",
"Sint-Maria-Lierde",
"Sint-Maria-Oudenhove",
"Sint-Martens-Latem",
"Sint-Martens-Leerne",
"Sint-Martens-Lennik",
"Sint-Martens-Lierde",
"Sint-Michiels",
"Sint-Odiliënberg",
"Sint-Oedenrode",
"Sint-Pauwels",
"Sint-Philipsland",
"Sint-Pieters-Kapelle",
"Sint-Pieters-Leeuw",
"Sint-Pieters-op-den-Dijk",
"Sint-Rijkers",
"Sion-les-Mines",
"Siorac-de-Ribérac",
"Siorac-en-Périgord",
"Siouville-Hague",
"sister-ship",
"sister-ships",
"sit-in",
"sit-ins",
"Sittard-Geleen",
"sit-up",
"sit-ups",
"Sivry-Ante",
"Sivry-Courtry",
"Sivry-la-Perche",
"Sivry-lès-Buzancy",
"Sivry-Rance",
"Sivry-sur-Meuse",
"six-cents",
"six-cent-soixante-six",
"six-cent-soixante-sixième",
"six-cent-soixante-sixièmes",
"six-clefs",
"six-coups",
"six-doigts",
"six-fournais",
"Six-Fournais",
"six-fournaise",
"Six-Fournaise",
"six-fournaises",
"Six-Fournaises",
"Six-Fours-la-Plage",
"Six-Fours-les-Plages",
"six-mâts",
"Six-Planes",
"Sixt-Fer-à-Cheval",
"Sixt-sur-Aff",
"six-vingts",
"Skelton-in-Cleveland",
"ski-alpinisme",
"ski-alpinismes",
"ski-alpiniste",
"ski-alpinistes",
"Skye-terrier",
"sleeping-car",
"Slijk-Ewijk",
"sloop-of-war",
"slop-tank",
"Sluis-Aardenburg",
"smaragdo-chalcite",
"smaragdo-chalcites",
"Smeerebbe-Vloerzegem",
"S-métolachlore",
"snack-bar",
"snack-bars",
"Snijders-Chaam",
"snow-boot",
"snow-boots",
"soap-opéra",
"soaps-opéras",
"sociale-démocrate",
"sociales-démocrates",
"sociales-traitres",
"sociales-traîtres",
"sociale-traitre",
"sociale-traître",
"sociaux-démocrates",
"sociaux-traitres",
"sociaux-traîtres",
"société-écran",
"sociétés-écrans",
"socio-cible",
"socio-cibles",
"socio-culturel",
"socio-culturelle",
"socio-culturelles",
"socio-culturels",
"socio-économique",
"socio-économiques",
"socio-éducatif",
"socio-éducatifs",
"socio-éducative",
"socio-éducatives",
"socio-esthéticien",
"socio-esthéticiens",
"socio-historiographe",
"socio-historiographes",
"socio-historique",
"socio-historiques",
"socio-politique",
"socio-politiques",
"socio-professionnel",
"socio-professionnelle",
"socio-professionnelles",
"socio-professionnels",
"soda-spodumenes",
"sodo-calcique",
"sodo-calciques",
"Sognolles-en-Montois",
"Sogny-aux-Moulins",
"Sogny-en-l'Angle",
"Soheit-Tinlot",
"soi-disamment",
"soi-disant",
"Soignolles-en-Brie",
"soi-même",
"Soing-Cubry-Charentenay",
"Soings-en-Sologne",
"Soirans-Fouffrans",
"Soissons-sur-Nacey",
"Soisy-Bouy",
"Soisy-sous-Montmorency",
"Soisy-sur-Ecole",
"Soisy-sur-École",
"Soisy-sur-Seine",
"soit-communiqué",
"soixante-cinq",
"soixante-deux",
"soixante-dix",
"soixante-dix-huit",
"soixante-dixième",
"soixante-dixièmes",
"soixante-dix-neuf",
"soixante-dix-sept",
"soixante-dizaine",
"soixante-dizaines",
"soixante-douze",
"soixante-et-onze",
"soixante-et-un",
"soixante-et-une",
"soixante-huit",
"soixante-huitard",
"soixante-huitarde",
"soixante-huitardes",
"soixante-huitards",
"soixante-neuf",
"soixante-quatorze",
"soixante-quatre",
"soixante-quinze",
"soixante-seize",
"soixante-sept",
"soixante-six",
"soixante-treize",
"soixante-trois",
"Soizy-aux-Bois",
"Solaure-en-Diois",
"sole-ruardon",
"Solignac-sous-Roche",
"Solignac-sur-Loire",
"Soligny-la-Trappe",
"Soligny-les-Etangs",
"Soligny-les-Étangs",
"Sollières-Sardières",
"Solliès-Pont",
"solliès-pontois",
"Solliès-Pontois",
"solliès-pontoise",
"Solliès-Pontoise",
"solliès-pontoises",
"Solliès-Pontoises",
"Solliès-Toucas",
"solliès-villain",
"Solliès-Villain",
"solliès-villaine",
"Solliès-Villaine",
"solliès-villaines",
"Solliès-Villaines",
"solliès-villains",
"Solliès-Villains",
"Solliès-Ville",
"Solre-le-Château",
"Solre-Saint-Géry",
"Solre-sur-Sambre",
"Soltau-Fallingbostel",
"Solutré-Pouilly",
"somato-psychique",
"somato-psychiques",
"Someren-Eind",
"Someren-Heide",
"Somme-Bionne",
"Somme-Leuze",
"somme-leuzien",
"Somme-Leuzien",
"Somme-Leuzienne",
"Sommepy-Tahure",
"somme-suippas",
"Somme-Suippas",
"somme-suippase",
"Somme-Suippase",
"somme-suippases",
"Somme-Suippases",
"Somme-Suippe",
"Somme-Tourbe",
"Sommette-Eaucourt",
"Somme-Vesle",
"Somme-Yèvre",
"Sommières-du-Clain",
"Sonceboz-Sombeval",
"Soncourt-sur-Marne",
"son-et-lumière",
"soŋay-zarma",
"soŋay-zarmas",
"songe-creux",
"songe-malice",
"songhaï-zarma",
"songhaï-zarmas",
"Sonnac-sur-l'Hers",
"Sonnenberg-Winnenberg",
"Sons-et-Ronchères",
"Sonthonnax-la-Montagne",
"Soo-hyun",
"Soorts-Hossegor",
"Soppe-le-Bas",
"Soppe-le-Haut",
"Sorans-lès-Breurey",
"Sorbo-Ocagnano",
"Sorcy-Bauthémont",
"Sorcy-Saint-Martin",
"Sorde-l'Abbaye",
"Sorel-en-Vimeu",
"Sorel-Moussel",
"Sorinne-la-Longue",
"Sornzig-Ablaß",
"Sort-en-Chalosse",
"sortie-de-bain",
"sortie-de-bal",
"Sortosville-en-Beaumont",
"sot-l'y-laisse",
"Sotteville-lès-Rouen",
"Sotteville-sous-le-Val",
"Sotteville-sur-Mer",
"sotto-voce",
"Souain-Perthes-lès-Hurlus",
"Souancé-au-Perche",
"sou-chong",
"sou-chongs",
"Soucieu-en-Jarrest",
"Soudaine-Lavinadière",
"soudano-tchado-lybien",
"Soudé-Notre-Dame-ou-le-Petit",
"soudo-brasa",
"soudo-brasai",
"soudo-brasaient",
"soudo-brasais",
"soudo-brasait",
"soudo-brasâmes",
"soudo-brasant",
"soudo-brasas",
"soudo-brasasse",
"soudo-brasassent",
"soudo-brasasses",
"soudo-brasassiez",
"soudo-brasassions",
"soudo-brasât",
"soudo-brasâtes",
"soudo-brase",
"soudo-brasé",
"soudo-brasée",
"soudo-brasées",
"soudo-brasent",
"soudo-braser",
"soudo-brasera",
"soudo-braserai",
"soudo-braseraient",
"soudo-braserais",
"soudo-braserait",
"soudo-braseras",
"soudo-brasèrent",
"soudo-braserez",
"soudo-braseriez",
"soudo-braserions",
"soudo-braserons",
"soudo-braseront",
"soudo-brases",
"soudo-brasés",
"soudo-brasez",
"soudo-brasiez",
"soudo-brasions",
"soudo-brasons",
"Soueix-Rogalle",
"souffre-douleur",
"souffre-douleurs",
"soufre-sélénifère",
"Sougé-le-Ganelon",
"Sougères-en-Puisaye",
"Sougères-sur-Sinotte",
"Sougné-Remouchamps",
"Sougy-sur-Loire",
"souï-manga",
"Soulac-sur-Mer",
"Soulages-Bonneval",
"Soulaines-Dhuys",
"Soulaines-sur-Aubance",
"Soulaire-et-Bourg",
"Soulaucourt-sur-Mouzon",
"Soulce-Cernay",
"Souleuvre-en-Bocage",
"Soulgé-sur-Ouette",
"Souligné-Flacé",
"Souligné-sous-Ballon",
"Soulosse-sous-Saint-Elophe",
"Soulosse-sous-Saint-Élophe",
"Soultzbach-les-Bains",
"Soultz-Haut-Rhin",
"Soultz-les-Bains",
"Soultz-sous-Forêts",
"Soumont-Saint-Quentin",
"soum-soum",
"soupe-tout-seul",
"Souppes-sur-Loing",
"Source-Seine",
"Sourcieux-les-Mines",
"sourde-muette",
"sourdes-muettes",
"Sourdeval-la-Barre",
"Sourdeval-les-Bois",
"Sourdeval-Vengeons",
"sourd-muet",
"sourd-parlant",
"sourds-muets",
"souris-chauve",
"souris-chauves",
"souris-crayon",
"souris-crayons",
"souris-opossums",
"souris-stylo",
"souris-stylos",
"Sousceyrac-en-Quercy",
"Soussey-sur-Brionne",
"Southend-on-Sea",
"soutien-gorge",
"soutien-loloches",
"soutiens-gorge",
"souvenez-vous-de-moi",
"souveraineté-association",
"Souvigné-sur-Même",
"Souvigné-sur-Sarthe",
"Souvigny-de-Touraine",
"Souvigny-en-Sologne",
"Souzay-Champigny",
"Souzy-la-Briche",
"Soye-en-Septaine",
"Spaarndam-Oost",
"Spaarndam-West",
"sparring-partner",
"spatio-temporel",
"spatio-temporelle",
"spatio-temporelles",
"spatio-temporels",
"Spechbach-le-Bas",
"Spechbach-le-Haut",
"speed-dating",
"sphéno-temporal",
"sphinx-bourdon",
"Spider-Man",
"Spiesen-Elversberg",
"spina-bifida",
"spina-ventosa",
"spin-off",
"spin-offs",
"spiro-bloc",
"spiro-blocs",
"sport-étude",
"sportivo-financier",
"sports-études",
"Sprang-Capelle",
"Spree-Neisse",
"spruce-beer",
"squale-grogneur",
"sri-lankais",
"Sri-Lankais",
"sri-lankaise",
"Sri-Lankaise",
"sri-lankaises",
"Sri-Lankaises",
"stabilo-bossa",
"stabilo-bossai",
"stabilo-bossaient",
"stabilo-bossais",
"stabilo-bossait",
"stabilo-bossâmes",
"stabilo-bossant",
"stabilo-bossas",
"stabilo-bossasse",
"stabilo-bossassent",
"stabilo-bossasses",
"stabilo-bossassiez",
"stabilo-bossassions",
"stabilo-bossât",
"stabilo-bossâtes",
"stabilo-bosse",
"stabilo-bossé",
"stabilo-bossée",
"stabilo-bossées",
"stabilo-bossent",
"stabilo-bosser",
"stabilo-bossera",
"stabilo-bosserai",
"stabilo-bosseraient",
"stabilo-bosserais",
"stabilo-bosserait",
"stabilo-bosseras",
"stabilo-bossèrent",
"stabilo-bosserez",
"stabilo-bosseriez",
"stabilo-bosserions",
"stabilo-bosserons",
"stabilo-bosseront",
"stabilo-bosses",
"stabilo-bossés",
"stabilo-bossez",
"stabilo-bossiez",
"stabilo-bossions",
"stabilo-bossons",
"Stadecken-Elsheim",
"Stafordshire-bull-terrier",
"stage-coach",
"stage-coachs",
"Staines-upon-Thames",
"stand-by",
"stand-up",
"Stanford-le-Hope",
"stannoso-potassique",
"Starrkirch-Wil",
"star-système",
"star-systèmes",
"starting-block",
"starting-blocks",
"starting-gate",
"start-up",
"start-upeur",
"st'at'imc",
"station-service",
"stations-service",
"stations-services",
"statue-menhir",
"statues-menhirs",
"Staudach-Egerndach",
"steam-boat",
"steam-boats",
"Stechow-Ferchesar",
"Steenhuize-Wijnhuize",
"steeple-chase",
"Steg-Hohtenn",
"Steinbach-Hallenberg",
"Stein-Bockenheim",
"Steinbrunn-le-Bas",
"Steinbrunn-le-Haut",
"Stein-Neukirch",
"Stein-Wingert",
"Stelle-Wittenwurth",
"sténo-dactylographe",
"sténo-dactylographes",
"sténo-méditerranéen",
"sténo-méditerranéenne",
"sténo-méditerranéennes",
"sténo-méditerranéens",
"step-back",
"step-backs",
"stéphano-carladésien",
"Stéphano-Carladésien",
"stéphano-carladésienne",
"Stéphano-Carladésienne",
"stéphano-carladésiennes",
"Stéphano-Carladésiennes",
"stéphano-carladésiens",
"Stéphano-Carladésiens",
"stéréo-isomère",
"stéréo-isomères",
"sterno-claviculaire",
"sterno-claviculaires",
"sterno-cléido-mastoïdien",
"sterno-cléido-mastoïdiens",
"sterno-clido-mastoïdien",
"sterno-clido-mastoïdienne",
"sterno-clido-mastoïdiennes",
"sterno-clido-mastoïdiens",
"sterno-huméral",
"sterno-hyoïdien",
"sterno-pubien",
"Stiring-Wendel",
"St-Jean",
"stock-car",
"stock-cars",
"Stockhausen-Illfurth",
"stock-option",
"stock-options",
"stocks-tampons",
"stock-tampon",
"Stockton-on-Tees",
"Stockum-Püschen",
"Stoke-on-Trent",
"stomo-gastrique",
"stomo-gastriques",
"stop-ski",
"stop-skis",
"Storbeck-Frankendorf",
"story-board",
"story-boards",
"Straßlach-Dingharting",
"Stratford-on-Avon",
"Straubing-Bogen",
"strauss-kahnien",
"strauss-kahniens",
"street-artiste",
"street-artistes",
"street-gadz",
"Strépy-Bracquegnies",
"strip-teasa",
"strip-teasai",
"strip-teasaient",
"strip-teasais",
"strip-teasait",
"strip-teasâmes",
"strip-teasant",
"strip-teasas",
"strip-teasasse",
"strip-teasassent",
"strip-teasasses",
"strip-teasassiez",
"strip-teasassions",
"strip-teasât",
"strip-teasâtes",
"strip-tease",
"strip-teasé",
"strip-teasée",
"strip-teasées",
"strip-teasent",
"strip-teaser",
"strip-teasera",
"strip-teaserai",
"strip-teaseraient",
"strip-teaserais",
"strip-teaserait",
"strip-teaseras",
"strip-teasèrent",
"strip-teaserez",
"strip-teaseriez",
"strip-teaserions",
"strip-teaserons",
"strip-teaseront",
"strip-teases",
"strip-teasés",
"strip-teaseurs",
"strip-teaseuse",
"strip-teaseuses",
"strip-teasez",
"strip-teasiez",
"strip-teasions",
"strip-teasons",
"stroke-play",
"strom-apparat",
"Strombeek-Bever",
"struggle-for-life",
"struggle-for-lifes",
"stud-book",
"Stüdenitz-Schönermark",
"stuffing-box",
"Stutzheim-Offenheim",
"stylo-bille",
"stylo-billes",
"stylo-feutre",
"stylo-glosse",
"stylo-gomme",
"stylo-pistolet",
"stylo-plume",
"stylos-feutres",
"stylos-gommes",
"stylo-souris",
"stylos-plume",
"stylos-souris",
"Suaucourt-et-Pisseloup",
"subrogés-tuteurs",
"subrogé-tuteur",
"suce-bœuf",
"suce-boules",
"suce-fleur",
"suce-fleurs",
"suce-goulot",
"suce-goulots",
"suce-médailles",
"Sucé-sur-Erdre",
"Suc-et-Sentenac",
"Sucy-en-Brie",
"sudoro-algique",
"Súdwest-Fryslân",
"suédo-américain",
"suédo-américaine",
"suédo-américaines",
"suédo-américains",
"Suilly-la-Tour",
"Suisse-Saxonne-Monts-Métallifères-de-l'Est",
"suivez-moi-jeune-homme",
"Suizy-le-Franc",
"Sukow-Levitzow",
"sulfo-margarique",
"Sully-la-Chapelle",
"Sully-sur-Loire",
"Sulzbach-Laufen",
"Sulzbach-Rosenberg",
"suméro-akkadien",
"suméro-akkadienne",
"suméro-akkadiennes",
"suméro-akkadiens",
"Sunbury-on-Thames",
"super-8",
"support-chaussettes",
"supports-chaussettes",
"supra-axillaire",
"supra-axillaires",
"supra-caudal",
"supra-caudale",
"supra-caudales",
"supra-caudaux",
"supra-épineux",
"surdi-mutité",
"surdi-mutités",
"suro-pédieuse",
"suro-pédieuses",
"suro-pédieux",
"surprise-partie",
"surprise-parties",
"surprises-parties",
"surveillant-général",
"Sury-aux-Bois",
"Sury-en-Léré",
"Sury-en-Vaux",
"Sury-ès-Bois",
"Sury-le-Comtal",
"Sury-près-Léré",
"sus-caudal",
"sus-cité",
"sus-coccygien",
"sus-dominante",
"sus-dominantes",
"sus-épineux",
"sus-hépatique",
"sus-hépatiques",
"sus-hyoïdien",
"sus-jacent",
"sus-jacents",
"sus-maxillo-labial",
"sus-maxillo-nasal",
"sus-métatarsien",
"sus-métatarsienne",
"sus-métatarsiennes",
"sus-métatarsiens",
"sus-naseau",
"sus-naso-labial",
"sus-pied",
"sus-pubio-fémoral",
"Sus-Saint-Léger",
"sus-tarsien",
"sus-tarsienne",
"sus-tarsiennes",
"sus-tarsiens",
"sus-tentoriel",
"sus-tentorielle",
"sus-tentorielles",
"sus-tentoriels",
"sus-tonique",
"su-sucre",
"su-sucres",
"Sutton-in-Ashfield",
"Sutz-Lattrigen",
"Suze-la-Rousse",
"S.-W.",
"sweat-shirt",
"sweat-shirts",
"Sylvains-les-Moulins",
"syndesmo-pharyngien",
"Syr-Daria",
"syro-chaldaïque",
"syro-chaldéen",
"syro-chaldéens",
"syro-saoudien",
"systèmes-clés",
"tabagn's",
"Tabaille-Usquain",
"Taben-Rodt",
"table-bureau",
"tables-bureaux",
"tac-tac",
"Tadousse-Ussau",
"Taglio-Isolaccio",
"Tahiti-Iti",
"Tahu-Ata",
"Taiarapu-Est",
"Taiarapu-Ouest",
"tai-kadai",
"taï-kadaï",
"taï-le",
"taille-crayon",
"taille-crayons",
"taille-douce",
"taille-haie",
"taille-haies",
"taille-mèche",
"taille-mèches",
"taille-mer",
"taille-mers",
"taille-plume",
"taille-plumes",
"taille-pré",
"taille-prés",
"tailles-douces",
"taille-vent",
"taille-vents",
"Tain-l'Hermitage",
"taï-nüa",
"Taisnières-en-Thiérache",
"Taisnières-sur-Hon",
"Taizé-Aizie",
"taki-taki",
"talco-micacé",
"talco-quartzeux",
"talkies-walkies",
"talkie-walkie",
"talkie-walkies",
"talk-show",
"Talloires-Montmin",
"Tallud-Sainte-Gemme",
"Talmont-Saint-Hilaire",
"Talmont-sur-Gironde",
"Talus-Saint-Prix",
"taly-pen",
"taly-pens",
"Tambach-Dietharz",
"tambour-major",
"tambours-majors",
"Tamnay-en-Bazois",
"tams-tams",
"tam-tam",
"tam-tams",
"Ta-Nehisi",
"Tanghin-Dassouri",
"Tannerre-en-Puisaye",
"tao-taï",
"tao-taïs",
"tape-à-l'oeil",
"tape-à-l'œil",
"tape-beurre",
"tape-beurres",
"tape-cul",
"tape-culs",
"tape-dur",
"tape-durs",
"tapis-brosse",
"tapis-de-caoutchouté",
"tapis-franc",
"tapis-francs",
"tapis-luge",
"tapis-luges",
"tapis-plain",
"Taponnat-Fleurignac",
"Tarascon-sur-Ariège",
"Tarascon-sur-Rhône",
"Tarawa-Sud",
"Tardets-Sorholus",
"tard-venus",
"tarn-et-garonnais",
"Tarn-et-Garonnais",
"tarn-et-garonnaise",
"Tarn-et-Garonnaise",
"tarn-et-garonnaises",
"Tarn-et-Garonnaises",
"Tarn-et-Garonne",
"Taron-Sadirac-Viellenave",
"tarso-métatarse",
"tarso-métatarsien",
"Tart-l'Abbaye",
"Tart-le-Bas",
"Tart-le-Haut",
"tarton-raire",
"Tassin-la-Demi-Lune",
"Tataouine-les-Bains",
"tâte-au-pot",
"tâte-ferraille",
"tate-mono",
"tate-monos",
"tâte-poule",
"tâte-vin",
"tâte-vins",
"tau-fluvalinate",
"Taulhac-près-le-Puy",
"taupe-grillon",
"taupes-grillons",
"Tauriac-de-Camarès",
"Tauriac-de-Naucelle",
"Taurignan-Castet",
"Taurignan-Vieux",
"Taussac-la-Billière",
"Tauxières-Mutry",
"Tavaux-et-Pontséricourt",
"Taxat-Senat",
"taxi-auto",
"taxi-automobile",
"taxi-brousse",
"taxi-girl",
"taxi-girls",
"taxis-brousse",
"taxis-vélos",
"taxi-vélo",
"t-bone",
"t-bones",
"T-calculable",
"T-calculables",
"tchado-burkinabé",
"tchado-centrafricain",
"tchado-egyptien",
"tchado-lybien",
"tchado-soudano-lybien",
"tchéco-slovaque",
"Tchéco-slovaque",
"Tchéco-Slovaque",
"tchéco-slovaques",
"Tchéco-slovaques",
"Tchéco-Slovaques",
"tchin-tchin",
"tchou-tchou",
"teach-in",
"teach-ins",
"teen-ager",
"teen-agers",
"tee-shirt",
"tee-shirts",
"Teillay-le-Gaudin",
"Teillay-Saint-Benoît",
"Teillet-Argenty",
"teinture-mère",
"teint-vin",
"teint-vins",
"Teissières-de-Cornet",
"Teissières-lès-Bouliès",
"Tel-Aviv-Jaffa",
"Telgruc-sur-Mer",
"Tella-Sin",
"t-elle",
"Tellières-le-Plessis",
"Teltow-Fläming",
"Temmen-Ringenwalde",
"témoins-clés",
"Temple-Laguyon",
"Templeuve-en-Pévèle",
"Templeux-la-Fosse",
"Templeux-le-Guérard",
"temporo-conchinien",
"temporo-superficiel",
"Tenero-Contra",
"Tensbüttel-Röst",
"tensio-actif",
"tente-abri",
"tente-ménagerie",
"tentes-ménageries",
"téra-ampère",
"téra-ampères",
"téra-électron-volt",
"téraélectron-volt",
"téra-électron-volts",
"téraélectron-volts",
"térawatt-heure",
"térawatt-heures",
"térawatts-heures",
"Tercis-les-Bains",
"Termes-d'Armagnac",
"Ternant-les-Eaux",
"terno-annulaire",
"Ternuay-Melay-et-Saint-Hilaire",
"Terny-Sorny",
"terra-cotta",
"terra-forma",
"terra-formai",
"terra-formaient",
"terra-formais",
"terra-formait",
"terra-formâmes",
"terra-formant",
"terra-formas",
"terra-formasse",
"terra-formassent",
"terra-formasses",
"terra-formassiez",
"terra-formassions",
"terra-formât",
"terra-formâtes",
"terra-forme",
"terra-formé",
"terra-formée",
"terra-formées",
"terra-forment",
"terra-former",
"terra-formera",
"terra-formerai",
"terra-formeraient",
"terra-formerais",
"terra-formerait",
"terra-formeras",
"terra-formèrent",
"terra-formerez",
"terra-formeriez",
"terra-formerions",
"terra-formerons",
"terra-formeront",
"terra-formes",
"terra-formés",
"terra-formez",
"terra-formiez",
"terra-formions",
"terra-formons",
"Terrasson-la-Villedieu",
"Terrasson-Lavilledieu",
"terre-à-terre",
"Terre-Clapier",
"Terre-de-Bas",
"Terre-de-Haut",
"Terre-et-Marais",
"terre-grièpe",
"Terre-Natale",
"terre-neuva",
"terre-neuvas",
"terre-neuve",
"Terre-Neuve",
"Terre-Neuve-et-Labrador",
"terre-neuvien",
"Terre-Neuvien",
"Terre-Neuvien-et-Labradorien",
"terre-neuvienne",
"Terre-Neuvienne",
"Terre-Neuvienne-et-Labradorienne",
"terre-neuviennes",
"Terre-Neuviennes",
"Terre-Neuviennes-et-Labradoriennes",
"terre-neuviens",
"Terre-Neuviens",
"Terre-Neuviens-et-Labradoriens",
"terre-neuvier",
"terre-neuviers",
"terre-noix",
"terre-plein",
"terre-pleins",
"Terres-de-Caux",
"terret-bourret",
"Territoire-de-Belfort",
"Terron-lès-Poix",
"Terron-lès-Vendresse",
"Terron-sur-Aisne",
"ter-ter",
"terza-rima",
"Tessancourt-sur-Aubette",
"Tessé-Froulay",
"Tessy-sur-Vire",
"test-match",
"test-matchs",
"Test-Milon",
"test-objet",
"Testorf-Steinfort",
"tête-à-queue",
"tête-à-tête",
"tête-bêche",
"tête-bleu",
"tête-chèvre",
"tête-de-bécasse",
"tête-de-chat",
"tête-de-chats",
"tête-de-cheval",
"tête-de-clou",
"tête-de-coq",
"tête-de-loup",
"tête-de-maure",
"tête-de-Maure",
"tête-de-méduse",
"Tête-de-Moine",
"tête-de-moineau",
"tête-de-More",
"tête-de-mort",
"tête-de-serpent",
"tête-de-soufre",
"Téteghem-Coudekerque-Village",
"tête-ronde",
"têtes-de-chat",
"têtes-de-clou",
"têtes-de-loup",
"têtes-de-Maure",
"têtes-de-méduse",
"têtes-de-moineau",
"têtes-de-mort",
"têtes-vertes",
"tête-verte",
"Teting-sur-Nied",
"tétra-atomique",
"tétrachlorodibenzo-p-dioxine",
"tétrachlorodibenzo-p-dioxines",
"tétrachloro-isophtalonitrile",
"tette-chèvre",
"tette-chèvres",
"teufs-teufs",
"teuf-teuf",
"teuf-teufa",
"teuf-teufai",
"teuf-teufaient",
"teuf-teufais",
"teuf-teufait",
"teuf-teufâmes",
"teuf-teufant",
"teuf-teufas",
"teuf-teufasse",
"teuf-teufassent",
"teuf-teufasses",
"teuf-teufassiez",
"teuf-teufassions",
"teuf-teufât",
"teuf-teufâtes",
"teuf-teufe",
"teuf-teufé",
"teuf-teufent",
"teuf-teufer",
"teuf-teufera",
"teuf-teuferai",
"teuf-teuferaient",
"teuf-teuferais",
"teuf-teuferait",
"teuf-teuferas",
"teuf-teufèrent",
"teuf-teuferez",
"teuf-teuferiez",
"teuf-teuferions",
"teuf-teuferons",
"teuf-teuferont",
"teuf-teufes",
"teuf-teufez",
"teuf-teufiez",
"teuf-teufions",
"teuf-teufons",
"Teurthéville-Bocage",
"Teurthéville-Hague",
"Thal-Drulingen",
"Thaleischweiler-Fröschen",
"Thal-Marmoutier",
"Thaon-les-Vosges",
"Theil-Rabier",
"Theil-sur-Vanne",
"Theix-Noyalo",
"Thélis-la-Combe",
"Théoule-sur-Mer",
"Thermes-Magnoac",
"thêta-jointure",
"thêta-jointures",
"Theuville-aux-Maillots",
"Theuvy-Achères",
"Thevet-Saint-Julien",
"They-sous-Montfort",
"They-sous-Vaudemont",
"Thézan-des-Corbières",
"Thézan-lès-Béziers",
"Thézey-Saint-Martin",
"Thézy-Glimont",
"thézy-glimontois",
"Thézy-Glimontois",
"thézy-glimontoise",
"Thézy-Glimontoise",
"thézy-glimontoises",
"Thézy-Glimontoises",
"Thiaucourt-Regniéville",
"Thiaville-sur-Meurthe",
"Thiéblemont-Farémont",
"Thiel-sur-Acolin",
"Thiers-sur-Thève",
"Thierville-sur-Meuse",
"Thieulloy-l'Abbaye",
"Thieulloy-la-Ville",
"Thieuloy-Saint-Antoine",
"thifensulfuron-méthyle",
"Thil-Manneville",
"Thil-sur-Arroux",
"Thimert-Gâtelles",
"Thimister-Clermont",
"thimistérien-clermontois",
"Thimistérien-Clermontois",
"Thimistérien-Clermontoise",
"Thin-le-Moutier",
"Thionville-sur-Opton",
"thiophanate-éthyl",
"thiophanate-méthyl",
"Thiron-Gardais",
"Thiverval-Grignon",
"Thizy-les-Bourgs",
"Thoiré-sous-Contensor",
"Thoiré-sur-Dinan",
"Thoirette-Coisia",
"Thoisy-la-Berchère",
"Thoisy-le-Désert",
"Thol-lès-Millières",
"Thollon-les-Mémises",
"Thomer-la-Sôgne",
"Thonnance-lès-Joinville",
"Thonnance-les-Moulins",
"Thonne-la-Long",
"Thonne-les-Près",
"Thonne-le-Thil",
"Thonon-les-Bains",
"Thon-Samson",
"thon-samsonais",
"Thon-Samsonais",
"Thon-Samsonaise",
"Thorame-Basse",
"Thorame-Haute",
"Thorée-les-Pins",
"thoré-folléen",
"Thoré-Folléen",
"thoré-folléenne",
"Thoré-Folléenne",
"thoré-folléennes",
"Thoré-Folléennes",
"thoré-folléens",
"Thoré-Folléens",
"Thoré-la-Rochette",
"Thorembais-les-Béguines",
"Thorembais-Saint-Trond",
"Thorens-Glières",
"Thorey-en-Plaine",
"Thorey-Lyautey",
"Thorey-sous-Charny",
"Thorey-sur-Ouche",
"Thorigné-d'Anjou",
"Thorigné-en-Charnie",
"Thorigné-Fouillard",
"Thorigné-sur-Dué",
"Thorigné-sur-Vilaine",
"Thorigny-sur-le-Mignon",
"Thorigny-sur-Marne",
"Thorigny-sur-Oreuse",
"Thornaby-on-Tees",
"Thornton-Cleveleys",
"Thouaré-sur-Loire",
"Thouarsais-Bouildroux",
"Thouars-sur-Arize",
"Thouars-sur-Garonne",
"thoult-tronaisien",
"Thoult-Tronaisien",
"thoult-tronaisienne",
"Thoult-Tronaisienne",
"thoult-tronaisiennes",
"Thoult-Tronaisiennes",
"thoult-tronaisiens",
"Thoult-Tronaisiens",
"Thoury-Férottes",
"Thoury-Ferrottes",
"thraco-illyrienne",
"Thuès-Entre-Valls",
"Thugny-Trugny",
"Thuilley-aux-Groseilles",
"thuit-angevin",
"Thuit-Angevin",
"thuit-angevine",
"Thuit-Angevine",
"thuit-angevines",
"Thuit-Angevines",
"thuit-angevins",
"Thuit-Angevins",
"Thuit-Hébert",
"thuit-signolais",
"Thuit-Signolais",
"thuit-signolaise",
"Thuit-Signolaise",
"thuit-signolaises",
"Thuit-Signolaises",
"thuit-simérien",
"Thuit-Simérien",
"thuit-simérienne",
"Thuit-Simérienne",
"thuit-simériennes",
"Thuit-Simériennes",
"thuit-simériens",
"Thuit-Simériens",
"thun-episcopien",
"Thun-Episcopien",
"thun-épiscopien",
"Thun-Épiscopien",
"Thun-Episcopienne",
"thun-épiscopienne",
"Thun-Épiscopienne",
"Thun-Episcopiennes",
"thun-épiscopiennes",
"Thun-Épiscopiennes",
"Thun-Episcopiens",
"thun-épiscopiens",
"Thun-Épiscopiens",
"Thun-l'Evêque",
"Thun-l'Évêque",
"Thun-Saint-Amand",
"Thun-Saint-Martin",
"Thurey-le-Mont",
"Thury-en-Valois",
"Thury-Harcourt",
"Thury-sous-Clermont",
"Thy-le-Bauduin",
"Thy-le-Château",
"Tian'anmen",
"tibéto-birman",
"tibéto-birmane",
"tibéto-birmanes",
"tibéto-birmans",
"tibio-malléolaire",
"Tibiran-Jaunac",
"ticket-restaurant",
"ti-coune",
"ti-counes",
"tic-tac",
"tic-tacs",
"tic-tac-toe",
"ti-cul",
"tie-break",
"tie-breaks",
"Tielt-Winge",
"T'ien-ngan-men",
"tierce-feuille",
"tierce-rime",
"tierces-rimes",
"Tieste-Uragnoux",
"tiger-kidnappeur",
"tiger-kidnapping",
"tiger-kidnappings",
"Tignieu-Jameyzieu",
"Tigny-Noyelle",
"tigre-garou",
"tigres-garous",
"tiki-taka",
"t-il",
"Til-Châtel",
"Tillay-le-Péneux",
"Tilleul-Dame-Agnès",
"tilleul-othonnais",
"Tilleul-Othonnais",
"tilleul-othonnaise",
"Tilleul-Othonnaise",
"tilleul-othonnaises",
"Tilleul-Othonnaises",
"Tillières-sur-Avre",
"Tilloy-et-Bellay",
"Tilloy-Floriville",
"Tilloy-lès-Conty",
"Tilloy-lès-Hermaville",
"Tilloy-lès-Mofflaines",
"Tilloy-lez-Cambrai",
"Tilloy-lez-Marchiennes",
"Tilly-Capelle",
"Tilly-la-Campagne",
"Tilly-sur-Meuse",
"Tilly-sur-Seulles",
"tilt-shift",
"timbre-amende",
"timbre-poste",
"timbre-quittance",
"timbres-amende",
"timbres-poste",
"timbres-quittances",
"timbre-taxe",
"time-lapse",
"time-lapses",
"time-sharing",
"time-sharings",
"Tin-Akof",
"Tincey-et-Pontrebeau",
"Tinchebray-Bocage",
"Tincourt-Boucly",
"Tinizong-Rona",
"t'inquiète",
"tiou-tiou",
"tiou-tious",
"ti-papoute",
"ti-punch",
"ti-punchs",
"tira-tutto",
"Tirent-Pontéjac",
"tireur-au-cul",
"tireurs-au-cul",
"tiroir-caisse",
"tiroirs-caisses",
"tissu-éponge",
"tissus-éponges",
"titan-cotte",
"titanico-ammonique",
"titanico-ammoniques",
"Tite-Live",
"Titisee-Neustadt",
"titre-service",
"titres-services",
"Tizac-de-Curton",
"Tizac-de-Lapouyade",
"toba-qom",
"Tobel-Tägerschen",
"Tocane-Saint-Apre",
"t'occupe",
"toc-feu",
"Tocqueville-en-Caux",
"Tocqueville-les-Murs",
"Tocqueville-sur-Eu",
"toc-toc",
"toc-tocs",
"Togny-aux-Bœufs",
"t'oh",
"tohu-bohu",
"tohu-bohus",
"tohus-bohus",
"toi-même",
"toits-terrasses",
"toit-terrasse",
"tolclofos-méthyl",
"tombe-cartouche",
"tom-pouce",
"tom-tom",
"tom-toms",
"t-on",
"Tongre-Notre-Dame",
"Tongre-Saint-Martin",
"Tonnay-Boutonne",
"Tonnay-Charente",
"Tonnegrande-Montsinery",
"tonne-grenoir",
"tonne-mètre",
"top-down",
"top-model",
"top-modèle",
"top-modèles",
"top-models",
"topo-guide",
"topo-guides",
"top-secret",
"top-secrets",
"toque-feu",
"Torcé-en-Vallée",
"Torcé-Viviers-en-Charnie",
"torche-cul",
"torche-culs",
"torche-fer",
"torche-pertuis",
"torche-pin",
"torche-pinceau",
"torche-pinceaux",
"torche-pins",
"Torcy-en-Valois",
"Torcy-et-Pouligny",
"Torcy-le-Grand",
"Torcy-le-Petit",
"tord-boyau",
"tord-boyaux",
"tord-nez",
"Torgelow-Holländerei",
"Torigni-sur-Vire",
"Torigny-les-Villes",
"tori-i",
"Torre-Cardela",
"Torre-serona",
"Torricella-Taverne",
"torse-poil",
"torse-poils",
"Torteval-Quesnay",
"tortue-alligator",
"tortue-boite",
"tortue-boîte",
"tortue-duc",
"tortues-alligators",
"tortues-boites",
"tortues-boîtes",
"tortues-ducs",
"tosa-inu",
"Toscolano-Maderno",
"tote-bag",
"tote-bags",
"tôt-fait",
"tôt-faits",
"touch-and-go",
"touche-à-tout",
"touche-pipi",
"touche-touche",
"Touët-de-l'Escarène",
"Touët-sur-Var",
"Touffreville-la-Cable",
"Touffreville-la-Corbeline",
"Touffreville-sur-Eu",
"touille-boeuf",
"touille-bœuf",
"touille-boeufs",
"touille-bœufs",
"Touillon-et-Loutelet",
"Toulis-et-Attencourt",
"Toulon-la-Montagne",
"Toulon-sur-Allier",
"Toulon-sur-Arroux",
"Toulouse-le-Château",
"Toulx-Sainte-Croix",
"Tourailles-sous-Bois",
"tour-à-tour",
"Tourcelles-Chaumont",
"Tourcelles-Chaumont-Quilly-et-Chardeny",
"Tour-de-Faure",
"Tour-en-Bessin",
"Tour-en-Sologne",
"Tourette-du-Château",
"Tourinnes-la-Grosse",
"Tourinnes-Saint-Lambert",
"tour-minute",
"Tournai-sur-Dive",
"Tournan-en-Brie",
"Tournay-sur-Odon",
"tourne-à-gauche",
"tourne-au-vent",
"tourne-case",
"tourne-cases",
"tourne-disque",
"tourne-disques",
"Tournedos-Bois-Hubert",
"Tournedos-sur-Seine",
"tourne-feuille",
"tourne-feuilles",
"tourne-feuillet",
"tourne-feuillets",
"tourne-fil",
"tourne-fils",
"tourne-gants",
"Tournehem-sur-la-Hem",
"tourne-motte",
"tourne-mottes",
"tourne-oreille",
"tourne-oreilles",
"tourne-pierres",
"tourne-soc",
"tourne-socs",
"tourneur-fraiseur",
"tourneurs-fraiseurs",
"tourne-vent",
"tourne-vents",
"Tournon-d'Agenais",
"Tournon-Saint-Martin",
"Tournon-Saint-Pierre",
"Tournon-sur-Rhône",
"Tournous-Darré",
"Tournous-Devant",
"tour-opérateur",
"tour-opérateurs",
"tour-opératrice",
"tour-opératrices",
"Tourouvre-au-Perche",
"Tourrette-Levens",
"Tourrettes-sur-Loup",
"Tours-en-Savoie",
"Tours-en-Vimeu",
"tours-minute",
"tours-opérateurs",
"tours-opératrices",
"tours-sur-marnais",
"Tours-sur-Marnais",
"tours-sur-marnaise",
"Tours-sur-Marnaise",
"tours-sur-marnaises",
"Tours-sur-Marnaises",
"Tours-sur-Marne",
"Tours-sur-Meymont",
"Tourville-en-Auge",
"Tourville-la-Campagne",
"Tourville-la-Chapelle",
"Tourville-la-Rivière",
"Tourville-les-Ifs",
"Tourville-sur-Arques",
"Tourville-sur-Odon",
"Tourville-sur-Pont-Audemer",
"Tourville-sur-Sienne",
"Toury-Lurcy",
"Toury-sur-Jour",
"Tourzel-Ronzières",
"Toussus-le-Noble",
"tout-à-fait",
"tout-à-la-rue",
"tout-à-l'égout",
"tout-blanc",
"tout-blancs",
"tout-communication",
"tout-connaissant",
"toute-bonne",
"toute-bonté",
"toute-cousue",
"toute-épice",
"tout-ensemble",
"tout-en-un",
"toute-petite",
"toute-présence",
"toute-puissance",
"toute-puissante",
"toute-saine",
"toutes-boîtes",
"toutes-bonnes",
"toute-science",
"toutes-petites",
"toutes-puissantes",
"toutes-saines",
"toutes-tables",
"toutes-venues",
"toute-table",
"toute-venue",
"tout-fait",
"tout-faits",
"tout-fécond",
"tout-Londres",
"Tout-Paris",
"tout-parisien",
"tout-parisienne",
"tout-parisiennes",
"tout-parisiens",
"tout-petit",
"tout-petits",
"tout-puissant",
"Tout-Puissant",
"tout-puissants",
"tout-terrain",
"tout-venant",
"tout-venu",
"toxi-infectieux",
"toxi-infection",
"toxi-infections",
"toy-terrier",
"Toy-Viam",
"Traben-Trarbach",
"trace-bouche",
"trace-roulis",
"trace-sautereau",
"trace-vague",
"trachée-artère",
"trachélo-occipital",
"trachéo-bronchite",
"trachéo-bronchites",
"Tracy-Bocage",
"Tracy-le-Mont",
"Tracy-le-Val",
"Tracy-sur-Loire",
"Tracy-sur-Mer",
"trade-union",
"trade-unionisme",
"trade-unionismes",
"trade-unions",
"tragi-comédie",
"tragi-comédies",
"tragi-comique",
"tragi-comiques",
"traîne-bâton",
"traine-buche",
"traîne-bûche",
"traine-buches",
"traîne-bûches",
"traîne-buisson",
"traîne-charrue",
"traîne-la-patte",
"traîne-lattes",
"traîne-malheur",
"traîne-misère",
"traîne-patins",
"traîne-potence",
"traine-ruisseau",
"traîne-ruisseau",
"traine-savate",
"traîne-savate",
"traine-savates",
"traîne-savates",
"traîne-semelle",
"traîne-semelles",
"trains-trams",
"train-train",
"train-trains",
"train-tram",
"trait-d'union",
"trait-d'unioné",
"trait-track",
"Tramont-Emy",
"Tramont-Émy",
"Tramont-Lassus",
"Tramont-Saint-André",
"trams-trains",
"tram-train",
"tranchées-abris",
"tranche-maçonné",
"tranche-montagne",
"tranche-montagnes",
"tranche-papier",
"tranche-tête",
"Tranqueville-Graux",
"tran-tran",
"Traubach-le-Bas",
"Traubach-le-Haut",
"Travedona-Monate",
"Trébons-de-Luchon",
"Trébons-sur-la-Grasse",
"Trédrez-Locquémeau",
"Treffort-Cuisiat",
"tré-flip",
"tré-flips",
"Treilles-en-Gâtinais",
"Treis-Karden",
"Treize-Septiers",
"Treize-Vents",
"Trélou-sur-Marne",
"Tremblay-en-France",
"Tremblay-lès-Gonesse",
"Tremblay-les-Villages",
"Tremblois-lès-Carignan",
"Tremblois-lès-Rocroi",
"Trémont-sur-Saulx",
"Trémouille-Saint-Loup",
"trench-coat",
"trench-coats",
"trente-cinq",
"trente-deux",
"trente-deuxième",
"trente-deuxièmes",
"trente-deuzain",
"trente-deuzains",
"trente-deuzet",
"trente-deuzets",
"trente-douze",
"trente-et-un",
"trente-et-une",
"trente-et-unième",
"trente-et-unièmes",
"trente-huit",
"trente-neuf",
"trente-neuvième",
"trente-quatre",
"trente-sept",
"trente-six",
"trente-trois",
"trente-troisième",
"Trentin-Haut-Adige",
"Trentola-Ducenta",
"trépan-benne",
"trépan-bennes",
"Treschenu-Creyers",
"très-chrétien",
"tré-sept",
"très-haut",
"Très-Haut",
"Trespoux-Rassiels",
"Treuzy-Levelay",
"Trèves-Cunault",
"Trèves-Sarrebourg",
"Trévou-Tréguignec",
"Triac-Lautrait",
"tribénuron-méthyle",
"tribo-électricité",
"tribo-électricités",
"tribo-électrique",
"tribo-électriques",
"trichloro-nitrométhane",
"trichloro-trinitro-benzène",
"tric-trac",
"tric-tracs",
"Trie-Château",
"Trie-la-Ville",
"Triel-sur-Seine",
"Triembach-au-Val",
"Trie-sur-Baïse",
"Triffouilly-les-Oies",
"triflusulfuron-méthyle",
"Trifouillis-les-Oies",
"Trifouilly-les-Oies",
"trinexapac-éthyl",
"Trinité-et-Tobago",
"trinitro-cellulose",
"trinitro-celluloses",
"tripe-madame",
"triple-croche",
"triples-croches",
"trique-madame",
"tris-mal",
"tris-male",
"tris-males",
"tris-maux",
"Trith-Saint-Léger",
"Tritteling-Redlach",
"Trizay-Coutretot-Saint-Serge",
"Trizay-lès-Bonneval",
"Trockenborn-Wolfersdorf",
"Trocy-en-Multien",
"trois-bassinois",
"Trois-Bassinois",
"trois-bassinoise",
"Trois-Bassinoise",
"trois-bassinoises",
"Trois-Bassinoises",
"trois-crayons",
"trois-épines",
"Trois-Fonds",
"Trois-Fontaines",
"Trois-Fontaines-l'Abbaye",
"Troisfontaines-la-Ville",
"trois-huit",
"trois-mâts",
"trois-mâts-goélettes",
"Trois-Monts",
"Trois-Palis",
"trois-pierrais",
"Trois-Pierrais",
"trois-pierraise",
"Trois-Pierraise",
"trois-pierraises",
"Trois-Pierraises",
"Trois-Pistolet",
"Trois-Pistolien",
"Trois-Pistolois",
"trois-ponts",
"Trois-Ponts",
"Trois-Puits",
"trois-quarts",
"Trois-Riverain",
"Trois-Rives",
"Trois-Rivières",
"trois-riviérien",
"Trois-Riviérien",
"trois-riviérienne",
"Trois-Riviérienne",
"trois-riviériennes",
"Trois-Riviériennes",
"trois-riviériens",
"Trois-Riviériens",
"trois-roues",
"trois-six",
"trois-trois",
"Trois-Vèvres",
"Trois-Villes",
"trompe-cheval",
"trompe-couillon",
"trompe-la-mort",
"trompe-l'oeil",
"trompe-l'œil",
"trompe-oreilles",
"trompe-valet",
"Tronville-en-Barrois",
"trop-bu",
"trop-payé",
"trop-payés",
"trop-perçu",
"trop-perçus",
"trop-plein",
"trop-pleins",
"Trosly-Breuil",
"Trosly-Loire",
"trotte-chemin",
"trotte-menu",
"Trouan-le-Grand",
"trouble-fête",
"trouble-fêtes",
"Trouley-Labarthe",
"trousse-barre",
"trousse-barres",
"trousse-pet",
"trousse-pète",
"trousse-pètes",
"trousse-pets",
"trousse-pied",
"trousse-pieds",
"trousse-queue",
"trousse-queues",
"trousse-traits",
"Trouville-la-Haule",
"Trouville-sur-Mer",
"Troye-d'Ariège",
"Trucios-Turtzioz",
"Trucy-l'Orgueilleux",
"Trucy-sur-Yonne",
"Truttemer-le-Grand",
"Truttemer-le-Petit",
"Tschiertschen-Praden",
"tsé-tsé",
"tsé-tsés",
"t-shirt",
"T-shirt",
"t-shirts",
"T-shirts",
"tsoin-tsoin",
"tsouin-tsouin",
"T-SQL",
"tss-tss",
"tta-kun",
"tta-kuns",
"ttun-ttun",
"ttun-ttuns",
"tubéro-infundibulaire",
"tubéro-infundibulaires",
"tue-brebis",
"tue-chien",
"tue-chiens",
"tue-diable",
"tue-diables",
"tue-l'amour",
"tue-loup",
"tue-loups",
"tue-mouche",
"tue-mouches",
"tue-poule",
"tue-teignes",
"Tue-Vaques",
"tue-vent",
"Tugéras-Saint-Maurice",
"Tugny-et-Pont",
"Tümlauer-Koog",
"tuniso-égypto-lybien",
"tupi-guarani",
"Tupin-et-Semons",
"turbo-alternateur",
"turbo-alternateurs",
"turbo-capitalisme",
"turbo-capitalismes",
"turbo-compresseur",
"turbo-compresseurs",
"turbo-prof",
"turbo-profs",
"turco-coréen",
"turco-mongol",
"turco-persan",
"turco-syrien",
"Turing-calculable",
"Turing-calculables",
"turn-over",
"Turnow-Preilack",
"Turquestein-Blancrupt",
"tutti-frutti",
"tu-tu-ban-ban",
"tux-zillertal",
"twin-set",
"twin-sets",
"tz'utujil",
"Ua-Huka",
"Ua-Pou",
"Übach-Palenberg",
"Ubaye-Serre-Ponçon",
"über-célèbre",
"über-célèbres",
"Ubstadt-Weiher",
"Uchacq-et-Parentis",
"u-commerce",
"Uebigau-Wahrenbrück",
"Uecker-Randow",
"Uesslingen-Buch",
"Ugao-Miraballes",
"Uggiate-Trevano",
"Ugny-le-Gay",
"Ugny-l'Equipée",
"Ugny-l'Équipée",
"Ugny-sur-Meuse",
"Uhart-Cize",
"Uharte-Arakil",
"Uhart-Mixe",
"Uhldingen-Mühlhofen",
"Ühlingen-Birkendorf",
"Uhlstädt-Kirchhasel",
"ukiyo-e",
"ukiyo-es",
"Ully-Saint-Georges",
"Uncey-le-Franc",
"unda-maris",
"une-deux",
"uni-dimensionnel",
"uni-dimensionnelle",
"uni-dimensionnelles",
"uni-dimensionnels",
"uni-modal",
"uni-sonore",
"uni-sonores",
"unité-souris",
"unités-souris",
"univers-bloc",
"univers-île",
"univers-îles",
"Unstrut-Hainich",
"upa-upa",
"Upgant-Schott",
"urane-mica",
"uranes-micas",
"urétro-cystotomie",
"urétro-cystotomies",
"uro-génital",
"uro-génitale",
"uro-génitales",
"uro-génitaux",
"Urou-et-Crennes",
"Urroz-Villa",
"Urtenen-Schönbühl",
"Urville-Bocage",
"Urville-Nacqueville",
"Usclades-et-Rieutord",
"Usclas-d'Hérault",
"Usclas-du-Bosc",
"Ussel-d'Allier",
"Usson-du-Poitou",
"Usson-en-Forez",
"Ussy-sur-Marne",
"utéro-lombaire",
"utéro-ovarien",
"utéro-ovarienne",
"utéro-ovariennes",
"utéro-ovariens",
"utéro-placentaire",
"utéro-tubaire",
"utéro-vaginal",
"utéro-vaginale",
"utéro-vaginales",
"utéro-vaginaux",
"UTF-8",
"uto-aztèque",
"uto-aztèques",
"U-turn",
"U-turns",
"uva-ursi",
"uva-ursis",
"Uvernet-Fours",
"Uzay-le-Venon",
"Vabres-l'Abbaye",
"Vabre-Tizac",
"vache-biche",
"vache-garou",
"Vachères-en-Quint",
"Vacheresses-les-Basses",
"vaches-biches",
"vaches-garous",
"Vacognes-Neuilly",
"Vacquerie-le-Boucq",
"Vacqueriette-Erquières",
"vade-in-pace",
"va-de-la-gueule",
"vade-mecum",
"va-de-pied",
"vaeakau-taumako",
"vaeakau-taumakos",
"va-et-vient",
"vagino-vésical",
"Vahl-Ebersing",
"Vahl-lès-Bénestroff",
"Vahl-lès-Faulquemont",
"Vaihingen-sur-l'Enz",
"Vailly-sur-Aisne",
"Vailly-sur-Sauldre",
"vaine-pâture",
"Vaire-Arcier",
"Vaire-le-Petit",
"Vaire-sous-Corbie",
"Vaires-sur-Marne",
"Vair-sur-Loire",
"Vaison-la-Romaine",
"Vaivre-et-Montoille",
"Val-Alainois",
"Val-au-Perche",
"Val-Bélairien",
"Val-Brillantois",
"Val-Cenis",
"Val-d'Aoste",
"Val-d'Auzon",
"Val-Davidois",
"Val-de-Bride",
"Val-de-Chalvagne",
"Val-de-Fier",
"Valdegovía-Gaubea",
"Val-de-la-Haye",
"val-de-marnais",
"Val-de-Marne",
"Val-de-Mercy",
"Val-de-Meuse",
"Valdemoro-Sierra",
"Valdeolmos-Alalpardo",
"Val-d'Epy",
"Val-d'Épy",
"Val-de-Reuil",
"Val-de-Roulans",
"Val-de-Ruz",
"val-de-saânais",
"Val-de-Saânais",
"val-de-saânaise",
"Val-de-Saânaise",
"val-de-saânaises",
"Val-de-Saânaises",
"Val-de-Saâne",
"Val-des-Marais",
"Val-d'Espoirien",
"Val-des-Prés",
"Val-de-Travers",
"Valde-Ucieza",
"Val-de-Vesle",
"Val-de-Vie",
"Val-de-Vière",
"Val-de-Virvée",
"Valdieu-Lutran",
"Val-d'Illiez",
"Val-d'Isère",
"Val-d'Izé",
"Val-d'Oise",
"Val-d'Oisien",
"Val-d'Oisienne",
"Val-d'Oisiennes",
"Val-d'Oisiens",
"Val-d'Orger",
"Vald'orien",
"Val-d'Orien",
"Val-d'Ornain",
"Val-d'Oust",
"Val-du-Layon",
"Valence-d'Albigeois",
"Valence-en-Brie",
"valence-gramme",
"valence-grammes",
"Valence-sur-Baïse",
"valet-à-patin",
"Val-et-Châtillon",
"valet-de-pied",
"valets-à-patin",
"valets-de-pied",
"Valeyres-sous-Montagny",
"Valeyres-sous-Rances",
"Valeyres-sous-Ursins",
"Valfin-lès-Saint-Claude",
"Valfin-sur-Valouse",
"Val-Fouzon",
"Val-Jolois",
"Valkenburg-Houthem",
"Vallant-Saint-Georges",
"Valle-d'Alesani",
"Valle-di-Campoloro",
"Valle-di-Mezzana",
"Valle-di-Rostino",
"Valle-d'Orezza",
"Vallerois-le-Bois",
"Vallerois-Lorioz",
"Valleroy-aux-Saules",
"Valleroy-le-Sec",
"Vallières-les-Grandes",
"Vallières-lès-Metz",
"Vall-llobrega",
"Valloire-sur-Cisse",
"Vallon-en-Sully",
"Vallon-Pont-d'Arc",
"Vallon-sur-Gée",
"Vallouise-Pelvoux",
"Val-Maravel",
"Val-Meer",
"val-mésangeois",
"Val-Mésangeois",
"val-mésangeoise",
"Val-Mésangeoise",
"val-mésangeoises",
"Val-Mésangeoises",
"Val-Mont",
"Val-Morinois",
"Val-Racinois",
"Valras-Plage",
"Val-Revermont",
"val-saint-germinois",
"Val-Saint-Germinois",
"val-saint-germinoise",
"Val-Saint-Germinoise",
"val-saint-germinoises",
"Val-Saint-Germinoises",
"val-saint-pierrais",
"Val-Saint-Pierrais",
"val-saint-pierraise",
"Val-Saint-Pierraise",
"val-saint-pierraises",
"Val-Saint-Pierraises",
"Vals-des-Tilles",
"valse-hésitation",
"Val-Sennevillois",
"valses-hésitations",
"Vals-le-Chastel",
"Vals-les-Bains",
"Val-Sonnette",
"Vals-près-le-Puy",
"Val-Suzon",
"Valverde-Enrique",
"Valzin-en-Petite-Montagne",
"Valz-sous-Châteauneuf",
"Vanault-le-Châtel",
"Vanault-les-Dames",
"Vandenesse-en-Auxois",
"Vandœuvre-lès-Nancy",
"vanity-case",
"vanity-cases",
"Vannes-le-Châtel",
"Vannes-sur-Cosson",
"Vantoux-et-Longevelle",
"Vantoux-lès-Dijon",
"va-nu-pieds",
"va-outre",
"Varces-Allières-et-Risset",
"Varengeville-sur-Mer",
"Varenne-l'Arconce",
"Varenne-Saint-Germain",
"Varennes-Changy",
"Varennes-en-Argonne",
"Varennes-Jarcy",
"Varennes-le-Grand",
"Varennes-lès-Mâcon",
"Varennes-lès-Narcy",
"Varennes-lès-Nevers",
"Varennes-Saint-Honorat",
"Varennes-Saint-Sauveur",
"Varennes-sous-Dun",
"Varennes-sur-Allier",
"Varennes-sur-Amance",
"Varennes-sur-Fouzon",
"Varennes-sur-Loire",
"Varennes-sur-Morge",
"Varennes-sur-Seine",
"Varennes-sur-Tèche",
"Varennes-sur-Usson",
"Varenne-sur-le-Doubs",
"Varennes-Vauzelles",
"Varmie-Mazurie",
"Varneville-Bretteville",
"Varois-et-Chaignot",
"Vars-sur-Roseix",
"vasculo-nerveux",
"vaso-constricteur",
"vaso-constricteurs",
"vaso-constriction",
"vaso-constrictions",
"vaso-dilatateur",
"vaso-dilatateurs",
"vaso-dilatation",
"vaso-dilatations",
"vaso-intestinal",
"vaso-intestinale",
"vaso-intestinales",
"vaso-intestinaux",
"vaso-moteur",
"vaso-motrice",
"Vassieux-en-Vercors",
"Vassimont-et-Chapelaine",
"Vassy-lès-Avallon",
"Vassy-sous-Pisy",
"vas-y",
"va-te-laver",
"va-t-en",
"va-t'en",
"va-t-en-guerre",
"vaterite-A",
"vaterite-As",
"va-tout",
"Vattetot-sous-Beaumont",
"Vattetot-sur-Mer",
"Vatteville-la-Rue",
"Vaucelles-et-Beffecourt",
"Vauchelles-lès-Authie",
"Vauchelles-lès-Domart",
"Vauchelles-les-Quesnoy",
"Vauclerc-et-la-Vallée-Foulon",
"Vauconcourt-Nervezain",
"Vaudeville-le-Haut",
"Vaudoy-en-Brie",
"Vaudreuil-Lacois",
"Vaulnaveys-le-Bas",
"Vaulnaveys-le-Haut",
"Vault-de-Lugny",
"Vaulx-en-Velin",
"Vaulx-Milieu",
"Vaulx-Vraucourt",
"Vaunaveys-la-Rochette",
"Vaux-Andigny",
"Vaux-Champagne",
"vaux-champenois",
"Vaux-Champenois",
"vaux-champenoise",
"Vaux-Champenoise",
"vaux-champenoises",
"Vaux-Champenoises",
"Vaux-Chavanne",
"vaux-chavannois",
"Vaux-Chavannois",
"Vaux-Chavannoise",
"Vaux-d'Amognes",
"Vaux-devant-Damloup",
"Vaux-en-Amiénois",
"Vaux-en-Beaujolais",
"Vaux-en-Bugey",
"Vaux-en-Couhé",
"Vaux-en-Dieulet",
"Vaux-en-Pré",
"Vaux-en-Vermandois",
"Vaux-et-Borset",
"Vaux-et-Chantegrue",
"Vaux-la-Douce",
"Vaux-la-Grande",
"Vaux-la-Petite",
"Vaux-Lavalette",
"Vaux-le-Moncelot",
"Vaux-le-Pénil",
"Vaux-lès-Mouron",
"Vaux-lès-Mouzon",
"Vaux-lès-Palameix",
"Vaux-les-Prés",
"Vaux-lès-Rubigny",
"Vaux-lès-Saint-Claude",
"Vaux-lez-Rosières",
"Vaux-Marquenneville",
"Vaux-Montreuil",
"Vaux-Rouillac",
"Vaux-Saules",
"Vaux-sous-Aubigny",
"Vaux-sous-Bourcq",
"Vaux-sous-Chèvremont",
"Vaux-sous-Coulombs",
"Vaux-sur-Aure",
"Vaux-sur-Blaise",
"Vaux-sur-Eure",
"Vaux-sur-Lunain",
"Vaux-sur-Mer",
"Vaux-sur-Morges",
"vaux-sûrois",
"Vaux-Sûrois",
"Vaux-Sûroise",
"Vaux-sur-Poligny",
"Vaux-sur-Risle",
"Vaux-sur-Saint-Urbain",
"Vaux-sur-Seine",
"Vaux-sur-Seulles",
"Vaux-sur-Somme",
"Vaux-sur-Sûre",
"Vaux-sur-Vienne",
"Vaux-Villaine",
"Vavray-le-Grand",
"Vavray-le-Petit",
"Vayres-sur-Essonne",
"Vazeilles-Limandre",
"Vazeilles-près-Saugues",
"veau-laq",
"veau-marin",
"Veauville-lès-Baons",
"Veauville-lès-Quelles",
"Védrines-Saint-Loup",
"végéto-sulfurique",
"Veigy-Foncenex",
"Velaine-en-Haye",
"Velaine-sous-Amance",
"Velars-sur-Ouche",
"velci-aller",
"Velesmes-Echevanne",
"Velesmes-Échevanne",
"Velesmes-Essarts",
"Vélez-Blanco",
"Vélez-Málaga",
"Vélez-Rubio",
"Vélizy-Villacoublay",
"Vellechevreux-et-Courbenans",
"Vellefrey-et-Vellefrange",
"Velleguindry-et-Levrecey",
"Velle-le-Châtel",
"Vellereille-les-Brayeux",
"Vellereille-le-Sec",
"Vellerot-lès-Belvoir",
"Vellerot-lès-Vercel",
"Velle-sur-Moselle",
"Vellexon-Queutey-et-Vaudey",
"Vellexon-Queutrey-et-Vaudey",
"Velloreille-lès-Choye",
"vélo-école",
"vélo-écoles",
"Velone-Orneto",
"vélo-rail",
"vélo-rails",
"vélos-taxis",
"vélo-taxi",
"Velotte-et-Tatignécourt",
"Velsen-Noord",
"Velsen-Zuid",
"Veltem-Beisem",
"Velzeke-Ruddershove",
"Venarey-les-Laumes",
"Vendays-Montalivet",
"Vendegies-au-Bois",
"Vendegies-sur-Ecaillon",
"Vendegies-sur-Écaillon",
"Vendenesse-lès-Charolles",
"Vendenesse-sur-Arroux",
"Vendeuil-Caply",
"Vendeuvre-du-Poitou",
"Vendeuvre-sur-Barse",
"Vendin-lès-Béthune",
"Vendin-le-Vieil",
"Vendredi-Saint",
"Vendresse-Beaulne",
"Vendresse-et-Troyon",
"Veneux-les-Sablons",
"venez-y-voir",
"Ventenac-Cabardès",
"Ventenac-d'Aude",
"Ventenac-en-Minervois",
"Ventes-Saint-Rémy",
"ventre-madame",
"ventre-saint-gris",
"Ven-Zelderheide",
"Verbano-Cusio-Ossola",
"Vercel-Villedieu-le-Camp",
"Verchain-Maugré",
"ver-coquin",
"Verderel-lès-Sauqueuse",
"Verdun-en-Lauragais",
"Verdun-sur-Garonne",
"Verdun-sur-le-Doubs",
"Verdun-sur-Meuse",
"Verel-de-Montbel",
"Verel-Pragondran",
"verge-d'or",
"Verger-sur-Dive",
"verges-d'or",
"Vergt-de-Biron",
"Vérizet-Fleurville",
"Ver-lès-Chartres",
"Verlhac-Tescou",
"Vern-d'Anjou",
"Verneil-le-Chétif",
"Vernet-la-Varenne",
"Vernet-les-Bains",
"Verneuil-d'Avre-et-d'Iton",
"Verneuil-en-Bourbonnais",
"Verneuil-en-Halatte",
"Verneuil-Grand",
"Verneuil-le-Château",
"Verneuil-l'Etang",
"Verneuil-l'Étang",
"Verneuil-Moustiers",
"Verneuil-Petit",
"Verneuil-sous-Coucy",
"Verneuil-sur-Avre",
"Verneuil-sur-Igneraie",
"Verneuil-sur-Indre",
"Verneuil-sur-Seine",
"Verneuil-sur-Serre",
"Verneuil-sur-Vienne",
"Vernoil-le-Fourrier",
"Vernois-le-Fol",
"Vernois-lès-Belvoir",
"Vernois-lès-Vesvres",
"Vernois-sur-Mance",
"Vernosc-lès-Annonay",
"Vernou-en-Sologne",
"Vernou-la-Celle-sur-Seine",
"Vernou-sur-Brenne",
"Vernou-sur-Seine",
"Vernoux-en-Gâtine",
"Vernoux-en-Vivarais",
"Vernoux-sur-Boutonne",
"Vern-sur-Seiche",
"Véronnes-les-Petites",
"Verpillières-sur-Ource",
"Verrens-Arvey",
"Verreries-de-Moussans",
"Verrey-sous-Drée",
"Verrey-sous-Salmaise",
"Verrières-de-Joux",
"Verrières-du-Grosbois",
"Verrières-en-Anjou",
"Verrières-en-Forez",
"Verrières-le-Buisson",
"Verrines-sous-Celles",
"Verseilles-le-Bas",
"Verseilles-le-Haut",
"Vers-en-Montagne",
"vers-librisme",
"vers-librismes",
"vers-libriste",
"vers-libristes",
"Versols-et-Lapeyre",
"Vers-Pont-du-Gard",
"Vers-sous-Sellières",
"Vers-sur-Méouge",
"Vers-sur-Selles",
"Ver-sur-Launette",
"Ver-sur-Mer",
"vert-bois",
"vert-de-gris",
"vert-de-grisa",
"vert-de-grisai",
"vert-de-grisaient",
"vert-de-grisais",
"vert-de-grisait",
"vert-de-grisâmes",
"vert-de-grisant",
"vert-de-grisas",
"vert-de-grisasse",
"vert-de-grisassent",
"vert-de-grisasses",
"vert-de-grisassiez",
"vert-de-grisassions",
"vert-de-grisât",
"vert-de-grisâtes",
"vert-de-grise",
"vert-de-grisé",
"vert-de-grisée",
"vert-de-grisées",
"vert-de-grisent",
"vert-de-griser",
"vert-de-grisera",
"vert-de-griserai",
"vert-de-griseraient",
"vert-de-griserais",
"vert-de-griserait",
"vert-de-griseras",
"vert-de-grisèrent",
"vert-de-griserez",
"vert-de-griseriez",
"vert-de-griserions",
"vert-de-griserons",
"vert-de-griseront",
"vert-de-grises",
"vert-de-grisés",
"vert-de-grisez",
"vert-de-grisiez",
"vert-de-grisions",
"vert-de-grisons",
"Vert-en-Drouais",
"Verteuil-d'Agenais",
"Verteuil-sur-Charente",
"vert-jaune",
"Vert-le-Grand",
"Vert-le-Petit",
"vert-monnier",
"vert-monniers",
"Vert-Saint-Denis",
"Vert-Toulon",
"Vesaignes-sous-Lafauche",
"Vesaignes-sur-Marne",
"Vésenex-Crassy",
"Vésigneul-sur-Coole",
"Vésigneul-sur-Marne",
"Vesles-et-Caumont",
"vesse-de-loup",
"vesses-de-loup",
"veston-cravate",
"vestons-cravates",
"Vestric-et-Candiac",
"Vesvres-sous-Chalancey",
"vétéro-testamentaire",
"vétéro-testamentaires",
"Vétraz-Monthoux",
"vetula-domussien",
"Vetula-Domussien",
"vetula-domussienne",
"Vetula-Domussienne",
"vetula-domussiennes",
"Vetula-Domussiennes",
"vetula-domussiens",
"Vetula-Domussiens",
"Veuilly-la-Poterie",
"Veules-les-Roses",
"Veulettes-sur-Mer",
"Veurey-Voroize",
"Veuvey-sur-Ouche",
"Veuxhaulles-sur-Aube",
"Veuzain-sur-Loire",
"Vexin-sur-Epte",
"Veyre-Monton",
"Veyrier-du-Lac",
"Veyrines-de-Domme",
"Veyrines-de-Vergt",
"Veyrins-Thuellin",
"Vezels-Roussy",
"Vézeronce-Curtin",
"Vezin-le-Coquet",
"Vézins-de-Lévézou",
"Viala-du-Pas-de-Jaux",
"Viala-du-Tarn",
"Viâpres-le-Grand",
"Viâpres-le-Petit",
"Vic-de-Chassenay",
"Vic-des-Prés",
"vice-amiral",
"vice-amirale",
"vice-amirales",
"vice-amirauté",
"vice-amiraux",
"vice-bailli",
"vice-baillis",
"vice-camérier",
"vice-cardinal",
"vice-champion",
"vice-championne",
"vice-championnes",
"vice-champions",
"vice-chancelier",
"vice-chanceliers",
"vice-consul",
"vice-consulat",
"vice-consulats",
"vice-consule",
"vice-directeur",
"vice-gérance",
"vice-gérances",
"vice-gérant",
"vice-gérants",
"vice-gérent",
"vice-gérents",
"vice-gouverneur",
"vice-légat",
"vice-légation",
"vice-légations",
"vice-légats",
"Vic-en-Bigorre",
"Vic-en-Carladais",
"vice-official",
"vice-préfet",
"vice-présida",
"vice-présidai",
"vice-présidaient",
"vice-présidais",
"vice-présidait",
"vice-présidâmes",
"vice-présidant",
"vice-présidas",
"vice-présidasse",
"vice-présidassent",
"vice-présidasses",
"vice-présidassiez",
"vice-présidassions",
"vice-présidât",
"vice-présidâtes",
"vice-préside",
"vice-présidé",
"vice-présidée",
"vice-présidées",
"vice-présidence",
"vice-présidences",
"vice-président",
"vice-présidente",
"vice-présidentes",
"vice-présidents",
"vice-présider",
"vice-présidera",
"vice-présiderai",
"vice-présideraient",
"vice-présiderais",
"vice-présiderait",
"vice-présideras",
"vice-présidèrent",
"vice-présiderez",
"vice-présideriez",
"vice-présiderions",
"vice-présiderons",
"vice-présideront",
"vice-présides",
"vice-présidés",
"vice-présidez",
"vice-présidiez",
"vice-présidions",
"vice-présidons",
"vice-procureur",
"vice-procureurs",
"vice-recteur",
"vice-recteurs",
"vice-rectrice",
"vice-rectrices",
"vice-reine",
"vice-reines",
"vice-roi",
"vice-rois",
"vice-royal",
"vice-royale",
"vice-royales",
"vice-royauté",
"vice-royautés",
"vice-royaux",
"vice-secrétaire",
"vice-sénéchal",
"vices-gouverneurs",
"vice-versa",
"Vic-Fezensac",
"Vichel-Nanteuil",
"Vic-la-Gardiole",
"Vic-le-Comte",
"Vic-le-Fesq",
"Vicq-d'Auribat",
"Vicq-Exemplet",
"Vicq-sur-Breuilh",
"Vicq-sur-Gartempe",
"Vicq-sur-Mer",
"Vicq-sur-Nahon",
"Vic-sous-Thil",
"Vic-sur-Aisne",
"Vic-sur-Cère",
"Vic-sur-Seille",
"victim-blaming",
"Victot-Pontfol",
"vide-atelier",
"vide-ateliers",
"vide-bouteille",
"vide-bouteilles",
"vide-cave",
"vide-caves",
"vide-citrons",
"vide-couilles",
"vide-dressing",
"vide-dressings",
"vide-gousset",
"vide-goussets",
"vide-grange",
"vide-grenier",
"vide-greniers",
"vide-maison",
"vide-maisons",
"vide-ordure",
"vide-ordures",
"vide-poche",
"vide-poches",
"vide-pomme",
"vide-pommes",
"vide-pommier",
"vide-vite",
"vieil-baugeois",
"Vieil-Baugeois",
"vieil-baugeoise",
"Vieil-Baugeoise",
"vieil-baugeoises",
"Vieil-Baugeoises",
"Vieil-Hesdin",
"vieil-hesdinois",
"Vieil-Hesdinois",
"vieil-hesdinoise",
"Vieil-Hesdinoise",
"vieil-hesdinoises",
"Vieil-Hesdinoises",
"Vieil-Moutier",
"Viel-Arcy",
"Vielle-Adour",
"Vielle-Aure",
"Vielle-Louron",
"Viellenave-d'Arthez",
"Viellenave-de-Bidache",
"Viellenave-de-Navarrenx",
"Viellenave-sur-Bidouze",
"Vielle-Saint-Girons",
"Vielle-Soubiran",
"vielle-soubiranais",
"Vielle-Soubiranais",
"vielle-soubiranaise",
"Vielle-Soubiranaise",
"vielle-soubiranaises",
"Vielle-Soubiranaises",
"Vielle-Tursan",
"viel-mauricien",
"Viel-Mauricien",
"viel-mauricienne",
"Viel-Mauricienne",
"viel-mauriciennes",
"Viel-Mauriciennes",
"viel-mauriciens",
"Viel-Mauriciens",
"Vielmur-sur-Agout",
"Viel-Saint-Remy",
"Viels-Maisons",
"Vienne-en-Arthies",
"Vienne-en-Bessin",
"Vienne-en-Val",
"Vienne-la-Ville",
"Vienne-le-Château",
"viens-poupoulerie",
"viens-poupouleries",
"Vier-Bordes",
"Viereth-Trunstadt",
"Vierset-Barse",
"Vierves-sur-Viroin",
"Vierville-sur-Mer",
"Viet-Nam",
"Viêt-nam",
"Vieu-d'Izenave",
"Viéville-en-Haye",
"Viéville-sous-les-Côtes",
"Vievy-le-Rayé",
"vif-argent",
"vif-gage",
"vigne-blanche",
"vignes-blanches",
"Vignes-la-Côte",
"Vigneulles-lès-Hattonchâtel",
"Vigneul-sous-Montmédy",
"Vigneux-de-Bretagne",
"Vigneux-Hocquet",
"Vigneux-sur-Seine",
"Vignola-Falesina",
"Vignoux-sous-les-Aix",
"Vignoux-sur-Barangeon",
"Vigny-lès-Paray",
"Vigoulet-Auzil",
"Vila-real",
"Vila-rodona",
"Vila-sacra",
"Vila-sana",
"Vila-seca",
"Vilcey-sur-Trey",
"Vildé-Guingalan",
"Villabona-Amasa",
"Village-Neuf",
"village-rue",
"villages-rue",
"villages-rues",
"villages-tas",
"village-tas",
"Villaines-en-Duesmois",
"Villaines-la-Carelle",
"Villaines-la-Gonais",
"Villaines-la-Juhel",
"Villaines-les-Prévôtes",
"Villaines-les-Rochers",
"Villaines-sous-Bois",
"Villaines-sous-Lucé",
"Villaines-sous-Malicorne",
"Villar-d'Arêne",
"Villard-Bonnot",
"villard-de-lans",
"Villard-de-Lans",
"villard-d'hérien",
"Villard-d'Hérien",
"villard-d'hérienne",
"Villard-d'Hérienne",
"villard-d'hériennes",
"Villard-d'Hériennes",
"villard-d'hériens",
"Villard-d'Hériens",
"Villard-d'Héry",
"Villard-Léger",
"Villard-Notre-Dame",
"Villard-Reculas",
"Villard-Reymond",
"Villard-Saint-Christophe",
"Villard-Saint-Sauveur",
"Villard-Sallet",
"Villards-d'Héria",
"Villard-sur-Bienne",
"Villard-sur-Doron",
"Villard-sur-l'Ain",
"Villarejo-Periesteban",
"Villar-en-Val",
"Villar-Loubière",
"Villarodin-Bourget",
"Villar-Saint-Anselme",
"Villar-Saint-Pancrace",
"Villars-Brandis",
"Villars-Colmars",
"Villarsel-sur-Marly",
"Villars-en-Azois",
"Villars-en-Pons",
"Villars-Épeney",
"Villars-et-Villenotte",
"Villars-Fontaine",
"Villars-le-Comte",
"Villars-le-Pautel",
"Villars-lès-Blamont",
"Villars-les-Bois",
"Villars-les-Dombes",
"Villars-le-Sec",
"Villars-les-Moines",
"Villars-le-Terroir",
"Villars-Sainte-Croix",
"Villars-Saint-Georges",
"Villars-Saint-Marcellin",
"Villars-Santenoge",
"Villars-sous-Dampjoux",
"Villars-sous-Ecot",
"Villars-sous-Écot",
"Villars-sous-Yens",
"Villars-sur-Glâne",
"Villars-sur-Var",
"Villarta-Quintana",
"Villarzel-Cabardès",
"Villarzel-du-Razès",
"Villaverde-Mogina",
"Villaz-Saint-Pierre",
"Villebois-Lavalette",
"Villebois-les-Pins",
"Villebon-sur-Yvette",
"Villecey-sur-Mad",
"Villecomtal-sur-Arros",
"Villedieu-la-Blouère",
"Villedieu-le-Camp",
"Villedieu-le-Château",
"Villedieu-lès-Bailleul",
"Villedieu-les-Poêles",
"Villedieu-les-Poêles-Rouffigny",
"Villedieu-sur-Indre",
"Villefranche-d'Albigeois",
"Villefranche-d'Allier",
"Villefranche-de-Conflent",
"Villefranche-de-Lauragais",
"Villefranche-de-Lonchat",
"Villefranche-de-Longchapt",
"Villefranche-de-Panat",
"Villefranche-de-Rouergue",
"Villefranche-du-Périgord",
"Villefranche-du-Queyran",
"Villefranche-le-Château",
"Villefranche-sur-Cher",
"Villefranche-sur-Mer",
"Villefranche-sur-Saône",
"Villegusien-le-Lac",
"Villeloin-Coulangé",
"Villelongue-d'Aude",
"Villelongue-de-la-Salanque",
"Villelongue-dels-Monts",
"Villemagne-l'Argentière",
"Villemaur-sur-Vanne",
"Villemeux-sur-Eure",
"Villemoiron-en-Othe",
"Villemoisson-sur-Orge",
"Villemur-sur-Tarn",
"Villenauxe-la-Grande",
"Villenauxe-la-Petite",
"Villenave-de-Rions",
"Villenave-d'Ornon",
"Villenave-près-Béarn",
"Villenave-près-Marsac",
"Villennes-sur-Seine",
"Villequier-Aumont",
"Villerouge-Termenès",
"Villeroy-sur-Méholle",
"villes-champignons",
"villes-clés",
"Villesèque-des-Corbières",
"villes-États",
"villes-provinces",
"Villes-sur-Auzon",
"Villey-le-Sec",
"Villey-Saint-Etienne",
"Villey-Saint-Étienne",
"Villey-sur-Tille",
"Villez-sous-Bailleul",
"Villez-sur-le-Neubourg",
"Villié-Morgon",
"Villieu-Loyes-Mollon",
"Villingen-Schwenningen",
"Villons-les-Buissons",
"Villotte-devant-Louppy",
"Villotte-Saint-Seine",
"Villotte-sur-Aire",
"Villotte-sur-Ource",
"Vilosnes-Haraumont",
"Vilters-Wangs",
"Vincent-Froideville",
"Vincy-Manœuvre",
"Vincy-Reuil-et-Magny",
"Vindrac-Alayrac",
"Vineuil-Saint-Firmin",
"vingt-cinq",
"Vingt-Cinq",
"vingt-cinquième",
"vingt-cinquièmes",
"vingt-deux",
"vingt-deuxain",
"vingt-deuxains",
"vingt-deuxième",
"vingt-deuxièmes",
"vingt-et-un",
"vingt-et-une",
"vingt-et-unième",
"vingt-et-unièmes",
"Vingt-Hanaps",
"vingt-hanapsien",
"Vingt-Hanapsien",
"vingt-hanapsienne",
"Vingt-Hanapsienne",
"vingt-hanapsiennes",
"Vingt-Hanapsiennes",
"vingt-hanapsiens",
"Vingt-Hanapsiens",
"vingt-huit",
"Vingt-Huit",
"vingt-huitième",
"vingt-huitièmes",
"vingt-neuf",
"vingt-neuvième",
"vingt-neuvièmes",
"vingt-quatrain",
"vingt-quatrains",
"vingt-quatre",
"vingt-quatrième",
"vingt-quatrièmes",
"vingt-sept",
"Vingt-Sept",
"vingt-septième",
"vingt-septièmes",
"vingt-six",
"vingt-sixain",
"vingt-sixième",
"vingt-sixièmes",
"vingt-trois",
"vingt-troisième",
"vingt-troisièmes",
"vino-benzoïque",
"vino-benzoïques",
"Vinon-sur-Verdon",
"Vins-sur-Caramy",
"Viodos-Abense-de-Bas",
"violet-évêque",
"Viols-en-Laval",
"Viols-le-Fort",
"viornes-tin",
"viorne-tin",
"vire-capot",
"vire-capots",
"Viré-en-Champagne",
"Vire-sur-Lot",
"Vireux-Molhain",
"Vireux-Wallerand",
"vire-vire",
"Virey-le-Grand",
"Virey-sous-Bar",
"Virginal-Samme",
"Virginie-Occidentale",
"Virieu-le-Grand",
"Virieu-le-Petit",
"Viry-Châtillon",
"Viry-Noureuil",
"visa-bourgien",
"Visa-Bourgien",
"visa-bourgienne",
"Visa-Bourgienne",
"visa-bourgiennes",
"Visa-Bourgiennes",
"visa-bourgiens",
"Visa-Bourgiens",
"vis-à-vis",
"Vis-en-Artois",
"Vissac-Auteyrac",
"visuo-spacial",
"visuo-spaciale",
"visuo-spaciales",
"visuo-spaciaux",
"vit-de-mulet",
"Vitoria-Gasteiz",
"Vitrac-en-Viadène",
"Vitrac-Saint-Vincent",
"Vitrac-sur-Montane",
"Vitrai-sous-Laigle",
"Vitray-en-Beauce",
"Vitray-sous-Brézolles",
"Vitrey-sur-Mance",
"Vitrolles-en-Luberon",
"Vitrolles-en-Lubéron",
"Vitry-aux-Loges",
"Vitry-en-Artois",
"Vitry-en-Charollais",
"Vitry-en-Montagne",
"Vitry-en-Perthois",
"Vitry-Laché",
"Vitry-la-Ville",
"Vitry-le-Croisé",
"Vitry-le-François",
"Vitry-lès-Cluny",
"Vitry-lès-Nogent",
"Vitry-sur-Loire",
"Vitry-sur-Orne",
"Vitry-sur-Seine",
"Vittel-menthe",
"Vitz-sur-Authie",
"Viuz-en-Sallaz",
"Viuz-la-Chiésaz",
"vivaro-alpin",
"vivaro-alpins",
"vive-eau",
"vive-la-joie",
"Vive-Saint-Bavon",
"Vive-Saint-Éloi",
"vives-eaux",
"Vivier-au-Court",
"Vivier-Danger",
"Viviers-du-Lac",
"Viviers-le-Gras",
"Viviers-lès-Lavaur",
"Viviers-lès-Montagnes",
"Viviers-lès-Offroicourt",
"Viviers-sur-Artaut",
"Viviers-sur-Chiers",
"vivre-ensemble",
"v'là",
"Vlaardinger-Ambacht",
"Vlagtwedder-Barlage",
"Vlagtwedder-Veldhuis",
"Vlodrop-Station",
"v'nir",
"v'nu",
"Vœlfling-lès-Bouzonville",
"Vœuil-et-Giget",
"Vogelsang-Warsin",
"Void-Vacon",
"voile-manteau",
"voile-manteaux",
"Voisins-le-Bretonneux",
"vois-tu",
"voiture-bar",
"voiture-bélier",
"voiture-cage",
"voiture-couchettes",
"voiture-lits",
"voiture-pilote",
"voiture-restaurant",
"voiture-salon",
"voitures-balais",
"voitures-bars",
"voitures-béliers",
"voitures-cages",
"voitures-couchettes",
"voitures-lits",
"voitures-pilotes",
"voitures-restaurants",
"voitures-salons",
"voitures-ventouses",
"voiture-ventouse",
"Voivres-lès-le-Mans",
"vol-au-vent",
"vol-bélier",
"vol-béliers",
"volley-ball",
"volley-balls",
"Vollore-Montagne",
"Vollore-Ville",
"Volmerange-lès-Boulay",
"Volmerange-les-Mines",
"volt-ampère",
"volt-ampères",
"volte-face",
"volte-faces",
"Vomécourt-sur-Madon",
"vomito-negro",
"vomito-négro",
"Voor-Drempt",
"Voray-sur-l'Ognon",
"Vorges-les-Pins",
"Voroux-Goreux",
"Voroux-lez-Liers",
"Vortum-Mullem",
"Vosne-Romanée",
"Vouillé-les-Marais",
"Voulaines-les-Templiers",
"Vouneuil-sous-Biard",
"Vouneuil-sur-Vienne",
"vous-même",
"vous-mêmes",
"Voutenay-sur-Cure",
"Vouthon-Bas",
"Vouthon-Haut",
"Vouvray-sur-Huisne",
"Vouvray-sur-Loir",
"Vovray-en-Bornes",
"voyageur-kilomètre",
"voyageurs-kilomètres",
"voyez-vous",
"Vraignes-en-Vermandois",
"Vraignes-lès-Hornoy",
"Vresse-sur-Semois",
"Vrigne-aux-Bois",
"Vrigne-Meuse",
"vrigne-meusien",
"Vrigne-Meusien",
"vrigne-meusienne",
"Vrigne-Meusienne",
"vrigne-meusiennes",
"Vrigne-Meusiennes",
"vrigne-meusiens",
"Vrigne-Meusiens",
"Vrijhoeve-Capelle",
"Vroncourt-la-Côte",
"v's",
"vu-arriver",
"Vufflens-la-Ville",
"Vufflens-le-Château",
"Vuisternens-devant-Romont",
"Vuisternens-en-Ogoz",
"Vulaines-lès-Provins",
"Vulaines-sur-Seine",
"Vyans-le-Val",
"Vyle-et-Tharoul",
"Vy-le-Ferroux",
"Vy-lès-Filain",
"Vy-lès-Lure",
"vy-les-luron",
"Vy-les-Luron",
"vy-les-lurone",
"Vy-les-Lurone",
"vy-les-lurones",
"Vy-les-Lurones",
"vy-les-lurons",
"Vy-les-Lurons",
"Vy-lès-Rupt",
"Vyt-lès-Belvoir",
"Wadonville-en-Woëvre",
"Wageningen-Hoog",
"wagon-bar",
"wagon-citerne",
"wagon-couchette",
"wagon-couchettes",
"wagon-foudre",
"wagon-grue",
"wagon-lit",
"wagon-lits",
"wagon-poche",
"wagon-poste",
"wagon-réservoir",
"wagon-restaurant",
"wagon-salon",
"wagons-bars",
"wagons-citernes",
"wagons-couchettes",
"wagons-foudres",
"wagons-grues",
"wagons-lits",
"wagons-réservoirs",
"wagons-restaurants",
"wagons-salons",
"wagons-tombereaux",
"wagons-trémie",
"wagon-tombereau",
"wagon-trémie",
"wagon-vanne",
"wah-wah",
"Wailly-Beaucamp",
"Waldeck-Frankenberg",
"Waldfischbach-Burgalben",
"Waldhof-Falkenstein",
"Wald-Michelbach",
"Waldshut-Tiengen",
"Walhain-Saint-Paul",
"Walincourt-Selvigny",
"walkies-talkies",
"walkie-talkie",
"Wallendorf-Pont",
"Wallers-en-Fagne",
"Wallers-Trélon",
"Wallis-et-Futuna",
"Wallon-Cappel",
"wallon-cappelois",
"Wallon-Cappelois",
"wallon-cappeloise",
"Wallon-Cappeloise",
"wallon-cappeloises",
"Wallon-Cappeloises",
"Waltenheim-sur-Zorn",
"Walton-on-Thames",
"Wanchy-Capval",
"Wandignies-Hamage",
"Wanfercée-Baulet",
"Wangenbourg-Engenthal",
"Wangen-Brüttisellen",
"Wannegem-Lede",
"Wanzleben-Börde",
"waray-waray",
"Waret-la-Chaussée",
"Waret-l'Évêque",
"Warfusée-Abancourt",
"Wargemoulin-Hurlus",
"Wargnies-le-Grand",
"Wargnies-le-Petit",
"Warlencourt-Eaucourt",
"Warlincourt-lès-Pas",
"Warloy-Baillon",
"Warnant-Dreye",
"Warneton-Sud",
"Wartenberg-Rohrbach",
"Warth-Weiningen",
"Wasmes-Audemez-Briffœil",
"Wasnes-au-Bac",
"Wassy-sur-Blaise",
"water-ballast",
"water-ballasts",
"water-closet",
"water-closets",
"Waterland-Oudeman",
"Watermael-Boitsfort",
"water-polo",
"water-proof",
"water-proofs",
"Wath-on-Dearne",
"Wath-upon-Dearne",
"Wattignies-la-Victoire",
"Wauthier-Braine",
"wauthier-brainois",
"Wauthier-Brainois",
"Wauthier-Brainoise",
"waux-hall",
"waux-halls",
"Wavrans-sur-l'Aa",
"Wavrans-sur-Ternoise",
"Wavrechain-sous-Denain",
"Wavrechain-sous-Faulx",
"Wavre-Notre-Dame",
"Wavre-Saint-Catherine",
"Wavre-Sainte-Catherine",
"waza-ari",
"w.-c.",
"web-to-print",
"week-end",
"week-ends",
"Weiler-la-Tour",
"Weiler-Simmerberg",
"Weilheim-Schongau",
"Weimar-Campagne",
"Weißenborn-Lüderode",
"Weißenburg-Gunzenhausen",
"Welles-Pérennes",
"Wemaers-Cappel",
"wemaers-cappelois",
"Wemaers-Cappelois",
"wemaers-cappeloise",
"Wemaers-Cappeloise",
"wemaers-cappeloises",
"Wemaers-Cappeloises",
"Wenningstedt-Braderup",
"Wenum-Wiesel",
"Wernberg-Köblitz",
"Werra-Meissner",
"Wervicq-Nord",
"Wervicq-Sud",
"Wesembeek-Ophem",
"wesh-wesh",
"West-Barendrecht",
"West-Cappel",
"west-cappelois",
"West-Cappelois",
"west-cappeloise",
"West-Cappeloise",
"west-cappeloises",
"West-Cappeloises",
"Westerhaar-Vriezenveensewijk",
"Wester-Koggenland",
"Wester-Ohrstedt",
"West-Graftdijk",
"Westhouse-Marmoutier",
"Westkapelle-Binnen",
"West-Knollendam",
"Westrem-Saint-Denis",
"West-Souburg",
"West-Terschelling",
"Wettin-Löbejün",
"Wezembeek-Oppem",
"Wez-Velvain",
"white-spirit",
"Wickersheim-Wilshausen",
"Wiège-Faty",
"Wiencourt-l'Equipée",
"Wiencourt-l'Équipée",
"Wierre-au-Bois",
"Wierre-Effroy",
"Wi-Fi",
"Wihr-au-Val",
"Wihr-en-Plaine",
"Wilkau-Haßlau",
"Willer-sur-Thur",
"willy-willy",
"Wilp-Achterhoek",
"Wilzenberg-Hußweiler",
"Wingen-sur-Moder",
"Winghe-Saint-Georges",
"Winkel-Sainte-Croix",
"Winkel-Saint-Éloi",
"Wintzenheim-Kochersberg",
"Wiry-au-Mont",
"Witry-lès-Reims",
"witsuwit'en",
"Wœlfling-lès-Sarreguemines",
"Wokuhl-Dabelow",
"Wolframs-Eschenbach",
"Wolfsburg-Unkeroda",
"Woluwe-Saint-Étienne",
"Woluwe-Saint-Lambert",
"Woluwe-Saint-Pierre",
"Wormeldange-Haut",
"Wortegem-Petegem",
"wuchiaping'ien",
"Wuchiaping'ien",
"Wünnewil-Flamatt",
"Wust-Fischbeck",
"Wutha-Farnroda",
"Wy-dit-Joli-Village",
"Xanton-Chassenon",
"X-arbre",
"X-arbres",
"X-board",
"X-boards",
"Xivray-et-Marvoisin",
"Xivry-Circourt",
"Xonrupt-Longemer",
"X-SAMPA",
"y'a",
"yacht-club",
"yacht-clubs",
"Yaucourt-Bussus",
"Yécora-Iekora",
"Yernée-Fraineux",
"Yèvre-la-Ville",
"Yèvre-le-Châtel",
"Yèvres-le-Petit",
"yé-yé",
"Ygos-Saint-Saturnin",
"yin-yang",
"ylang-ylang",
"yocto-ohm",
"yocto-ohms",
"Yo-kai",
"Yorkshire-et-Humber",
"yotta-ampère",
"yotta-ampères",
"young-ice",
"young-ices",
"you-you",
"you-yous",
"yo-yo",
"yo-yota",
"yo-yotai",
"yo-yotaient",
"yo-yotais",
"yo-yotait",
"yo-yotâmes",
"yo-yotant",
"yo-yotas",
"yo-yotasse",
"yo-yotassent",
"yo-yotasses",
"yo-yotassiez",
"yo-yotassions",
"yo-yotât",
"yo-yotâtes",
"yo-yote",
"yo-yoté",
"yo-yotée",
"yo-yotées",
"yo-yotent",
"yo-yoter",
"yo-yotera",
"yo-yoterai",
"yo-yoteraient",
"yo-yoterais",
"yo-yoterait",
"yo-yoteras",
"yo-yotèrent",
"yo-yoterez",
"yo-yoteriez",
"yo-yoterions",
"yo-yoterons",
"yo-yoteront",
"yo-yotes",
"yo-yotés",
"yo-yotez",
"yo-yotiez",
"yo-yotions",
"yo-yotons",
"Ypreville-Biville",
"Yronde-et-Buron",
"Yssac-la-Tourette",
"yuki-onna",
"yuki-onnas",
"Yverdon-les-Bains",
"Yves-Gomezée",
"Yvetot-Bocage",
"Yvignac-la-Tour",
"Yville-sur-Seine",
"Yvoy-le-Marron",
"Yvrac-et-Malleyrand",
"Yvré-le-Pôlin",
"Yvré-l'Evêque",
"Yvré-l'Évêque",
"Yzeures-sur-Creuse",
"Z9-12:Ac",
"Z9-dodécénylacétate",
"Zahna-Elster",
"zapil's",
"zapil'ser",
"zayse-zergulla",
"Z/E-8-DDA",
"zébré-de-vert",
"Zella-Mehlis",
"Zeltingen-Rachtig",
"z'en",
"Zend-avesta",
"zénith-secteur",
"zénith-secteurs",
"zepto-ohm",
"zepto-ohms",
"Zernitz-Lohm",
"zéro-dimensionnel",
"Zétrud-Lumay",
"zetta-ampère",
"zetta-ampères",
"Zeulenroda-Triebes",
"Zevenhuizen-Moerkapelle",
"Z-grille",
"Z-grilles",
"Zichen-Zussen-Bolder",
"Ziegra-Knobelsdorf",
"Zihlschlacht-Sitterdorf",
"Zillis-Reischen",
"zinc-blende",
"zinc-blendes",
"Ziortza-Bolibar",
"zizi-panpan",
"Zoerle-Parwijs",
"Zoeterwoude-Dorp",
"Zoeterwoude-Rijndijk",
"zones-clés",
"zoo-cinéma",
"zoo-cinémas",
"zoo-gymnaste",
"zoo-gymnastes",
"Zschaitz-Ottewig",
"Zuid-Beijerland",
"Zuid-Eierland",
"Zuid-Polsbroek",
"Zuid-Scharwoude",
"Zuid-Spierdijk",
"Zuid-Waddinxveen",
"zulgo-gemzek",
"zuricho-montpelliérain",
"zuricho-montpelliéraine",
"zuricho-montpelliéraines",
"zuricho-montpelliérains",
"zut-au-berger",
"Zwaagdijk-Oost",
"Zwaagdijk-West",
"z'y",
"z'yeuta",
"z'yeutai",
"z'yeutaient",
"z'yeutais",
"z'yeutait",
"z'yeutâmes",
"z'yeutant",
"z'yeutas",
"z'yeutasse",
"z'yeutassent",
"z'yeutasses",
"z'yeutassiez",
"z'yeutassions",
"z'yeutât",
"z'yeutâtes",
"z'yeute",
"z'yeuté",
"z'yeutée",
"z'yeutées",
"z'yeutent",
"z'yeuter",
"z'yeutera",
"z'yeuterai",
"z'yeuteraient",
"z'yeuterais",
"z'yeuterait",
"z'yeuteras",
"z'yeutèrent",
"z'yeuterez",
"z'yeuteriez",
"z'yeuterions",
"z'yeuterons",
"z'yeuteront",
"z'yeutes",
"z'yeutés",
"z'yeutez",
"z'yeutiez",
"z'yeutions",
"z'yeutons",
"z'yeux",
"zygomato-auriculaire",
"zygomato-labial",
"zygomato-maxillaire",
"zy-va",
"zy-vas",
"α-Dahllite",
"α-Dahllites",
"α-D-glucofuranose",
"α-D-glucopyranose",
"α-D-ribofuranose",
"α-D-ribopyranose",
"α-L-ribofuranose",
"α-L-ribopyranose",
"β-Dahllite",
"β-Dahllites",
"β-D-glucofuranose",
"β-D-glucopyranose",
"β-D-ribofuranose",
"β-D-ribopyranose",
"β-galactosidase",
"β-lactamine",
"β-L-ribofuranose",
"β-L-ribopyranose",
"β-sitostérol",
"β-sitostérols",
"γ-Dahllite",
"γ-Dahllites",
"μ-métal",
"σ-additivité",
"σ-additivités",
"σ-compacité",
"σ-compact",
"σ-compacts"]
| mit |
KittyTristy/roguekit | libs/libtcodpy.py | 1 | 60843 | #
# libtcod 1.5.1 python wrapper
# Copyright (c) 2008,2009,2010 Jice & Mingos
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * The name of Jice or Mingos may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY JICE AND MINGOS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL JICE OR MINGOS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
import ctypes
import struct
from ctypes import *
if not hasattr(ctypes, "c_bool"): # for Python < 2.6
c_bool = c_uint8
try: #import NumPy if available
import numpy
numpy_available = True
except ImportError:
numpy_available = False
LINUX=False
MAC=False
MINGW=False
MSVC=False
HAIKU=False
if sys.platform.find('linux') != -1:
if struct.calcsize("P") * 8 == 64:
_lib = ctypes.cdll['./libs/libtcod.so.64']
else:
_lib = ctypes.cdll['./libs/libtcod.so']
LINUX=True
elif sys.platform.find('darwin') != -1:
_lib = ctypes.cdll['./libs/libtcod.dylib']
MAC = True
elif sys.platform.find('haiku') != -1:
_lib = ctypes.cdll['./libs/libtcod.so']
HAIKU = True
else:
try:
_lib = ctypes.cdll['./libs/libtcod-mingw.dll']
MINGW=True
except WindowsError:
_lib = ctypes.cdll['./libs/libtcod-VS.dll']
MSVC=True
    # On Windows, ctypes doesn't work well with functions returning structs,
    # so we have to use the _wrapper functions instead
_lib.TCOD_color_multiply = _lib.TCOD_color_multiply_wrapper
_lib.TCOD_color_add = _lib.TCOD_color_add_wrapper
_lib.TCOD_color_multiply_scalar = _lib.TCOD_color_multiply_scalar_wrapper
_lib.TCOD_color_subtract = _lib.TCOD_color_subtract_wrapper
_lib.TCOD_color_lerp = _lib.TCOD_color_lerp_wrapper
_lib.TCOD_console_get_default_background = _lib.TCOD_console_get_default_background_wrapper
_lib.TCOD_console_get_default_foreground = _lib.TCOD_console_get_default_foreground_wrapper
_lib.TCOD_console_get_char_background = _lib.TCOD_console_get_char_background_wrapper
_lib.TCOD_console_get_char_foreground = _lib.TCOD_console_get_char_foreground_wrapper
_lib.TCOD_console_get_fading_color = _lib.TCOD_console_get_fading_color_wrapper
_lib.TCOD_image_get_pixel = _lib.TCOD_image_get_pixel_wrapper
_lib.TCOD_image_get_mipmap_pixel = _lib.TCOD_image_get_mipmap_pixel_wrapper
_lib.TCOD_parser_get_color_property = _lib.TCOD_parser_get_color_property_wrapper
HEXVERSION = 0x010501
STRVERSION = "1.5.1"
TECHVERSION = 0x01050103
############################
# color module
############################
class Color(Structure):
_fields_ = [('r', c_uint8),
('g', c_uint8),
('b', c_uint8),
]
def __eq__(self, c):
return _lib.TCOD_color_equals(self, c)
def __mul__(self, c):
if isinstance(c,Color):
return _lib.TCOD_color_multiply(self, c)
else:
return _lib.TCOD_color_multiply_scalar(self, c_float(c))
def __add__(self, c):
return _lib.TCOD_color_add(self, c)
def __sub__(self, c):
return _lib.TCOD_color_subtract(self, c)
def __repr__(self):
return "Color(%d,%d,%d)" % (self.r, self.g, self.b)
def __getitem__(self, i):
if type(i) == str:
return getattr(self, i)
else:
return getattr(self, "rgb"[i])
def __setitem__(self, i, c):
if type(i) == str:
setattr(self, i, c)
else:
setattr(self, "rgb"[i], c)
def __iter__(self):
yield self.r
yield self.g
yield self.b
# Should be valid on any platform, check it! Has to be done after Color is defined.
if MAC:
from cprotos import setup_protos
setup_protos(_lib)
_lib.TCOD_color_equals.restype = c_bool
_lib.TCOD_color_multiply.restype = Color
_lib.TCOD_color_multiply_scalar.restype = Color
_lib.TCOD_color_add.restype = Color
_lib.TCOD_color_subtract.restype = Color
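# Added usage note (not part of the upstream wrapper): with the restypes above
# in place, Color instances support arithmetic and indexing, for example:
#   c = Color(200, 100, 50)
#   half = c * 0.5                 # scales each channel via TCOD_color_multiply_scalar
#   mixed = c + Color(0, 0, 100)   # per-channel add, clamped at 255 by libtcod
#   r, g, b = c                    # unpacking works through __iter__
#   c['r'], c[0]                   # both address the red channel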
# default colors
# grey levels
black=Color(0,0,0)
darkest_grey=Color(31,31,31)
darker_grey=Color(63,63,63)
dark_grey=Color(95,95,95)
grey=Color(127,127,127)
light_grey=Color(159,159,159)
lighter_grey=Color(191,191,191)
lightest_grey=Color(223,223,223)
darkest_gray=Color(31,31,31)
darker_gray=Color(63,63,63)
dark_gray=Color(95,95,95)
gray=Color(127,127,127)
light_gray=Color(159,159,159)
lighter_gray=Color(191,191,191)
lightest_gray=Color(223,223,223)
white=Color(255,255,255)
# sepia
darkest_sepia=Color(31,24,15)
darker_sepia=Color(63,50,31)
dark_sepia=Color(94,75,47)
sepia=Color(127,101,63)
light_sepia=Color(158,134,100)
lighter_sepia=Color(191,171,143)
lightest_sepia=Color(222,211,195)
#standard colors
red=Color(255,0,0)
flame=Color(255,63,0)
orange=Color(255,127,0)
amber=Color(255,191,0)
yellow=Color(255,255,0)
lime=Color(191,255,0)
chartreuse=Color(127,255,0)
green=Color(0,255,0)
sea=Color(0,255,127)
turquoise=Color(0,255,191)
cyan=Color(0,255,255)
sky=Color(0,191,255)
azure=Color(0,127,255)
blue=Color(0,0,255)
han=Color(63,0,255)
violet=Color(127,0,255)
purple=Color(191,0,255)
fuchsia=Color(255,0,255)
magenta=Color(255,0,191)
pink=Color(255,0,127)
crimson=Color(255,0,63)
# dark colors
dark_red=Color(191,0,0)
dark_flame=Color(191,47,0)
dark_orange=Color(191,95,0)
dark_amber=Color(191,143,0)
dark_yellow=Color(191,191,0)
dark_lime=Color(143,191,0)
dark_chartreuse=Color(95,191,0)
dark_green=Color(0,191,0)
dark_sea=Color(0,191,95)
dark_turquoise=Color(0,191,143)
dark_cyan=Color(0,191,191)
dark_sky=Color(0,143,191)
dark_azure=Color(0,95,191)
dark_blue=Color(0,0,191)
dark_han=Color(47,0,191)
dark_violet=Color(95,0,191)
dark_purple=Color(143,0,191)
dark_fuchsia=Color(191,0,191)
dark_magenta=Color(191,0,143)
dark_pink=Color(191,0,95)
dark_crimson=Color(191,0,47)
# darker colors
darker_red=Color(127,0,0)
darker_flame=Color(127,31,0)
darker_orange=Color(127,63,0)
darker_amber=Color(127,95,0)
darker_yellow=Color(127,127,0)
darker_lime=Color(95,127,0)
darker_chartreuse=Color(63,127,0)
darker_green=Color(0,127,0)
darker_sea=Color(0,127,63)
darker_turquoise=Color(0,127,95)
darker_cyan=Color(0,127,127)
darker_sky=Color(0,95,127)
darker_azure=Color(0,63,127)
darker_blue=Color(0,0,127)
darker_han=Color(31,0,127)
darker_violet=Color(63,0,127)
darker_purple=Color(95,0,127)
darker_fuchsia=Color(127,0,127)
darker_magenta=Color(127,0,95)
darker_pink=Color(127,0,63)
darker_crimson=Color(127,0,31)
# darkest colors
darkest_red=Color(63,0,0)
darkest_flame=Color(63,15,0)
darkest_orange=Color(63,31,0)
darkest_amber=Color(63,47,0)
darkest_yellow=Color(63,63,0)
darkest_lime=Color(47,63,0)
darkest_chartreuse=Color(31,63,0)
darkest_green=Color(0,63,0)
darkest_sea=Color(0,63,31)
darkest_turquoise=Color(0,63,47)
darkest_cyan=Color(0,63,63)
darkest_sky=Color(0,47,63)
darkest_azure=Color(0,31,63)
darkest_blue=Color(0,0,63)
darkest_han=Color(15,0,63)
darkest_violet=Color(31,0,63)
darkest_purple=Color(47,0,63)
darkest_fuchsia=Color(63,0,63)
darkest_magenta=Color(63,0,47)
darkest_pink=Color(63,0,31)
darkest_crimson=Color(63,0,15)
# light colors
light_red=Color(255,114,114)
light_flame=Color(255,149,114)
light_orange=Color(255,184,114)
light_amber=Color(255,219,114)
light_yellow=Color(255,255,114)
light_lime=Color(219,255,114)
light_chartreuse=Color(184,255,114)
light_green=Color(114,255,114)
light_sea=Color(114,255,184)
light_turquoise=Color(114,255,219)
light_cyan=Color(114,255,255)
light_sky=Color(114,219,255)
light_azure=Color(114,184,255)
light_blue=Color(114,114,255)
light_han=Color(149,114,255)
light_violet=Color(184,114,255)
light_purple=Color(219,114,255)
light_fuchsia=Color(255,114,255)
light_magenta=Color(255,114,219)
light_pink=Color(255,114,184)
light_crimson=Color(255,114,149)
#lighter colors
lighter_red=Color(255,165,165)
lighter_flame=Color(255,188,165)
lighter_orange=Color(255,210,165)
lighter_amber=Color(255,232,165)
lighter_yellow=Color(255,255,165)
lighter_lime=Color(232,255,165)
lighter_chartreuse=Color(210,255,165)
lighter_green=Color(165,255,165)
lighter_sea=Color(165,255,210)
lighter_turquoise=Color(165,255,232)
lighter_cyan=Color(165,255,255)
lighter_sky=Color(165,232,255)
lighter_azure=Color(165,210,255)
lighter_blue=Color(165,165,255)
lighter_han=Color(188,165,255)
lighter_violet=Color(210,165,255)
lighter_purple=Color(232,165,255)
lighter_fuchsia=Color(255,165,255)
lighter_magenta=Color(255,165,232)
lighter_pink=Color(255,165,210)
lighter_crimson=Color(255,165,188)
# lightest colors
lightest_red=Color(255,191,191)
lightest_flame=Color(255,207,191)
lightest_orange=Color(255,223,191)
lightest_amber=Color(255,239,191)
lightest_yellow=Color(255,255,191)
lightest_lime=Color(239,255,191)
lightest_chartreuse=Color(223,255,191)
lightest_green=Color(191,255,191)
lightest_sea=Color(191,255,223)
lightest_turquoise=Color(191,255,239)
lightest_cyan=Color(191,255,255)
lightest_sky=Color(191,239,255)
lightest_azure=Color(191,223,255)
lightest_blue=Color(191,191,255)
lightest_han=Color(207,191,255)
lightest_violet=Color(223,191,255)
lightest_purple=Color(239,191,255)
lightest_fuchsia=Color(255,191,255)
lightest_magenta=Color(255,191,239)
lightest_pink=Color(255,191,223)
lightest_crimson=Color(255,191,207)
# desaturated colors
desaturated_red=Color(127,63,63)
desaturated_flame=Color(127,79,63)
desaturated_orange=Color(127,95,63)
desaturated_amber=Color(127,111,63)
desaturated_yellow=Color(127,127,63)
desaturated_lime=Color(111,127,63)
desaturated_chartreuse=Color(95,127,63)
desaturated_green=Color(63,127,63)
desaturated_sea=Color(63,127,95)
desaturated_turquoise=Color(63,127,111)
desaturated_cyan=Color(63,127,127)
desaturated_sky=Color(63,111,127)
desaturated_azure=Color(63,95,127)
desaturated_blue=Color(63,63,127)
desaturated_han=Color(79,63,127)
desaturated_violet=Color(95,63,127)
desaturated_purple=Color(111,63,127)
desaturated_fuchsia=Color(127,63,127)
desaturated_magenta=Color(127,63,111)
desaturated_pink=Color(127,63,95)
desaturated_crimson=Color(127,63,79)
# metallic
brass=Color(191,151,96)
copper=Color(197,136,124)
gold=Color(229,191,0)
silver=Color(203,203,203)
# miscellaneous
celadon=Color(172,255,175)
peach=Color(255,159,127)
# color functions
_lib.TCOD_color_lerp.restype = Color
def color_lerp(c1, c2, a):
return _lib.TCOD_color_lerp(c1, c2, c_float(a))
def color_set_hsv(c, h, s, v):
_lib.TCOD_color_set_HSV(byref(c), c_float(h), c_float(s), c_float(v))
def color_get_hsv(c):
h = c_float()
s = c_float()
v = c_float()
_lib.TCOD_color_get_HSV(c, byref(h), byref(s), byref(v))
return h.value, s.value, v.value
def color_scale_HSV(c, scoef, vcoef) :
_lib.TCOD_color_scale_HSV(byref(c),c_float(scoef),c_float(vcoef))
def color_gen_map(colors, indexes):
ccolors = (Color * len(colors))(*colors)
cindexes = (c_int * len(indexes))(*indexes)
cres = (Color * (max(indexes) + 1))()
_lib.TCOD_color_gen_map(cres, len(colors), ccolors, cindexes)
return cres
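# Added usage sketch (not part of the upstream wrapper): color_gen_map builds a
# smooth gradient between key colors placed at the given indexes, e.g.
#   keys    = [black, red, yellow]
#   indexes = [0, 32, 63]
#   gradient = color_gen_map(keys, indexes)  # ctypes array of 64 Color entries
#   mid      = color_lerp(red, yellow, 0.5)  # single blend halfway between two colors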
############################
# console module
############################
class Key(Structure):
_fields_=[('vk', c_int),
('c', c_uint8),
('pressed', c_bool),
('lalt', c_bool),
('lctrl', c_bool),
('ralt', c_bool),
('rctrl', c_bool),
('shift', c_bool),
]
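# Added note (not part of the upstream wrapper): Key instances are normally not
# built by hand; the keyboard functions defined further down in this module
# (e.g. console_wait_for_keypress) fill one in, and its .vk and .c fields are
# then compared against the KEY_* constants declared below.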
class ConsoleBuffer:
# simple console that allows direct (fast) access to cells. simplifies
# use of the "fill" functions.
def __init__(self, width, height, back_r=0, back_g=0, back_b=0, fore_r=0, fore_g=0, fore_b=0, char=' '):
# initialize with given width and height. values to fill the buffer
# are optional, defaults to black with no characters.
n = width * height
self.width = width
self.height = height
self.clear(back_r, back_g, back_b, fore_r, fore_g, fore_b, char)
def clear(self, back_r=0, back_g=0, back_b=0, fore_r=0, fore_g=0, fore_b=0, char=' '):
# clears the console. values to fill it with are optional, defaults
# to black with no characters.
n = self.width * self.height
self.back_r = [back_r] * n
self.back_g = [back_g] * n
self.back_b = [back_b] * n
self.fore_r = [fore_r] * n
self.fore_g = [fore_g] * n
self.fore_b = [fore_b] * n
self.char = [ord(char)] * n
def copy(self):
# returns a copy of this ConsoleBuffer.
other = ConsoleBuffer(0, 0)
other.width = self.width
other.height = self.height
other.back_r = list(self.back_r) # make explicit copies of all lists
other.back_g = list(self.back_g)
other.back_b = list(self.back_b)
other.fore_r = list(self.fore_r)
other.fore_g = list(self.fore_g)
other.fore_b = list(self.fore_b)
other.char = list(self.char)
return other
def set_fore(self, x, y, r, g, b, char):
# set the character and foreground color of one cell.
i = self.width * y + x
self.fore_r[i] = r
self.fore_g[i] = g
self.fore_b[i] = b
self.char[i] = ord(char)
def set_back(self, x, y, r, g, b):
# set the background color of one cell.
i = self.width * y + x
self.back_r[i] = r
self.back_g[i] = g
self.back_b[i] = b
def set(self, x, y, back_r, back_g, back_b, fore_r, fore_g, fore_b, char):
# set the background color, foreground color and character of one cell.
i = self.width * y + x
self.back_r[i] = back_r
self.back_g[i] = back_g
self.back_b[i] = back_b
self.fore_r[i] = fore_r
self.fore_g[i] = fore_g
self.fore_b[i] = fore_b
self.char[i] = ord(char)
def blit(self, dest, fill_fore=True, fill_back=True):
# use libtcod's "fill" functions to write the buffer to a console.
if (console_get_width(dest) != self.width or
console_get_height(dest) != self.height):
raise ValueError('ConsoleBuffer.blit: Destination console has an incorrect size.')
if fill_back:
_lib.TCOD_console_fill_background(dest, (c_int * len(self.back_r))(*self.back_r), (c_int * len(self.back_g))(*self.back_g), (c_int * len(self.back_b))(*self.back_b))
if fill_fore:
_lib.TCOD_console_fill_foreground(dest, (c_int * len(self.fore_r))(*self.fore_r), (c_int * len(self.fore_g))(*self.fore_g), (c_int * len(self.fore_b))(*self.fore_b))
_lib.TCOD_console_fill_char(dest, (c_int * len(self.char))(*self.char))
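# Illustrative sketch (not called anywhere in this module): how ConsoleBuffer is
# typically used to prepare a frame off-screen and push it to a console with one
# call per channel. It assumes `con` is a console of the same size (created via
# console_new() or console_init_root()); the coordinates and colors are arbitrary.
def _consolebuffer_example(con, width, height):
    buf = ConsoleBuffer(width, height)
    # draw a diagonal of white '@' glyphs on a dark red background
    for i in range(min(width, height)):
        buf.set(i, i, 63, 0, 0, 255, 255, 255, '@')
    # copy() gives an independent snapshot that can be modified separately
    snapshot = buf.copy()
    snapshot.set_back(0, 0, 0, 0, 63)
    # blit() uses the TCOD "fill" functions, so the destination must match in size
    buf.blit(con)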
_lib.TCOD_console_credits_render.restype = c_bool
_lib.TCOD_console_is_fullscreen.restype = c_bool
_lib.TCOD_console_is_window_closed.restype = c_bool
_lib.TCOD_console_get_default_background.restype = Color
_lib.TCOD_console_get_default_foreground.restype = Color
_lib.TCOD_console_get_char_background.restype = Color
_lib.TCOD_console_get_char_foreground.restype = Color
_lib.TCOD_console_get_fading_color.restype = Color
_lib.TCOD_console_is_key_pressed.restype = c_bool
# background rendering modes
BKGND_NONE = 0
BKGND_SET = 1
BKGND_MULTIPLY = 2
BKGND_LIGHTEN = 3
BKGND_DARKEN = 4
BKGND_SCREEN = 5
BKGND_COLOR_DODGE = 6
BKGND_COLOR_BURN = 7
BKGND_ADD = 8
BKGND_ADDA = 9
BKGND_BURN = 10
BKGND_OVERLAY = 11
BKGND_ALPH = 12
BKGND_DEFAULT=13
def BKGND_ALPHA(a):
return BKGND_ALPH | (int(a * 255) << 8)
def BKGND_ADDALPHA(a):
return BKGND_ADDA | (int(a * 255) << 8)
# non blocking key events types
KEY_PRESSED = 1
KEY_RELEASED = 2
# key codes
KEY_NONE = 0
KEY_ESCAPE = 1
KEY_BACKSPACE = 2
KEY_TAB = 3
KEY_ENTER = 4
KEY_SHIFT = 5
KEY_CONTROL = 6
KEY_ALT = 7
KEY_PAUSE = 8
KEY_CAPSLOCK = 9
KEY_PAGEUP = 10
KEY_PAGEDOWN = 11
KEY_END = 12
KEY_HOME = 13
KEY_UP = 14
KEY_LEFT = 15
KEY_RIGHT = 16
KEY_DOWN = 17
KEY_PRINTSCREEN = 18
KEY_INSERT = 19
KEY_DELETE = 20
KEY_LWIN = 21
KEY_RWIN = 22
KEY_APPS = 23
KEY_0 = 24
KEY_1 = 25
KEY_2 = 26
KEY_3 = 27
KEY_4 = 28
KEY_5 = 29
KEY_6 = 30
KEY_7 = 31
KEY_8 = 32
KEY_9 = 33
KEY_KP0 = 34
KEY_KP1 = 35
KEY_KP2 = 36
KEY_KP3 = 37
KEY_KP4 = 38
KEY_KP5 = 39
KEY_KP6 = 40
KEY_KP7 = 41
KEY_KP8 = 42
KEY_KP9 = 43
KEY_KPADD = 44
KEY_KPSUB = 45
KEY_KPDIV = 46
KEY_KPMUL = 47
KEY_KPDEC = 48
KEY_KPENTER = 49
KEY_F1 = 50
KEY_F2 = 51
KEY_F3 = 52
KEY_F4 = 53
KEY_F5 = 54
KEY_F6 = 55
KEY_F7 = 56
KEY_F8 = 57
KEY_F9 = 58
KEY_F10 = 59
KEY_F11 = 60
KEY_F12 = 61
KEY_NUMLOCK = 62
KEY_SCROLLLOCK = 63
KEY_SPACE = 64
KEY_CHAR = 65
# special chars
# single walls
CHAR_HLINE = 196
CHAR_VLINE = 179
CHAR_NE = 191
CHAR_NW = 218
CHAR_SE = 217
CHAR_SW = 192
CHAR_TEEW = 180
CHAR_TEEE = 195
CHAR_TEEN = 193
CHAR_TEES = 194
CHAR_CROSS = 197
# double walls
CHAR_DHLINE = 205
CHAR_DVLINE = 186
CHAR_DNE = 187
CHAR_DNW = 201
CHAR_DSE = 188
CHAR_DSW = 200
CHAR_DTEEW = 185
CHAR_DTEEE = 204
CHAR_DTEEN = 202
CHAR_DTEES = 203
CHAR_DCROSS = 206
# blocks
CHAR_BLOCK1 = 176
CHAR_BLOCK2 = 177
CHAR_BLOCK3 = 178
# arrows
CHAR_ARROW_N = 24
CHAR_ARROW_S = 25
CHAR_ARROW_E = 26
CHAR_ARROW_W = 27
# arrows without tail
CHAR_ARROW2_N = 30
CHAR_ARROW2_S = 31
CHAR_ARROW2_E = 16
CHAR_ARROW2_W = 17
# double arrows
CHAR_DARROW_H = 29
CHAR_DARROW_V = 18
# GUI stuff
CHAR_CHECKBOX_UNSET = 224
CHAR_CHECKBOX_SET = 225
CHAR_RADIO_UNSET = 9
CHAR_RADIO_SET = 10
# sub-pixel resolution kit
CHAR_SUBP_NW = 226
CHAR_SUBP_NE = 227
CHAR_SUBP_N = 228
CHAR_SUBP_SE = 229
CHAR_SUBP_DIAG = 230
CHAR_SUBP_E = 231
CHAR_SUBP_SW = 232
# misc characters
CHAR_BULLET = 7
CHAR_BULLET_INV = 8
CHAR_BULLET_SQUARE = 254
CHAR_CENT = 189
CHAR_CLUB = 5
CHAR_COPYRIGHT = 184
CHAR_CURRENCY = 207
CHAR_DIAMOND = 4
CHAR_DIVISION = 246
CHAR_EXCLAM_DOUBLE = 19
CHAR_FEMALE = 12
CHAR_FUNCTION = 159
CHAR_GRADE = 248
CHAR_HALF = 171
CHAR_HEART = 3
CHAR_LIGHT = 15
CHAR_MALE = 11
CHAR_MULTIPLICATION = 158
CHAR_NOTE = 13
CHAR_NOTE_DOUBLE = 14
CHAR_ONE_QUARTER = 172
CHAR_PILCROW = 20
CHAR_POUND = 156
CHAR_POW1 = 251
CHAR_POW2 = 253
CHAR_POW3 = 252
CHAR_RESERVED = 169
CHAR_SECTION = 21
CHAR_SMILIE = 1
CHAR_SMILIE_INV = 2
CHAR_SPADE = 6
CHAR_THREE_QUARTERS = 243
CHAR_UMLAUT = 249
CHAR_YEN = 190
# font flags
FONT_LAYOUT_ASCII_INCOL = 1
FONT_LAYOUT_ASCII_INROW = 2
FONT_TYPE_GREYSCALE = 4
FONT_TYPE_GRAYSCALE = 4
FONT_LAYOUT_TCOD = 8
# color control codes
COLCTRL_1=1
COLCTRL_2=2
COLCTRL_3=3
COLCTRL_4=4
COLCTRL_5=5
COLCTRL_NUMBER=5
COLCTRL_FORE_RGB=6
COLCTRL_BACK_RGB=7
COLCTRL_STOP=8
# renderers
RENDERER_GLSL=0
RENDERER_OPENGL=1
RENDERER_SDL=2
NB_RENDERERS=3
# alignment
LEFT=0
RIGHT=1
CENTER=2
# initializing the console
def console_init_root(w, h, title, fullscreen=False, renderer=RENDERER_SDL):
_lib.TCOD_console_init_root(w, h, c_char_p(title), fullscreen, renderer)
def console_get_width(con):
return _lib.TCOD_console_get_width(con)
def console_get_height(con):
return _lib.TCOD_console_get_height(con)
def console_set_custom_font(fontFile, flags=FONT_LAYOUT_ASCII_INCOL, nb_char_horiz=0, nb_char_vertic=0):
_lib.TCOD_console_set_custom_font(c_char_p(fontFile), flags, nb_char_horiz, nb_char_vertic)
def console_map_ascii_code_to_font(asciiCode, fontCharX, fontCharY):
if type(asciiCode) == str or type(asciiCode) == bytes:
_lib.TCOD_console_map_ascii_code_to_font(ord(asciiCode), fontCharX,
fontCharY)
else:
_lib.TCOD_console_map_ascii_code_to_font(asciiCode, fontCharX,
fontCharY)
def console_map_ascii_codes_to_font(firstAsciiCode, nbCodes, fontCharX,
fontCharY):
    if type(firstAsciiCode) == str or type(firstAsciiCode) == bytes:
_lib.TCOD_console_map_ascii_codes_to_font(ord(firstAsciiCode), nbCodes,
fontCharX, fontCharY)
else:
_lib.TCOD_console_map_ascii_codes_to_font(firstAsciiCode, nbCodes,
fontCharX, fontCharY)
def console_map_string_to_font(s, fontCharX, fontCharY):
if type(s) == bytes:
_lib.TCOD_console_map_string_to_font(s, fontCharX, fontCharY)
else:
_lib.TCOD_console_map_string_to_font_utf(s, fontCharX, fontCharY)
def console_is_fullscreen():
return _lib.TCOD_console_is_fullscreen()
def console_set_fullscreen(fullscreen):
_lib.TCOD_console_set_fullscreen(c_int(fullscreen))
def console_is_window_closed():
return _lib.TCOD_console_is_window_closed()
def console_set_window_title(title):
_lib.TCOD_console_set_window_title(c_char_p(title))
def console_credits():
_lib.TCOD_console_credits()
def console_credits_reset():
_lib.TCOD_console_credits_reset()
def console_credits_render(x, y, alpha):
return _lib.TCOD_console_credits_render(x, y, c_int(alpha))
def console_flush():
_lib.TCOD_console_flush()
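# Illustrative sketch (not executed on import): a minimal initialization/render
# loop built only from the calls above. Calling it opens a window; the title and
# size are arbitrary, and console_set_custom_font() is optional so it is omitted.
def _console_init_example():
    console_init_root(80, 50, 'libtcod sample', False, RENDERER_SDL)
    while not console_is_window_closed():
        console_flush()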
# drawing on a console
def console_set_default_background(con, col):
_lib.TCOD_console_set_default_background(con, col)
def console_set_default_foreground(con, col):
_lib.TCOD_console_set_default_foreground(con, col)
def console_clear(con):
return _lib.TCOD_console_clear(con)
def console_put_char(con, x, y, c, flag=BKGND_DEFAULT):
if type(c) == str or type(c) == bytes:
_lib.TCOD_console_put_char(con, x, y, ord(c), flag)
else:
_lib.TCOD_console_put_char(con, x, y, c, flag)
def console_put_char_ex(con, x, y, c, fore, back):
if type(c) == str or type(c) == bytes:
_lib.TCOD_console_put_char_ex(con, x, y, ord(c), fore, back)
else:
_lib.TCOD_console_put_char_ex(con, x, y, c, fore, back)
def console_set_char_background(con, x, y, col, flag=BKGND_SET):
_lib.TCOD_console_set_char_background(con, x, y, col, flag)
def console_set_char_foreground(con, x, y, col):
_lib.TCOD_console_set_char_foreground(con, x, y, col)
def console_set_char(con, x, y, c):
if type(c) == str or type(c) == bytes:
_lib.TCOD_console_set_char(con, x, y, ord(c))
else:
_lib.TCOD_console_set_char(con, x, y, c)
def console_set_background_flag(con, flag):
_lib.TCOD_console_set_background_flag(con, c_int(flag))
def console_get_background_flag(con):
return _lib.TCOD_console_get_background_flag(con)
def console_set_alignment(con, alignment):
_lib.TCOD_console_set_alignment(con, c_int(alignment))
def console_get_alignment(con):
return _lib.TCOD_console_get_alignment(con)
def console_print(con, x, y, fmt):
if type(fmt) == bytes:
_lib.TCOD_console_print(c_void_p(con), x, y, c_char_p(fmt))
else:
_lib.TCOD_console_print_utf(c_void_p(con), x, y, fmt)
def console_print_ex(con, x, y, flag, alignment, fmt):
if type(fmt) == bytes:
_lib.TCOD_console_print_ex(c_void_p(con), x, y, flag, alignment, c_char_p(fmt))
else:
_lib.TCOD_console_print_ex_utf(c_void_p(con), x, y, flag, alignment, fmt)
def console_print_rect(con, x, y, w, h, fmt):
if type(fmt) == bytes:
return _lib.TCOD_console_print_rect(c_void_p(con), x, y, w, h, c_char_p(fmt))
else:
return _lib.TCOD_console_print_rect_utf(c_void_p(con), x, y, w, h, fmt)
def console_print_rect_ex(con, x, y, w, h, flag, alignment, fmt):
if type(fmt) == bytes:
return _lib.TCOD_console_print_rect_ex(c_void_p(con), x, y, w, h, flag, alignment, c_char_p(fmt))
else:
return _lib.TCOD_console_print_rect_ex_utf(c_void_p(con), x, y, w, h, flag, alignment, fmt)
def console_get_height_rect(con, x, y, w, h, fmt):
if type(fmt) == bytes:
return _lib.TCOD_console_get_height_rect(c_void_p(con), x, y, w, h, c_char_p(fmt))
else:
return _lib.TCOD_console_get_height_rect_utf(c_void_p(con), x, y, w, h, fmt)
def console_rect(con, x, y, w, h, clr, flag=BKGND_DEFAULT):
_lib.TCOD_console_rect(con, x, y, w, h, c_int(clr), flag)
def console_hline(con, x, y, l, flag=BKGND_DEFAULT):
_lib.TCOD_console_hline( con, x, y, l, flag)
def console_vline(con, x, y, l, flag=BKGND_DEFAULT):
_lib.TCOD_console_vline( con, x, y, l, flag)
def console_print_frame(con, x, y, w, h, clear=True, flag=BKGND_DEFAULT, fmt=0):
_lib.TCOD_console_print_frame(c_void_p(con), x, y, w, h, c_int(clear), flag, c_char_p(fmt))
def console_set_color_control(con,fore,back) :
_lib.TCOD_console_set_color_control(con,fore,back)
def console_get_default_background(con):
return _lib.TCOD_console_get_default_background(con)
def console_get_default_foreground(con):
return _lib.TCOD_console_get_default_foreground(con)
def console_get_char_background(con, x, y):
return _lib.TCOD_console_get_char_background(con, x, y)
def console_get_char_foreground(con, x, y):
return _lib.TCOD_console_get_char_foreground(con, x, y)
def console_get_char(con, x, y):
return _lib.TCOD_console_get_char(con, x, y)
def console_set_fade(fade, fadingColor):
_lib.TCOD_console_set_fade(fade, fadingColor)
##_lib.TCOD_console_set_fade_wrapper(fade, fadingColor)
def console_get_fade():
return _lib.TCOD_console_get_fade().value
def console_get_fading_color():
return _lib.TCOD_console_get_fading_color()
# handling keyboard input
def console_wait_for_keypress(flush):
k=Key()
_lib.TCOD_console_wait_for_keypress_wrapper(byref(k),c_bool(flush))
return k
def console_check_for_keypress(flags=KEY_RELEASED):
k=Key()
_lib.TCOD_console_check_for_keypress_wrapper(byref(k),c_int(flags))
return k
def console_is_key_pressed(key):
return _lib.TCOD_console_is_key_pressed(key)
def console_set_keyboard_repeat(initial_delay, interval):
_lib.TCOD_console_set_keyboard_repeat(initial_delay, interval)
def console_disable_keyboard_repeat():
_lib.TCOD_console_disable_keyboard_repeat()
# using offscreen consoles
def console_new(w, h):
return _lib.TCOD_console_new(w, h)
def console_from_file(filename):
return _lib.TCOD_console_from_file(filename)
def console_get_width(con):
return _lib.TCOD_console_get_width(con)
def console_get_height(con):
return _lib.TCOD_console_get_height(con)
def console_blit(src, x, y, w, h, dst, xdst, ydst, ffade=1.0,bfade=1.0):
_lib.TCOD_console_blit(src, x, y, w, h, dst, xdst, ydst, c_float(ffade), c_float(bfade))
def console_set_key_color(con, col):
_lib.TCOD_console_set_key_color(con, col)
def console_delete(con):
_lib.TCOD_console_delete(con)
# fast color filling
def console_fill_foreground(con,r,g,b) :
if len(r) != len(g) or len(r) != len(b):
raise TypeError('R, G and B must all have the same size.')
if (numpy_available and isinstance(r, numpy.ndarray) and
isinstance(g, numpy.ndarray) and isinstance(b, numpy.ndarray)):
#numpy arrays, use numpy's ctypes functions
r = numpy.ascontiguousarray(r, dtype=numpy.int_)
g = numpy.ascontiguousarray(g, dtype=numpy.int_)
b = numpy.ascontiguousarray(b, dtype=numpy.int_)
cr = r.ctypes.data_as(POINTER(c_int))
cg = g.ctypes.data_as(POINTER(c_int))
cb = b.ctypes.data_as(POINTER(c_int))
else:
# otherwise convert using ctypes arrays
cr = (c_int * len(r))(*r)
cg = (c_int * len(g))(*g)
cb = (c_int * len(b))(*b)
_lib.TCOD_console_fill_foreground(con, cr, cg, cb)
def console_fill_background(con,r,g,b) :
if len(r) != len(g) or len(r) != len(b):
raise TypeError('R, G and B must all have the same size.')
if (numpy_available and isinstance(r, numpy.ndarray) and
isinstance(g, numpy.ndarray) and isinstance(b, numpy.ndarray)):
#numpy arrays, use numpy's ctypes functions
r = numpy.ascontiguousarray(r, dtype=numpy.int_)
g = numpy.ascontiguousarray(g, dtype=numpy.int_)
b = numpy.ascontiguousarray(b, dtype=numpy.int_)
cr = r.ctypes.data_as(POINTER(c_int))
cg = g.ctypes.data_as(POINTER(c_int))
cb = b.ctypes.data_as(POINTER(c_int))
else:
# otherwise convert using ctypes arrays
cr = (c_int * len(r))(*r)
cg = (c_int * len(g))(*g)
cb = (c_int * len(b))(*b)
_lib.TCOD_console_fill_background(con, cr, cg, cb)
def console_fill_char(con,arr) :
if (numpy_available and isinstance(arr, numpy.ndarray) ):
#numpy arrays, use numpy's ctypes functions
arr = numpy.ascontiguousarray(arr, dtype=numpy.int_)
carr = arr.ctypes.data_as(POINTER(c_int))
else:
#otherwise convert using the struct module
carr = struct.pack('%di' % len(arr), *arr)
_lib.TCOD_console_fill_char(con, carr)
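# Illustrative sketch (not executed on import): filling a whole console in three
# calls using plain Python lists. When numpy is available the same functions also
# accept numpy arrays and skip the per-element ctypes conversion. `con` is assumed
# to be a 4x4 console; the colors are arbitrary.
def _fast_fill_example(con):
    n = 4 * 4
    console_fill_background(con, [0] * n, [0] * n, [63] * n)       # dark blue back
    console_fill_foreground(con, [255] * n, [255] * n, [255] * n)  # white fore
    console_fill_char(con, [ord('#')] * n)                         # '#' everywhere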
def console_load_asc(con, filename) :
_lib.TCOD_console_load_asc(con,filename)
def console_save_asc(con, filename) :
_lib.TCOD_console_save_asc(con,filename)
def console_load_apf(con, filename) :
_lib.TCOD_console_load_apf(con,filename)
def console_save_apf(con, filename) :
_lib.TCOD_console_save_apf(con,filename)
############################
# sys module
############################
_lib.TCOD_sys_get_last_frame_length.restype = c_float
_lib.TCOD_sys_elapsed_seconds.restype = c_float
# high precision time functions
def sys_set_fps(fps):
_lib.TCOD_sys_set_fps(fps)
def sys_get_fps():
return _lib.TCOD_sys_get_fps()
def sys_get_last_frame_length():
return _lib.TCOD_sys_get_last_frame_length()
def sys_sleep_milli(val):
_lib.TCOD_sys_sleep_milli(c_uint(val))
def sys_elapsed_milli():
return _lib.TCOD_sys_elapsed_milli()
def sys_elapsed_seconds():
return _lib.TCOD_sys_elapsed_seconds()
def sys_set_renderer(renderer):
_lib.TCOD_sys_set_renderer(renderer)
def sys_get_renderer():
return _lib.TCOD_sys_get_renderer()
# easy screenshots
def sys_save_screenshot(name=0):
_lib.TCOD_sys_save_screenshot(c_char_p(name))
# custom fullscreen resolution
def sys_force_fullscreen_resolution(width, height):
_lib.TCOD_sys_force_fullscreen_resolution(width, height)
def sys_get_current_resolution():
w = c_int()
h = c_int()
_lib.TCOD_sys_get_current_resolution(byref(w), byref(h))
return w.value, h.value
def sys_get_char_size():
w = c_int()
h = c_int()
_lib.TCOD_sys_get_char_size(byref(w), byref(h))
return w.value, h.value
# update font bitmap
def sys_update_char(asciiCode, fontx, fonty, img, x, y) :
_lib.TCOD_sys_update_char(c_int(asciiCode),c_int(fontx),c_int(fonty),img,c_int(x),c_int(y))
# custom SDL post renderer
SDL_RENDERER_FUNC = CFUNCTYPE(None, c_void_p)
def sys_register_SDL_renderer(callback):
global sdl_renderer_func
sdl_renderer_func = SDL_RENDERER_FUNC(callback)
_lib.TCOD_sys_register_SDL_renderer(sdl_renderer_func)
# events
EVENT_KEY_PRESS=1
EVENT_KEY_RELEASE=2
EVENT_KEY=EVENT_KEY_PRESS|EVENT_KEY_RELEASE
EVENT_MOUSE_MOVE=4
EVENT_MOUSE_PRESS=8
EVENT_MOUSE_RELEASE=16
EVENT_MOUSE=EVENT_MOUSE_MOVE|EVENT_MOUSE_PRESS|EVENT_MOUSE_RELEASE
EVENT_ANY=EVENT_KEY|EVENT_MOUSE
def sys_check_for_event(mask,k,m) :
return _lib.TCOD_sys_check_for_event(c_int(mask),byref(k),byref(m))
def sys_wait_for_event(mask,k,m,flush) :
return _lib.TCOD_sys_wait_for_event(c_int(mask),byref(k),byref(m),c_bool(flush))
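# Illustrative sketch (not executed on import): polling keyboard and mouse with a
# single call. sys_check_for_event() fills the Key and Mouse structures passed by
# reference and returns the mask of events that actually occurred (0 when nothing
# is pending). Assumes a root console has already been created.
def _event_poll_example():
    key = Key()
    mouse = Mouse()
    while not console_is_window_closed():
        ev = sys_check_for_event(EVENT_KEY_PRESS | EVENT_MOUSE, key, mouse)
        if ev & EVENT_KEY_PRESS and key.vk == KEY_ESCAPE:
            break
        if ev & EVENT_MOUSE_PRESS and mouse.lbutton_pressed:
            pass  # react to a left click at cell (mouse.cx, mouse.cy)
        console_flush()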
############################
# line module
############################
_lib.TCOD_line_step.restype = c_bool
_lib.TCOD_line.restype=c_bool
_lib.TCOD_line_step_mt.restype = c_bool
def line_init(xo, yo, xd, yd):
_lib.TCOD_line_init(xo, yo, xd, yd)
def line_step():
x = c_int()
y = c_int()
ret = _lib.TCOD_line_step(byref(x), byref(y))
if not ret:
return x.value, y.value
return None,None
def line(xo,yo,xd,yd,py_callback) :
LINE_CBK_FUNC=CFUNCTYPE(c_bool,c_int,c_int)
c_callback=LINE_CBK_FUNC(py_callback)
return _lib.TCOD_line(xo,yo,xd,yd,c_callback)
def line_iter(xo, yo, xd, yd):
data = (c_int * 9)() # struct TCOD_bresenham_data_t
_lib.TCOD_line_init_mt(xo, yo, xd, yd, data)
x = c_int(xo)
y = c_int(yo)
done = False
while not done:
yield x.value, y.value
done = _lib.TCOD_line_step_mt(byref(x), byref(y), data)
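# Illustrative sketch (not executed on import): line_iter() is the most convenient
# of the three line APIs because it is a plain generator and keeps no global state,
# unlike line_init()/line_step(). It yields every cell from the origin to the
# destination inclusive; the coordinates below are arbitrary.
def _line_example():
    return [(x, y) for (x, y) in line_iter(0, 0, 5, 2)]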
############################
# image module
############################
_lib.TCOD_image_is_pixel_transparent.restype = c_bool
_lib.TCOD_image_get_pixel.restype = Color
_lib.TCOD_image_get_mipmap_pixel.restype = Color
def image_new(width, height):
return _lib.TCOD_image_new(width, height)
def image_clear(image,col) :
_lib.TCOD_image_clear(image,col)
def image_invert(image) :
_lib.TCOD_image_invert(image)
def image_hflip(image) :
_lib.TCOD_image_hflip(image)
def image_rotate90(image, num=1) :
_lib.TCOD_image_rotate90(image,num)
def image_vflip(image) :
_lib.TCOD_image_vflip(image)
def image_scale(image, neww, newh) :
_lib.TCOD_image_scale(image,c_int(neww),c_int(newh))
def image_set_key_color(image,col) :
_lib.TCOD_image_set_key_color(image,col)
def image_get_alpha(image,x,y) :
return _lib.TCOD_image_get_alpha(image,c_int(x),c_int(y))
def image_is_pixel_transparent(image,x,y) :
return _lib.TCOD_image_is_pixel_transparent(image,c_int(x),c_int(y))
def image_load(filename):
return _lib.TCOD_image_load(c_char_p(filename))
def image_from_console(console):
return _lib.TCOD_image_from_console(console)
def image_refresh_console(image, console):
_lib.TCOD_image_refresh_console(image, console)
def image_get_size(image):
w=c_int()
h=c_int()
_lib.TCOD_image_get_size(image, byref(w), byref(h))
return w.value, h.value
def image_get_pixel(image, x, y):
return _lib.TCOD_image_get_pixel(image, x, y)
def image_get_mipmap_pixel(image, x0, y0, x1, y1):
return _lib.TCOD_image_get_mipmap_pixel(image, c_float(x0), c_float(y0),
c_float(x1), c_float(y1))
def image_put_pixel(image, x, y, col):
_lib.TCOD_image_put_pixel(image, x, y, col)
##_lib.TCOD_image_put_pixel_wrapper(image, x, y, col)
def image_blit(image, console, x, y, bkgnd_flag, scalex, scaley, angle):
_lib.TCOD_image_blit(image, console, c_float(x), c_float(y), bkgnd_flag,
c_float(scalex), c_float(scaley), c_float(angle))
def image_blit_rect(image, console, x, y, w, h, bkgnd_flag):
_lib.TCOD_image_blit_rect(image, console, x, y, w, h, bkgnd_flag)
def image_blit_2x(image, console, dx, dy, sx=0, sy=0, w=-1, h=-1):
_lib.TCOD_image_blit_2x(image, console, dx,dy,sx,sy,w,h)
def image_save(image, filename):
_lib.TCOD_image_save(image, c_char_p(filename))
def image_delete(image):
_lib.TCOD_image_delete(image)
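# Illustrative sketch (not executed on import): loading a bitmap and blitting it
# onto a console. The file name 'background.png' is made up for the example;
# image_blit_2x() doubles the vertical resolution by using sub-cell characters.
def _image_example(con):
    img = image_load('background.png')
    w, h = image_get_size(img)
    image_blit_rect(img, con, 0, 0, w, h, BKGND_SET)
    image_blit_2x(img, con, 0, 0)
    image_delete(img)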
############################
# mouse module
############################
class Mouse(Structure):
_fields_=[('x', c_int),
('y', c_int),
('dx', c_int),
('dy', c_int),
('cx', c_int),
('cy', c_int),
('dcx', c_int),
('dcy', c_int),
('lbutton', c_bool),
('rbutton', c_bool),
('mbutton', c_bool),
('lbutton_pressed', c_bool),
('rbutton_pressed', c_bool),
('mbutton_pressed', c_bool),
('wheel_up', c_bool),
('wheel_down', c_bool),
]
_lib.TCOD_mouse_is_cursor_visible.restype = c_bool
def mouse_show_cursor(visible):
_lib.TCOD_mouse_show_cursor(c_int(visible))
def mouse_is_cursor_visible():
return _lib.TCOD_mouse_is_cursor_visible()
def mouse_move(x, y):
_lib.TCOD_mouse_move(x, y)
def mouse_get_status():
mouse=Mouse()
_lib.TCOD_mouse_get_status_wrapper(byref(mouse))
return mouse
############################
# parser module
############################
_lib.TCOD_struct_get_name.restype = c_char_p
_lib.TCOD_struct_is_mandatory.restype = c_bool
_lib.TCOD_parser_get_bool_property.restype = c_bool
_lib.TCOD_parser_get_float_property.restype = c_float
_lib.TCOD_parser_get_string_property.restype = c_char_p
_lib.TCOD_parser_get_color_property.restype = Color
class Dice(Structure):
_fields_=[('nb_dices', c_int),
('nb_faces', c_int),
('multiplier', c_float),
('addsub', c_float),
]
def __repr__(self):
return "Dice(%d, %d, %s, %s)" % (self.nb_dices, self.nb_faces,
self.multiplier, self.addsub)
class _CValue(Union):
_fields_=[('c',c_uint8),
('i',c_int),
('f',c_float),
('s',c_char_p),
# JBR03192012 See http://bugs.python.org/issue14354 for why these are not defined as their actual types
('col',c_uint8 * 3),
('dice',c_int * 4),
('custom',c_void_p),
]
_CFUNC_NEW_STRUCT = CFUNCTYPE(c_uint, c_void_p, c_char_p)
_CFUNC_NEW_FLAG = CFUNCTYPE(c_uint, c_char_p)
_CFUNC_NEW_PROPERTY = CFUNCTYPE(c_uint, c_char_p, c_int, _CValue)
class _CParserListener(Structure):
_fields_=[('new_struct', _CFUNC_NEW_STRUCT),
('new_flag',_CFUNC_NEW_FLAG),
('new_property',_CFUNC_NEW_PROPERTY),
('end_struct',_CFUNC_NEW_STRUCT),
('error',_CFUNC_NEW_FLAG),
]
# property types
TYPE_NONE = 0
TYPE_BOOL = 1
TYPE_CHAR = 2
TYPE_INT = 3
TYPE_FLOAT = 4
TYPE_STRING = 5
TYPE_COLOR = 6
TYPE_DICE = 7
TYPE_VALUELIST00 = 8
TYPE_VALUELIST01 = 9
TYPE_VALUELIST02 = 10
TYPE_VALUELIST03 = 11
TYPE_VALUELIST04 = 12
TYPE_VALUELIST05 = 13
TYPE_VALUELIST06 = 14
TYPE_VALUELIST07 = 15
TYPE_VALUELIST08 = 16
TYPE_VALUELIST09 = 17
TYPE_VALUELIST10 = 18
TYPE_VALUELIST11 = 19
TYPE_VALUELIST12 = 20
TYPE_VALUELIST13 = 21
TYPE_VALUELIST14 = 22
TYPE_VALUELIST15 = 23
TYPE_LIST = 1024
def _convert_TCODList(clist, typ):
res = list()
for i in range(_lib.TCOD_list_size(clist)):
elt = _lib.TCOD_list_get(clist, i)
elt = cast(elt, c_void_p)
if typ == TYPE_BOOL:
elt = c_bool.from_buffer(elt).value
elif typ == TYPE_CHAR:
elt = c_char.from_buffer(elt).value
elif typ == TYPE_INT:
elt = c_int.from_buffer(elt).value
elif typ == TYPE_FLOAT:
elt = c_float.from_buffer(elt).value
elif typ == TYPE_STRING or TYPE_VALUELIST15 >= typ >= TYPE_VALUELIST00:
elt = cast(elt, c_char_p).value
elif typ == TYPE_COLOR:
elt = Color.from_buffer_copy(elt)
elif typ == TYPE_DICE:
# doesn't work
elt = Dice.from_buffer_copy(elt)
res.append(elt)
return res
def parser_new():
return _lib.TCOD_parser_new()
def parser_new_struct(parser, name):
return _lib.TCOD_parser_new_struct(parser, name)
def struct_add_flag(struct, name):
_lib.TCOD_struct_add_flag(struct, name)
def struct_add_property(struct, name, typ, mandatory):
_lib.TCOD_struct_add_property(struct, name, typ, c_bool(mandatory))
def struct_add_value_list(struct, name, value_list, mandatory):
CARRAY = c_char_p * (len(value_list) + 1)
cvalue_list = CARRAY()
for i in range(len(value_list)):
cvalue_list[i] = cast(value_list[i], c_char_p)
cvalue_list[len(value_list)] = 0
_lib.TCOD_struct_add_value_list(struct, name, cvalue_list, c_bool(mandatory))
def struct_add_list_property(struct, name, typ, mandatory):
_lib.TCOD_struct_add_list_property(struct, name, typ, c_bool(mandatory))
def struct_add_structure(struct, sub_struct):
_lib.TCOD_struct_add_structure(struct, sub_struct)
def struct_get_name(struct):
return _lib.TCOD_struct_get_name(struct)
def struct_is_mandatory(struct, name):
return _lib.TCOD_struct_is_mandatory(struct, name)
def struct_get_type(struct, name):
return _lib.TCOD_struct_get_type(struct, name)
def parser_run(parser, filename, listener=0):
if listener != 0:
clistener=_CParserListener()
def value_converter(name, typ, value):
if typ == TYPE_BOOL:
return listener.new_property(name, typ, value.c == 1)
elif typ == TYPE_CHAR:
return listener.new_property(name, typ, '%c' % (value.c & 0xFF))
elif typ == TYPE_INT:
return listener.new_property(name, typ, value.i)
elif typ == TYPE_FLOAT:
return listener.new_property(name, typ, value.f)
elif typ == TYPE_STRING or \
TYPE_VALUELIST15 >= typ >= TYPE_VALUELIST00:
return listener.new_property(name, typ, value.s)
elif typ == TYPE_COLOR:
col = cast(value.col, POINTER(Color)).contents
return listener.new_property(name, typ, col)
elif typ == TYPE_DICE:
dice = cast(value.dice, POINTER(Dice)).contents
return listener.new_property(name, typ, dice)
elif typ & TYPE_LIST:
return listener.new_property(name, typ,
_convert_TCODList(value.custom, typ & 0xFF))
return True
clistener.new_struct = _CFUNC_NEW_STRUCT(listener.new_struct)
clistener.new_flag = _CFUNC_NEW_FLAG(listener.new_flag)
clistener.new_property = _CFUNC_NEW_PROPERTY(value_converter)
clistener.end_struct = _CFUNC_NEW_STRUCT(listener.end_struct)
clistener.error = _CFUNC_NEW_FLAG(listener.error)
_lib.TCOD_parser_run(parser, c_char_p(filename), byref(clistener))
else:
_lib.TCOD_parser_run(parser, c_char_p(filename), 0)
def parser_delete(parser):
_lib.TCOD_parser_delete(parser)
def parser_get_bool_property(parser, name):
return _lib.TCOD_parser_get_bool_property(parser, c_char_p(name))
def parser_get_int_property(parser, name):
return _lib.TCOD_parser_get_int_property(parser, c_char_p(name))
def parser_get_char_property(parser, name):
return '%c' % _lib.TCOD_parser_get_char_property(parser, c_char_p(name))
def parser_get_float_property(parser, name):
return _lib.TCOD_parser_get_float_property(parser, c_char_p(name))
def parser_get_string_property(parser, name):
return _lib.TCOD_parser_get_string_property(parser, c_char_p(name))
def parser_get_color_property(parser, name):
return _lib.TCOD_parser_get_color_property(parser, c_char_p(name))
def parser_get_dice_property(parser, name):
d = Dice()
_lib.TCOD_parser_get_dice_property_py(c_void_p(parser), c_char_p(name), byref(d))
return d
def parser_get_list_property(parser, name, typ):
clist = _lib.TCOD_parser_get_list_property(parser, c_char_p(name), c_int(typ))
return _convert_TCODList(clist, typ)
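# Illustrative sketch (not executed on import): declaring a config structure and
# reading properties back with the default (listener-less) parser, which stores
# values under "structname.propname". The file name and the "actor" structure and
# its properties are made up for the example.
def _parser_example(filename):
    parser = parser_new()
    actor = parser_new_struct(parser, 'actor')
    struct_add_property(actor, 'hp', TYPE_INT, True)
    struct_add_property(actor, 'name', TYPE_STRING, True)
    parser_run(parser, filename)            # no listener: properties are stored
    hp = parser_get_int_property(parser, 'actor.hp')
    name = parser_get_string_property(parser, 'actor.name')
    parser_delete(parser)
    return name, hp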
############################
# random module
############################
_lib.TCOD_random_get_float.restype = c_float
_lib.TCOD_random_get_double.restype = c_double
RNG_MT = 0
RNG_CMWC = 1
DISTRIBUTION_LINEAR = 0
DISTRIBUTION_GAUSSIAN = 1
DISTRIBUTION_GAUSSIAN_RANGE = 2
DISTRIBUTION_GAUSSIAN_INVERSE = 3
DISTRIBUTION_GAUSSIAN_RANGE_INVERSE = 4
def random_get_instance():
return _lib.TCOD_random_get_instance()
def random_new(algo=RNG_CMWC):
return _lib.TCOD_random_new(algo)
def random_new_from_seed(seed, algo=RNG_CMWC):
return _lib.TCOD_random_new_from_seed(algo,c_uint(seed))
def random_set_distribution(rnd, dist) :
_lib.TCOD_random_set_distribution(rnd, dist)
def random_get_int(rnd, mi, ma):
return _lib.TCOD_random_get_int(rnd, mi, ma)
def random_get_float(rnd, mi, ma):
return _lib.TCOD_random_get_float(rnd, c_float(mi), c_float(ma))
def random_get_double(rnd, mi, ma):
return _lib.TCOD_random_get_double(rnd, c_double(mi), c_double(ma))
def random_get_int_mean(rnd, mi, ma, mean):
return _lib.TCOD_random_get_int_mean(rnd, mi, ma, mean)
def random_get_float_mean(rnd, mi, ma, mean):
return _lib.TCOD_random_get_float_mean(rnd, c_float(mi), c_float(ma), c_float(mean))
def random_get_double_mean(rnd, mi, ma, mean):
return _lib.TCOD_random_get_double_mean(rnd, c_double(mi), c_double(ma), c_double(mean))
def random_save(rnd):
return _lib.TCOD_random_save(rnd)
def random_restore(rnd, backup):
_lib.TCOD_random_restore(rnd, backup)
def random_delete(rnd):
_lib.TCOD_random_delete(rnd)
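# Illustrative sketch (not executed on import): a reproducible RNG stream. The
# seed value is arbitrary; random_save()/random_restore() can be used to rewind
# the generator to an earlier state.
def _random_example():
    rnd = random_new_from_seed(0xdeadbeef)
    roll = random_get_int(rnd, 1, 6)          # uniform integer in [1, 6]
    chance = random_get_float(rnd, 0.0, 1.0)
    random_delete(rnd)
    return roll, chance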
############################
# noise module
############################
_lib.TCOD_noise_get.restype = c_float
_lib.TCOD_noise_get_ex.restype = c_float
_lib.TCOD_noise_get_fbm.restype = c_float
_lib.TCOD_noise_get_fbm_ex.restype = c_float
_lib.TCOD_noise_get_turbulence.restype = c_float
_lib.TCOD_noise_get_turbulence_ex.restype = c_float
NOISE_DEFAULT_HURST = 0.5
NOISE_DEFAULT_LACUNARITY = 2.0
NOISE_DEFAULT = 0
NOISE_PERLIN = 1
NOISE_SIMPLEX = 2
NOISE_WAVELET = 4
_NOISE_PACKER_FUNC = (None,
(c_float * 1),
(c_float * 2),
(c_float * 3),
(c_float * 4),
)
def noise_new(dim, h=NOISE_DEFAULT_HURST, l=NOISE_DEFAULT_LACUNARITY, random=0):
return _lib.TCOD_noise_new(dim, c_float(h), c_float(l), random)
def noise_set_type(n, typ) :
_lib.TCOD_noise_set_type(n,typ)
def noise_get(n, f, typ=NOISE_DEFAULT):
return _lib.TCOD_noise_get_ex(n, _NOISE_PACKER_FUNC[len(f)](*f), typ)
def noise_get_fbm(n, f, oc, typ=NOISE_DEFAULT):
return _lib.TCOD_noise_get_fbm_ex(n, _NOISE_PACKER_FUNC[len(f)](*f), c_float(oc), typ)
def noise_get_turbulence(n, f, oc, typ=NOISE_DEFAULT):
return _lib.TCOD_noise_get_turbulence_ex(n, _NOISE_PACKER_FUNC[len(f)](*f), c_float(oc), typ)
def noise_delete(n):
_lib.TCOD_noise_delete(n)
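# Illustrative sketch (not executed on import): sampling 2D simplex noise and its
# fractional-Brownian-motion variant. The sample point and octave count are
# arbitrary; the input point must be a sequence whose length matches the dimension
# passed to noise_new().
def _noise_example():
    n = noise_new(2)
    flat = noise_get(n, [0.5, 0.7], NOISE_SIMPLEX)           # single octave
    fbm = noise_get_fbm(n, [0.5, 0.7], 4.0, NOISE_SIMPLEX)   # 4 octaves of fbm
    noise_delete(n)
    return flat, fbm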
############################
# fov module
############################
_lib.TCOD_map_is_in_fov.restype = c_bool
_lib.TCOD_map_is_transparent.restype = c_bool
_lib.TCOD_map_is_walkable.restype = c_bool
FOV_BASIC = 0
FOV_DIAMOND = 1
FOV_SHADOW = 2
FOV_PERMISSIVE_0 = 3
FOV_PERMISSIVE_1 = 4
FOV_PERMISSIVE_2 = 5
FOV_PERMISSIVE_3 = 6
FOV_PERMISSIVE_4 = 7
FOV_PERMISSIVE_5 = 8
FOV_PERMISSIVE_6 = 9
FOV_PERMISSIVE_7 = 10
FOV_PERMISSIVE_8 = 11
FOV_RESTRICTIVE = 12
NB_FOV_ALGORITHMS = 13
def FOV_PERMISSIVE(p) :
return FOV_PERMISSIVE_0+p
def map_new(w, h):
return _lib.TCOD_map_new(w, h)
def map_copy(source, dest):
return _lib.TCOD_map_copy(source, dest)
def map_set_properties(m, x, y, isTrans, isWalk):
_lib.TCOD_map_set_properties(m, x, y, c_int(isTrans), c_int(isWalk))
def map_clear(m,walkable=False,transparent=False):
_lib.TCOD_map_clear(m,c_int(walkable),c_int(transparent))
def map_compute_fov(m, x, y, radius=0, light_walls=True, algo=FOV_RESTRICTIVE ):
_lib.TCOD_map_compute_fov(m, x, y, c_int(radius), c_bool(light_walls), c_int(algo))
def map_is_in_fov(m, x, y):
return _lib.TCOD_map_is_in_fov(m, x, y)
def map_is_transparent(m, x, y):
return _lib.TCOD_map_is_transparent(m, x, y)
def map_is_walkable(m, x, y):
return _lib.TCOD_map_is_walkable(m, x, y)
def map_delete(m):
return _lib.TCOD_map_delete(m)
def map_get_width(map):
return _lib.TCOD_map_get_width(map)
def map_get_height(map):
return _lib.TCOD_map_get_height(map)
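# Illustrative sketch (not executed on import): building a small map, carving a
# transparent/walkable interior, and computing field of view from its centre.
# The map size, radius and algorithm are arbitrary choices.
def _fov_example():
    m = map_new(10, 10)
    for y in range(1, 9):
        for x in range(1, 9):
            map_set_properties(m, x, y, True, True)  # transparent and walkable
    map_compute_fov(m, 5, 5, radius=4, light_walls=True, algo=FOV_SHADOW)
    visible = map_is_in_fov(m, 7, 5)
    map_delete(m)
    return visible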
############################
# pathfinding module
############################
_lib.TCOD_path_compute.restype = c_bool
_lib.TCOD_path_is_empty.restype = c_bool
_lib.TCOD_path_walk.restype = c_bool
PATH_CBK_FUNC = CFUNCTYPE(c_float, c_int, c_int, c_int, c_int, py_object)
def path_new_using_map(m, dcost=1.41):
return (_lib.TCOD_path_new_using_map(c_void_p(m), c_float(dcost)), None)
def path_new_using_function(w, h, func, userdata=0, dcost=1.41):
cbk_func = PATH_CBK_FUNC(func)
return (_lib.TCOD_path_new_using_function(w, h, cbk_func,
py_object(userdata), c_float(dcost)), cbk_func)
def path_compute(p, ox, oy, dx, dy):
return _lib.TCOD_path_compute(p[0], ox, oy, dx, dy)
def path_get_origin(p):
x = c_int()
y = c_int()
_lib.TCOD_path_get_origin(p[0], byref(x), byref(y))
return x.value, y.value
def path_get_destination(p):
x = c_int()
y = c_int()
_lib.TCOD_path_get_destination(p[0], byref(x), byref(y))
return x.value, y.value
def path_size(p):
return _lib.TCOD_path_size(p[0])
def path_reverse(p):
_lib.TCOD_path_reverse(p[0])
def path_get(p, idx):
x = c_int()
y = c_int()
_lib.TCOD_path_get(p[0], idx, byref(x), byref(y))
return x.value, y.value
def path_is_empty(p):
return _lib.TCOD_path_is_empty(p[0])
def path_walk(p, recompute):
x = c_int()
y = c_int()
if _lib.TCOD_path_walk(p[0], byref(x), byref(y), c_int(recompute)):
return x.value, y.value
return None,None
def path_delete(p):
_lib.TCOD_path_delete(p[0])
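# Illustrative sketch (not executed on import): A* pathfinding over a map built as
# in the FOV example above. Note that the path functions take the (pointer,
# callback) tuple returned by path_new_using_map()/path_new_using_function(), not
# a bare pointer; the start and goal cells are arbitrary.
def _path_example(m):
    p = path_new_using_map(m, 1.41)
    if path_compute(p, 1, 1, 8, 8):
        steps = [path_get(p, i) for i in range(path_size(p))]
    else:
        steps = []
    path_delete(p)
    return steps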
_lib.TCOD_dijkstra_path_set.restype = c_bool
_lib.TCOD_dijkstra_is_empty.restype = c_bool
_lib.TCOD_dijkstra_path_walk.restype = c_bool
_lib.TCOD_dijkstra_get_distance.restype = c_float
def dijkstra_new(m, dcost=1.41):
return (_lib.TCOD_dijkstra_new(c_void_p(m), c_float(dcost)), None)
def dijkstra_new_using_function(w, h, func, userdata=0, dcost=1.41):
cbk_func = PATH_CBK_FUNC(func)
return (_lib.TCOD_path_dijkstra_using_function(w, h, cbk_func,
py_object(userdata), c_float(dcost)), cbk_func)
def dijkstra_compute(p, ox, oy):
_lib.TCOD_dijkstra_compute(p[0], c_int(ox), c_int(oy))
def dijkstra_path_set(p, x, y):
return _lib.TCOD_dijkstra_path_set(p[0], c_int(x), c_int(y))
def dijkstra_get_distance(p, x, y):
return _lib.TCOD_dijkstra_get_distance(p[0], c_int(x), c_int(y))
def dijkstra_size(p):
return _lib.TCOD_dijkstra_size(p[0])
def dijkstra_reverse(p):
_lib.TCOD_dijkstra_reverse(p[0])
def dijkstra_get(p, idx):
x = c_int()
y = c_int()
_lib.TCOD_dijkstra_get(p[0], c_int(idx), byref(x), byref(y))
return x.value, y.value
def dijkstra_is_empty(p):
return _lib.TCOD_dijkstra_is_empty(p[0])
def dijkstra_path_walk(p):
x = c_int()
y = c_int()
if _lib.TCOD_dijkstra_path_walk(p[0], byref(x), byref(y)):
return x.value, y.value
return None,None
def dijkstra_delete(p):
_lib.TCOD_dijkstra_delete(p[0])
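# Illustrative sketch (not executed on import): the Dijkstra variant computes a
# distance field once and can then answer many destination queries cheaply.
# The root and destination cells are arbitrary.
def _dijkstra_example(m):
    d = dijkstra_new(m, 1.41)
    dijkstra_compute(d, 1, 1)                  # root the search at (1, 1)
    cost = dijkstra_get_distance(d, 8, 8)
    if dijkstra_path_set(d, 8, 8):             # build the actual path on demand
        steps = [dijkstra_get(d, i) for i in range(dijkstra_size(d))]
    else:
        steps = []
    dijkstra_delete(d)
    return cost, steps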
############################
# bsp module
############################
class _CBsp(Structure):
_fields_ = [('next', c_void_p),
('father', c_void_p),
('son', c_void_p),
('x', c_int),
('y', c_int),
('w', c_int),
('h', c_int),
('position', c_int),
('level', c_uint8),
('horizontal', c_bool),
]
_lib.TCOD_bsp_new_with_size.restype = POINTER(_CBsp)
_lib.TCOD_bsp_left.restype = POINTER(_CBsp)
_lib.TCOD_bsp_right.restype = POINTER(_CBsp)
_lib.TCOD_bsp_father.restype = POINTER(_CBsp)
_lib.TCOD_bsp_is_leaf.restype = c_bool
_lib.TCOD_bsp_contains.restype = c_bool
_lib.TCOD_bsp_find_node.restype = POINTER(_CBsp)
BSP_CBK_FUNC = CFUNCTYPE(c_int, c_void_p, c_void_p)
# python class encapsulating the _CBsp pointer
class Bsp(object):
def __init__(self, cnode):
pcbsp = cast(cnode, POINTER(_CBsp))
self.p = pcbsp
def getx(self):
return self.p.contents.x
def setx(self, value):
self.p.contents.x = value
x = property(getx, setx)
def gety(self):
return self.p.contents.y
def sety(self, value):
self.p.contents.y = value
y = property(gety, sety)
def getw(self):
return self.p.contents.w
def setw(self, value):
self.p.contents.w = value
w = property(getw, setw)
def geth(self):
return self.p.contents.h
def seth(self, value):
self.p.contents.h = value
h = property(geth, seth)
def getpos(self):
return self.p.contents.position
def setpos(self, value):
self.p.contents.position = value
position = property(getpos, setpos)
def gethor(self):
return self.p.contents.horizontal
def sethor(self,value):
self.p.contents.horizontal = value
horizontal = property(gethor, sethor)
def getlev(self):
return self.p.contents.level
def setlev(self,value):
self.p.contents.level = value
level = property(getlev, setlev)
def bsp_new_with_size(x, y, w, h):
return Bsp(_lib.TCOD_bsp_new_with_size(x, y, w, h))
def bsp_split_once(node, horizontal, position):
_lib.TCOD_bsp_split_once(node.p, c_int(horizontal), position)
def bsp_split_recursive(node, randomizer, nb, minHSize, minVSize, maxHRatio,
maxVRatio):
_lib.TCOD_bsp_split_recursive(node.p, randomizer, nb, minHSize, minVSize,
c_float(maxHRatio), c_float(maxVRatio))
def bsp_resize(node, x, y, w, h):
_lib.TCOD_bsp_resize(node.p, x, y, w, h)
def bsp_left(node):
return Bsp(_lib.TCOD_bsp_left(node.p))
def bsp_right(node):
return Bsp(_lib.TCOD_bsp_right(node.p))
def bsp_father(node):
return Bsp(_lib.TCOD_bsp_father(node.p))
def bsp_is_leaf(node):
return _lib.TCOD_bsp_is_leaf(node.p)
def bsp_contains(node, cx, cy):
return _lib.TCOD_bsp_contains(node.p, cx, cy)
def bsp_find_node(node, cx, cy):
return Bsp(_lib.TCOD_bsp_find_node(node.p, cx, cy))
def _bsp_traverse(node, callback, userData, func):
# convert the c node into a python node
#before passing it to the actual callback
def node_converter(cnode, data):
node = Bsp(cnode)
return callback(node, data)
cbk_func = BSP_CBK_FUNC(node_converter)
func(node.p, cbk_func, userData)
def bsp_traverse_pre_order(node, callback, userData=0):
_bsp_traverse(node, callback, userData, _lib.TCOD_bsp_traverse_pre_order)
def bsp_traverse_in_order(node, callback, userData=0):
_bsp_traverse(node, callback, userData, _lib.TCOD_bsp_traverse_in_order)
def bsp_traverse_post_order(node, callback, userData=0):
_bsp_traverse(node, callback, userData, _lib.TCOD_bsp_traverse_post_order)
def bsp_traverse_level_order(node, callback, userData=0):
_bsp_traverse(node, callback, userData, _lib.TCOD_bsp_traverse_level_order)
def bsp_traverse_inverted_level_order(node, callback, userData=0):
_bsp_traverse(node, callback, userData,
_lib.TCOD_bsp_traverse_inverted_level_order)
def bsp_remove_sons(node):
_lib.TCOD_bsp_remove_sons(node.p)
def bsp_delete(node):
_lib.TCOD_bsp_delete(node.p)
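# Illustrative sketch (not executed on import): recursively splitting a region and
# collecting the leaf rectangles, a common first step for dungeon generation.
# The sizes, recursion depth and ratios are arbitrary; randomizer=0 selects the
# default RNG on the C side.
def _bsp_example():
    leaves = []
    def visitor(node, userdata):
        if bsp_is_leaf(node):
            leaves.append((node.x, node.y, node.w, node.h))
        return True
    root = bsp_new_with_size(0, 0, 80, 50)
    bsp_split_recursive(root, 0, 4, 8, 8, 1.5, 1.5)
    bsp_traverse_pre_order(root, visitor)
    bsp_delete(root)
    return leaves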
############################
# heightmap module
############################
class _CHeightMap(Structure):
_fields_=[('w', c_int),
('h', c_int),
('values', POINTER(c_float)),
]
_lib.TCOD_heightmap_new.restype = POINTER(_CHeightMap)
_lib.TCOD_heightmap_get_value.restype = c_float
_lib.TCOD_heightmap_has_land_on_border.restype = c_bool
class HeightMap(object):
def __init__(self, chm):
pchm = cast(chm, POINTER(_CHeightMap))
self.p = pchm
def getw(self):
return self.p.contents.w
def setw(self, value):
self.p.contents.w = value
w = property(getw, setw)
def geth(self):
return self.p.contents.h
def seth(self, value):
self.p.contents.h = value
h = property(geth, seth)
def heightmap_new(w, h):
phm = _lib.TCOD_heightmap_new(w, h)
return HeightMap(phm)
def heightmap_set_value(hm, x, y, value):
_lib.TCOD_heightmap_set_value(hm.p, x, y, c_float(value))
def heightmap_add(hm, value):
_lib.TCOD_heightmap_add(hm.p, c_float(value))
def heightmap_scale(hm, value):
_lib.TCOD_heightmap_scale(hm.p, c_float(value))
def heightmap_clear(hm):
_lib.TCOD_heightmap_clear(hm.p)
def heightmap_clamp(hm, mi, ma):
_lib.TCOD_heightmap_clamp(hm.p, c_float(mi),c_float(ma))
def heightmap_copy(hm1, hm2):
_lib.TCOD_heightmap_copy(hm1.p, hm2.p)
def heightmap_normalize(hm, mi=0.0, ma=1.0):
_lib.TCOD_heightmap_normalize(hm.p, c_float(mi), c_float(ma))
def heightmap_lerp_hm(hm1, hm2, hm3, coef):
_lib.TCOD_heightmap_lerp_hm(hm1.p, hm2.p, hm3.p, c_float(coef))
def heightmap_add_hm(hm1, hm2, hm3):
_lib.TCOD_heightmap_add_hm(hm1.p, hm2.p, hm3.p)
def heightmap_multiply_hm(hm1, hm2, hm3):
_lib.TCOD_heightmap_multiply_hm(hm1.p, hm2.p, hm3.p)
def heightmap_add_hill(hm, x, y, radius, height):
_lib.TCOD_heightmap_add_hill(hm.p, c_float( x), c_float( y),
c_float( radius), c_float( height))
def heightmap_dig_hill(hm, x, y, radius, height):
_lib.TCOD_heightmap_dig_hill(hm.p, c_float( x), c_float( y),
c_float( radius), c_float( height))
def heightmap_rain_erosion(hm, nbDrops, erosionCoef, sedimentationCoef, rnd=0):
_lib.TCOD_heightmap_rain_erosion(hm.p, nbDrops, c_float( erosionCoef),
c_float( sedimentationCoef), rnd)
def heightmap_kernel_transform(hm, kernelsize, dx, dy, weight, minLevel,
maxLevel):
FARRAY = c_float * kernelsize
IARRAY = c_int * kernelsize
cdx = IARRAY(*dx)
cdy = IARRAY(*dy)
cweight = FARRAY(*weight)
_lib.TCOD_heightmap_kernel_transform(hm.p, kernelsize, cdx, cdy, cweight,
c_float(minLevel), c_float(maxLevel))
def heightmap_add_voronoi(hm, nbPoints, nbCoef, coef, rnd=0):
FARRAY = c_float * nbCoef
ccoef = FARRAY(*coef)
_lib.TCOD_heightmap_add_voronoi(hm.p, nbPoints, nbCoef, ccoef, rnd)
def heightmap_add_fbm(hm, noise, mulx, muly, addx, addy, octaves, delta, scale):
_lib.TCOD_heightmap_add_fbm(hm.p, noise, c_float(mulx), c_float(muly),
c_float(addx), c_float(addy),
c_float(octaves), c_float(delta),
c_float(scale))
def heightmap_scale_fbm(hm, noise, mulx, muly, addx, addy, octaves, delta,
scale):
_lib.TCOD_heightmap_scale_fbm(hm.p, noise, c_float(mulx), c_float(muly),
c_float(addx), c_float(addy),
c_float(octaves), c_float(delta),
c_float(scale))
def heightmap_dig_bezier(hm, px, py, startRadius, startDepth, endRadius,
endDepth):
IARRAY = c_int * 4
cpx = IARRAY(*px)
cpy = IARRAY(*py)
_lib.TCOD_heightmap_dig_bezier(hm.p, cpx, cpy, c_float(startRadius),
c_float(startDepth), c_float(endRadius),
c_float(endDepth))
def heightmap_get_value(hm, x, y):
return _lib.TCOD_heightmap_get_value(hm.p, x, y)
def heightmap_get_interpolated_value(hm, x, y):
return _lib.TCOD_heightmap_get_interpolated_value(hm.p, c_float(x),
c_float(y))
def heightmap_get_slope(hm, x, y):
return _lib.TCOD_heightmap_get_slope(hm.p, x, y)
def heightmap_get_normal(hm, x, y, waterLevel):
FARRAY = c_float * 3
cn = FARRAY()
_lib.TCOD_heightmap_get_normal(hm.p, c_float(x), c_float(y), cn,
c_float(waterLevel))
return cn[0], cn[1], cn[2]
def heightmap_count_cells(hm, mi, ma):
return _lib.TCOD_heightmap_count_cells(hm.p, c_float(mi), c_float(ma))
def heightmap_has_land_on_border(hm, waterlevel):
return _lib.TCOD_heightmap_has_land_on_border(hm.p, c_float(waterlevel))
def heightmap_get_minmax(hm):
mi = c_float()
ma = c_float()
_lib.TCOD_heightmap_get_minmax(hm.p, byref(mi), byref(ma))
return mi.value, ma.value
def heightmap_delete(hm):
_lib.TCOD_heightmap_delete(hm.p)
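# Illustrative sketch (not executed on import): a tiny terrain built from a few
# hills and normalized to [0, 1]. Dimensions, hill positions and radii are
# arbitrary.
def _heightmap_example():
    hm = heightmap_new(40, 30)
    heightmap_add_hill(hm, 10.0, 10.0, 6.0, 1.0)
    heightmap_add_hill(hm, 25.0, 18.0, 9.0, 0.5)
    heightmap_normalize(hm, 0.0, 1.0)
    mi, ma = heightmap_get_minmax(hm)          # should now be close to (0.0, 1.0)
    sample = heightmap_get_value(hm, 10, 10)
    heightmap_delete(hm)
    return mi, ma, sample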
############################
# name generator module
############################
_lib.TCOD_namegen_generate.restype = c_char_p
_lib.TCOD_namegen_generate_custom.restype = c_char_p
def namegen_parse(filename,random=0) :
_lib.TCOD_namegen_parse(filename,random)
def namegen_generate(name) :
return _lib.TCOD_namegen_generate(name, 0)
def namegen_generate_custom(name, rule) :
    return _lib.TCOD_namegen_generate_custom(name, rule, 0)
def namegen_get_sets():
nb=_lib.TCOD_namegen_get_nb_sets_wrapper()
    SARRAY = c_char_p * nb
setsa = SARRAY()
_lib.TCOD_namegen_get_sets_wrapper(setsa)
return list(setsa)
def namegen_destroy() :
_lib.TCOD_namegen_destroy()
| gpl-2.0 |
openmips/gbremote-client | src/GBIpboxTimer.py | 1 | 3289 | #############################################################################
#
# Copyright (C) 2014 Impex-Sat Gmbh & Co.KG
# Written by Sandro Cavazzoni <[email protected]>
# All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#############################################################################
from Components.config import config
from GBIpboxDownloader import GBIpboxDownloader
from GBIpboxLocale import _
from enigma import eTimer
from time import localtime, time, strftime, mktime
class GBIpboxTimer:
def __init__(self, session):
self.session = session
self.ipboxdownloadtimer = eTimer()
self.ipboxdownloadtimer.callback.append(self.onIpboxDownloadTimer)
self.ipboxpolltimer = eTimer()
self.ipboxpolltimer.timeout.get().append(self.onIpboxPollTimer)
self.refreshScheduler()
def onIpboxPollTimer(self):
self.ipboxpolltimer.stop()
self.scheduledtime = self.prepareTimer()
def getTodayScheduledTime(self):
backupclock = config.ipboxclient.scheduletime.value
now = localtime(time())
return int(mktime((now.tm_year, now.tm_mon, now.tm_mday, backupclock[0], backupclock[1], 0, now.tm_wday, now.tm_yday, now.tm_isdst)))
def prepareTimer(self):
self.ipboxdownloadtimer.stop()
scheduled_time = self.getTodayScheduledTime()
now = int(time())
if scheduled_time > 0:
if scheduled_time < now:
if config.ipboxclient.repeattype.value == "daily":
scheduled_time += 24 * 3600
while (int(scheduled_time) - 30) < now:
scheduled_time += 24 * 3600
elif config.ipboxclient.repeattype.value == "weekly":
scheduled_time += 7 * 24 * 3600
while (int(scheduled_time) - 30) < now:
scheduled_time += 7 * 24 * 3600
elif config.ipboxclient.repeattype.value == "monthly":
scheduled_time += 30 * 24 * 3600
while (int(scheduled_time) - 30) < now:
scheduled_time += 30 * 24 * 3600
next = scheduled_time - now
self.ipboxdownloadtimer.startLongTimer(next)
else:
scheduled_time = -1
return scheduled_time
def onIpboxDownloadTimer(self):
self.ipboxdownloadtimer.stop()
now = int(time())
wake = self.getTodayScheduledTime()
if wake - now < 60:
downloader = GBIpboxDownloader(self.session)
try:
downloader.download()
except Exception, e:
print e
self.scheduledtime = self.prepareTimer()
def refreshScheduler(self):
now = int(time())
if config.ipboxclient.schedule.value:
if now > 1262304000:
self.scheduledtime = self.prepareTimer()
else:
self.scheduledtime = 0
self.ipboxpolltimer.start(36000)
else:
self.scheduledtime = 0
self.ipboxpolltimer.stop()
| gpl-2.0 |
striges/gapuff | gapuff/met_def.py | 1 | 2212 | #coding=utf-8
import numpy
import global_settings
class met_def:
    """Define meteorological data: constant fields, station sequences and gridded fields.
    Three met data modes are supported:
    0 - constant meteorological field: data must be a tuple (u, v, z, stab)
    1 - SAM station data: data must be a list of (u, v, z, stab) tuples, one per entry in seq; it is converted to a numpy array indexed as (t_index, value_index)
    2 - gridded meteorological field: data must be a numpy ndarray indexed as (t_index, x_index, y_index, value_index)
    """
def __init__(self, mode, data, seq):
self.mode = mode
self.seq = seq
if self.mode == 0 and isinstance(data, tuple) and len(data) == 4:
self.data = data
elif self.mode == 1 and isinstance(data, list) and len(data) == len(seq):
self.data = numpy.array(data, numpy.float32)
elif self.mode == 2 and isinstance(data, numpy.ndarray) and data.shape[0] == len(seq):
self.data = data
else:
raise Exception("Invalid input")
def extract(self, tick, position):
x, y, z = position
if self.mode == 0:
if abs(x) > global_settings.HALF_SIZE * global_settings.GRID_INTERVAL or abs(y) > global_settings.HALF_SIZE * global_settings.GRID_INTERVAL:
return None
else:
return self.data
if self.mode == 1:
raise NotImplementedError("Not Implemented")
if self.mode == 2:
try:
GridIdxX = int(x // global_settings.GRID_INTERVAL) + global_settings.HALF_SIZE
GridIdxY = int(y // global_settings.GRID_INTERVAL) + global_settings.HALF_SIZE
                # TIMESTEP is assumed to live in global_settings alongside GRID_INTERVAL and HALF_SIZE
                timeindex = sum(t < global_settings.TIMESTEP * tick for t in self.seq)
virtualmet = self.data[timeindex, GridIdxX, GridIdxY]
stab = virtualmet[3]
uspeed = virtualmet[0] * (z / 10.0) ** global_settings.windprofile.urban[int(stab) - 1]
vspeed = virtualmet[1] * (z / 10.0) ** global_settings.windprofile.urban[int(stab) - 1]
#self.windspeed = math.sqrt(virtualmet[0] ** 2 + virtualmet[1] ** 2)
return [uspeed, vspeed, 0, stab]
except:
return None | gpl-3.0 |
whs/django | tests/raw_query/models.py | 112 | 1358 | from django.db import models
class Author(models.Model):
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
dob = models.DateField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Protect against annotations being passed to __init__ --
# this'll make the test suite get angry if annotations aren't
# treated differently than fields.
for k in kwargs:
assert k in [f.attname for f in self._meta.fields], \
"Author.__init__ got an unexpected parameter: %s" % k
class Book(models.Model):
title = models.CharField(max_length=255)
author = models.ForeignKey(Author, models.CASCADE)
paperback = models.BooleanField(default=False)
opening_line = models.TextField()
class BookFkAsPk(models.Model):
book = models.ForeignKey(Book, models.CASCADE, primary_key=True, db_column="not_the_default")
class Coffee(models.Model):
brand = models.CharField(max_length=255, db_column="name")
price = models.DecimalField(max_digits=10, decimal_places=2, default=0)
class MixedCaseIDColumn(models.Model):
id = models.AutoField(primary_key=True, db_column='MiXeD_CaSe_Id')
class Reviewer(models.Model):
reviewed = models.ManyToManyField(Book)
class FriendlyAuthor(Author):
pass
| bsd-3-clause |
openstack/manila | manila/api/v2/quota_class_sets.py | 2 | 3475 | # Copyright 2012 OpenStack LLC.
# Copyright (c) 2015 Mirantis inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from manila.api.openstack import wsgi
from manila.api.views import quota_class_sets as quota_class_sets_views
from manila import db
from manila import exception
from manila import quota
QUOTAS = quota.QUOTAS
class QuotaClassSetsMixin(object):
"""The Quota Class Sets API controller common logic.
Mixin class that should be inherited by Quota Class Sets API controllers,
which are used for different API URLs and microversions.
"""
resource_name = "quota_class_set"
_view_builder_class = quota_class_sets_views.ViewBuilder
@wsgi.Controller.authorize("show")
def _show(self, req, id):
context = req.environ['manila.context']
try:
db.authorize_quota_class_context(context, id)
except exception.NotAuthorized:
raise webob.exc.HTTPForbidden()
return self._view_builder.detail_list(
req, QUOTAS.get_class_quotas(context, id), id)
@wsgi.Controller.authorize("update")
def _update(self, req, id, body):
context = req.environ['manila.context']
quota_class = id
for key in body.get(self.resource_name, {}).keys():
if key in QUOTAS:
value = int(body[self.resource_name][key])
try:
db.quota_class_update(context, quota_class, key, value)
except exception.QuotaClassNotFound:
db.quota_class_create(context, quota_class, key, value)
except exception.AdminRequired:
raise webob.exc.HTTPForbidden()
return self._view_builder.detail_list(
req, QUOTAS.get_class_quotas(context, quota_class))
class QuotaClassSetsControllerLegacy(QuotaClassSetsMixin, wsgi.Controller):
"""Deprecated Quota Class Sets API controller.
Used by legacy API v1 and v2 microversions from 2.0 to 2.6.
Registered under deprecated API URL 'os-quota-class-sets'.
"""
@wsgi.Controller.api_version('1.0', '2.6')
def show(self, req, id):
return self._show(req, id)
@wsgi.Controller.api_version('1.0', '2.6')
def update(self, req, id, body):
return self._update(req, id, body)
class QuotaClassSetsController(QuotaClassSetsMixin, wsgi.Controller):
"""Quota Class Sets API controller.
Used only by API v2 starting from microversion 2.7.
Registered under API URL 'quota-class-sets'.
"""
@wsgi.Controller.api_version('2.7')
def show(self, req, id):
return self._show(req, id)
@wsgi.Controller.api_version('2.7')
def update(self, req, id, body):
return self._update(req, id, body)
def create_resource_legacy():
return wsgi.Resource(QuotaClassSetsControllerLegacy())
def create_resource():
return wsgi.Resource(QuotaClassSetsController())
| apache-2.0 |
hyuh/villec2-kernel | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
laurentb/weboob | modules/residentadvisor/pages.py | 2 | 4917 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Alexandre Morignot
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.calendar import CATEGORIES, STATUS, TICKET
from weboob.browser.elements import ItemElement, ListElement, method
from weboob.browser.filters.html import Attr, CleanHTML, Link
from weboob.browser.filters.standard import CleanDecimal, CleanText, Date, CombineDate, DateTime, Regexp, Time, Type
from weboob.browser.pages import HTMLPage
from weboob.capabilities.calendar import BaseCalendarEvent
from datetime import timedelta
class BasePage(HTMLPage):
@property
def logged(self):
return bool(self.doc.xpath('//li[@id="profile"]/span[contains(text(), "Welcome")]'))
class LoginPage(BasePage):
def login(self, username, password):
form = self.get_form()
form['UsernameOrEmailAddress'] = username
form['Password'] = password
form.submit()
class ListPage(BasePage):
@method
class get_events(ListElement):
item_xpath = '//ul[@id="items"]/li/article'
class item(ItemElement):
klass = BaseCalendarEvent
obj_url = Link('./div[@class="bbox"]/h1/a')
obj_id = Regexp(Link('./div[@class="bbox"]/h1/a'), r'aspx\?(.+)')
obj_location = CleanText('./div[@class="bbox"]/span/a')
obj_start_date = DateTime(Attr('.//time', 'datetime'))
obj_summary = Regexp(Attr('./div[@class="bbox"]/h1/a', 'title'), r'details of (.+)')
obj_category = CATEGORIES.CONCERT
obj_status = STATUS.CONFIRMED
def get_country_id(self, country):
return Regexp(Link('//li[@id="liCountry"]/ul/li/a[./text()="%s"]' % country, default=''), r'ai=([^&]+)&?', default=None)(self.doc)
def get_city_id(self, city):
return Regexp(Link('//li[@id="liArea"]/ul/li/a[./text()="%s"]' % city, default=''), r'ai=([^&]+)&?', default=None)(self.doc)
def get_country_id_next_to(self, country_id):
return Regexp(Link('//li[@id="liCountry"]/ul/li[./a[contains(@href, "ai=%s&")]]/following-sibling::li/a' % country_id, default=''), r'ai=([^&]+)&?', default=None)(self.doc)
class EventPage(BasePage):
@method
class get_event(ItemElement):
klass = BaseCalendarEvent
obj_summary = CleanText('//div[@id="sectionHead"]/h1')
obj_description = CleanHTML('//div[@id="event-item"]/div[3]/p[2]')
obj_price = CleanDecimal(Regexp(CleanText('//aside[@id="detail"]/ul/li[3]'), r'Cost /[^\d]*([\d ,.]+).', default=''), default=None)
obj_location = Regexp(CleanText('//aside[@id="detail"]/ul/li[2]'), r'Venue / (.+)')
obj_booked_entries = Type(CleanText('//h1[@id="MembersFavouriteCount"]'), type=int)
obj_status = STATUS.CONFIRMED
obj_category = CATEGORIES.CONCERT
_date = Date(CleanText('//aside[@id="detail"]/ul/li[1]/a[1]'))
def obj_start_date(self):
start_time = Time(Regexp(CleanText('//aside[@id="detail"]/ul/li[1]'), r'(\d{2}:\d{2}) -'))(self)
return CombineDate(self._date, start_time)(self)
def obj_end_date(self):
end_time = Time(Regexp(CleanText('//aside[@id="detail"]/ul/li[1]'), r'- (\d{2}:\d{2})'))(self)
end_date = CombineDate(self._date, end_time)(self)
            if end_date < self.obj_start_date():
end_date += timedelta(days = 1)
return end_date
def obj_ticket(self):
li_class = Attr('//li[@id="tickets"]//li[1]', 'class', default=None)(self)
if li_class:
if li_class == 'closed':
return TICKET.CLOSED
else:
return TICKET.AVAILABLE
return TICKET.NOTAVAILABLE
class SearchPage(BasePage):
@method
class get_events(ListElement):
item_xpath = '//main/ul/li/section/div/div/ul/li'
class item(ItemElement):
klass = BaseCalendarEvent
obj_url = Link('./a[1]')
obj_id = Regexp(Link('./a[1]'), r'\?(\d+)')
obj_summary = CleanText('./a[1]')
obj_start_date = Date(CleanText('./span[1]'))
obj_category = CATEGORIES.CONCERT
obj_status = STATUS.CONFIRMED
| lgpl-3.0 |
openstack/nova | nova/tests/functional/regressions/test_bug_1797580.py | 2 | 4260 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
class ColdMigrateTargetHostThenLiveMigrateTest(
test.TestCase, integrated_helpers.InstanceHelperMixin):
"""Regression test for bug 1797580 introduced in Queens.
Microversion 2.56 allows cold migrating to a specified target host. The
compute API sets the requested destination on the request spec with the
specified target host and then conductor sends that request spec to the
scheduler to validate the host. Conductor later persists the changes to
the request spec because it's the resize flow and the flavor could change
(even though in this case it won't since it's a cold migrate). After
confirming the resize, if the server is live migrated it will fail during
scheduling because of the persisted RequestSpec.requested_destination
from the cold migration, and you can't live migrate to the same host on
which the instance is currently running.
This test reproduces the regression and will validate the fix.
"""
def setUp(self):
super(ColdMigrateTargetHostThenLiveMigrateTest, self).setUp()
self.useFixture(nova_fixtures.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
self.glance = self.useFixture(nova_fixtures.GlanceFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
# The admin API is used to get the server details to verify the
# host on which the server was built and cold/live migrate it.
self.admin_api = api_fixture.admin_api
self.api = api_fixture.api
# Use the latest microversion available to make sure something does
# not regress in new microversions; cap as necessary.
self.admin_api.microversion = 'latest'
self.api.microversion = 'latest'
self.start_service('conductor')
self.start_service('scheduler')
for host in ('host1', 'host2'):
self.start_service('compute', host=host)
def test_cold_migrate_target_host_then_live_migrate(self):
# Create a server, it doesn't matter on which host it builds.
server = self._build_server(networks='none')
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(server, 'ACTIVE')
original_host = server['OS-EXT-SRV-ATTR:host']
target_host = 'host1' if original_host == 'host2' else 'host2'
# Cold migrate the server to the specific target host.
migrate_req = {'migrate': {'host': target_host}}
self.admin_api.post_server_action(server['id'], migrate_req)
server = self._wait_for_state_change(server, 'VERIFY_RESIZE')
# Confirm the resize so the server stays on the target host.
        confirm_req = {'confirmResize': None}
        self.admin_api.post_server_action(server['id'], confirm_req)
server = self._wait_for_state_change(server, 'ACTIVE')
# Attempt to live migrate the server but don't specify a host so the
# scheduler has to pick one.
live_migrate_req = {
'os-migrateLive': {'host': None, 'block_migration': 'auto'}}
self.admin_api.post_server_action(server['id'], live_migrate_req)
server = self._wait_for_state_change(server, 'ACTIVE')
# The live migration should have been successful and the server is now
# back on the original host.
self.assertEqual(original_host, server['OS-EXT-SRV-ATTR:host'])
| apache-2.0 |
ArthurGarnier/SickRage | lib/lxml/_elementpath.py | 18 | 10121 | #
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
import re
xpath_tokenizer_re = re.compile(
"("
"'[^']*'|\"[^\"]*\"|"
"::|"
"//?|"
r"\.\.|"
r"\(\)|"
r"[/.*:\[\]\(\)@=])|"
r"((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
r"\s+"
)
def xpath_tokenizer(pattern, namespaces=None):
default_namespace = namespaces.get(None) if namespaces else None
for token in xpath_tokenizer_re.findall(pattern):
tag = token[1]
if tag and tag[0] != "{":
if ":" in tag:
prefix, uri = tag.split(":", 1)
try:
if not namespaces:
raise KeyError
yield token[0], "{%s}%s" % (namespaces[prefix], uri)
except KeyError:
raise SyntaxError("prefix %r not found in prefix map" % prefix)
elif default_namespace:
yield token[0], "{%s}%s" % (default_namespace, tag)
else:
yield token
else:
yield token
def prepare_child(next, token):
tag = token[1]
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
yield e
return select
def prepare_star(next, token):
def select(result):
for elem in result:
for e in elem.iterchildren('*'):
yield e
return select
def prepare_self(next, token):
def select(result):
return result
return select
def prepare_descendant(next, token):
token = next()
if token[0] == "*":
tag = "*"
elif not token[0]:
tag = token[1]
else:
raise SyntaxError("invalid descendant")
def select(result):
for elem in result:
for e in elem.iterdescendants(tag):
yield e
return select
def prepare_parent(next, token):
def select(result):
for elem in result:
parent = elem.getparent()
if parent is not None:
yield parent
return select
def prepare_predicate(next, token):
# FIXME: replace with real parser!!! refs:
# http://effbot.org/zone/simple-iterator-parser.htm
# http://javascript.crockford.com/tdop/tdop.html
signature = ''
predicate = []
while 1:
token = next()
if token[0] == "]":
break
if token == ('', ''):
# ignore whitespace
continue
if token[0] and token[0][:1] in "'\"":
token = "'", token[0][1:-1]
signature += token[0] or "-"
predicate.append(token[1])
# use signature to determine predicate type
if signature == "@-":
# [@attribute] predicate
key = predicate[1]
def select(result):
for elem in result:
if elem.get(key) is not None:
yield elem
return select
if signature == "@-='":
# [@attribute='value']
key = predicate[1]
value = predicate[-1]
def select(result):
for elem in result:
if elem.get(key) == value:
yield elem
return select
if signature == "-" and not re.match(r"-?\d+$", predicate[0]):
# [tag]
tag = predicate[0]
def select(result):
for elem in result:
for _ in elem.iterchildren(tag):
yield elem
break
return select
if signature == ".='" or (signature == "-='" and not re.match(r"-?\d+$", predicate[0])):
# [.='value'] or [tag='value']
tag = predicate[0]
value = predicate[-1]
if tag:
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
if "".join(e.itertext()) == value:
yield elem
break
else:
def select(result):
for elem in result:
if "".join(elem.itertext()) == value:
yield elem
return select
if signature == "-" or signature == "-()" or signature == "-()-":
# [index] or [last()] or [last()-index]
if signature == "-":
# [index]
index = int(predicate[0]) - 1
if index < 0:
if index == -1:
raise SyntaxError(
"indices in path predicates are 1-based, not 0-based")
else:
raise SyntaxError("path index >= 1 expected")
else:
if predicate[0] != "last":
raise SyntaxError("unsupported function")
if signature == "-()-":
try:
index = int(predicate[2]) - 1
except ValueError:
raise SyntaxError("unsupported expression")
else:
index = -1
def select(result):
for elem in result:
parent = elem.getparent()
if parent is None:
continue
try:
# FIXME: what if the selector is "*" ?
elems = list(parent.iterchildren(elem.tag))
if elems[index] is elem:
yield elem
except IndexError:
pass
return select
raise SyntaxError("invalid predicate")
ops = {
"": prepare_child,
"*": prepare_star,
".": prepare_self,
"..": prepare_parent,
"//": prepare_descendant,
"[": prepare_predicate,
}
# --------------------------------------------------------------------
_cache = {}
def _build_path_iterator(path, namespaces):
"""compile selector pattern"""
if path[-1:] == "/":
path += "*" # implicit all (FIXME: keep this?)
cache_key = (path,)
if namespaces:
if '' in namespaces:
raise ValueError("empty namespace prefix must be passed as None, not the empty string")
if None in namespaces:
cache_key += (namespaces[None],) + tuple(sorted(
item for item in namespaces.items() if item[0] is not None))
else:
cache_key += tuple(sorted(namespaces.items()))
try:
return _cache[cache_key]
except KeyError:
pass
if len(_cache) > 100:
_cache.clear()
if path[:1] == "/":
raise SyntaxError("cannot use absolute path on element")
stream = iter(xpath_tokenizer(path, namespaces))
try:
_next = stream.next
except AttributeError:
# Python 3
_next = stream.__next__
try:
token = _next()
except StopIteration:
raise SyntaxError("empty path expression")
selector = []
while 1:
try:
selector.append(ops[token[0]](_next, token))
except StopIteration:
raise SyntaxError("invalid path")
try:
token = _next()
if token[0] == "/":
token = _next()
except StopIteration:
break
_cache[cache_key] = selector
return selector
##
# Iterate over the matching nodes
def iterfind(elem, path, namespaces=None):
selector = _build_path_iterator(path, namespaces)
result = iter((elem,))
for select in selector:
result = select(result)
return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
it = iterfind(elem, path, namespaces)
try:
return next(it)
except StopIteration:
return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
return list(iterfind(elem, path, namespaces))
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
el = find(elem, path, namespaces)
if el is None:
return default
else:
return el.text or ''
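# Illustrative usage sketch (not part of the original module): exercising the
# public helpers above directly on an lxml element. In practice lxml calls into
# this module via Element.find()/findall(); requires lxml to be installed.
if __name__ == '__main__':
    from lxml import etree
    root = etree.fromstring("<a><b name='x'/><b name='y'><c/></b></a>")
    print(findall(root, 'b'))                        # both <b> children
    print(find(root, "b[@name='y']/c").tag)          # 'c'
    print(findtext(root, 'missing', default='n/a'))  # 'n/a'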
| gpl-3.0 |
Tomasuh/Anti-virus-in-python | superior.py | 1 | 3498 | import psutil,time,database,pefile2,os,psfunc
class processWatcher:
"""docstring for processWatcher"""
def __init__(self):
self.db = database.databaseHandler()
self.mode =self.db.runMode()
self.pe = pefile2.peInfo()
self.ps = psfunc.psutilFunc()
#Action against process depending on mode.
def action(self,pid,scannedFile,absPath):
if self.mode=="Normal":
if scannedFile!="SAFE":
print "Malware"+ scannedFile +" detected, killing: " + absPath
self.ps.kill(pid)
elif self.mode=="Chill":
if scannedFile!="SAFE":
print "Wazup, looks like your infected by: " + scannedFile +"\n in:" + absPath
elif self.mode=="Bitch":
print absPath + " ain't in the database, lets kill it!"
self.ps.kill(pid)
	#Returns True if the given file has the same md5 hash as the corresponding
	#entry in the database; no reason to scan if that's the case.
def md5FileCheck(self,absPath):
return self.db.md5Fil(absPath)==self.pe.md5_for_file(absPath)
def absProcessPath(self,pid):
return psutil.Process(pid).exe
#Returns list of active processes in pid form
def count (self):
#print psutil.get_process_list()
return psutil.get_pid_list()
#Checks if all the elements in list2 exists in list1. Those who don't returned as a list
#Example: class.diff([1,2,3],[2,3,4]) returns [4]
def diff (self,list1,list2):
newAdded = []
for elem in list2:
if elem in list1:
continue
else:
newAdded.append(elem)
return newAdded
#Loops through all given pids, checks if the process is in the db, if not, insert it.
def processName(self,pids):
for elem in pids:
try:
self.name = psutil.Process(elem).name
self.path = self.absProcessPath(elem)
self.exist = self.db.absProcesspathExist(self.path)
print self.exist
print self.name
if (self.exist==0):
fileData = self.pe.compend(self.path,self.name)
self.db.insertFile(fileData)
print self.name + " added to database."
else:
print "Process already in database."
#If the md5 in the database is the same, shit is already scanned.
if (self.md5FileCheck(self.path) and self.db.getLabel(self.path)=="SAFE"):
print "No need to scan process"
continue
print "Scanning "+ self.path
procLabel = self.db.getLabel(self.path)
self.action(elem,procLabel, self.path)
self.db.logFile([procLabel,self.path])
			except psutil.NoSuchProcess:
				print "Process exited too fast"
			except os.error:
				print "OS error when accessing file " + self.name
#Looks for new processes
def checker(self):
beginning=self.count()
while True:
try:
if beginning != self.count():
self.processName (self.diff(beginning,self.count()))
beginning=self.count()
except psutil.AccessDenied:
continue
time.sleep(0.4)
process = processWatcher()
process.checker() | gpl-3.0 |
unindented/streamcode | client/static/jsrepl/extern/python/closured/lib/python2.7/encodings/gb18030.py | 816 | 1031 | #
# gb18030.py: Python Unicode Codec for GB18030
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gb18030')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='gb18030',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
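# Illustrative usage sketch (not part of the codec module): the codec registers
# itself through the standard codecs machinery, so the normal string APIs apply.
if __name__ == '__main__':
    text = u'\u4e2d\u6587'              # two CJK characters
    data = text.encode('gb18030')       # multibyte GB18030 bytes
    assert data.decode('gb18030') == text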
| mit |
AmandaCMS/amanda-cms | amanda/product/migrations/0001_initial.py | 1 | 2898 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProductImage'
db.create_table(u'product_productimage', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['product.Product'])),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100)),
))
db.send_create_signal(u'product', ['ProductImage'])
# Adding model 'Product'
db.create_table(u'product_product', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=128)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50)),
('description', self.gf('ckeditor.fields.RichTextField')()),
('product_category', self.gf('django.db.models.fields.CharField')(max_length=32)),
('documentation_link', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('demo_video_link', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
))
db.send_create_signal(u'product', ['Product'])
def backwards(self, orm):
# Deleting model 'ProductImage'
db.delete_table(u'product_productimage')
# Deleting model 'Product'
db.delete_table(u'product_product')
models = {
u'product.product': {
'Meta': {'object_name': 'Product'},
'demo_video_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('ckeditor.fields.RichTextField', [], {}),
'documentation_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product_category': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'product.productimage': {
'Meta': {'object_name': 'ProductImage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['product.Product']"})
}
}
complete_apps = ['product'] | mit |
cesargtz/YecoraOdoo | addons/account/account_financial_report.py | 339 | 7636 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
# ---------------------------------------------------------
# Account Financial Report
# ---------------------------------------------------------
class account_financial_report(osv.osv):
_name = "account.financial.report"
_description = "Account Report"
def _get_level(self, cr, uid, ids, field_name, arg, context=None):
'''Returns a dictionary with key=the ID of a record and value = the level of this
record in the tree structure.'''
res = {}
for report in self.browse(cr, uid, ids, context=context):
level = 0
if report.parent_id:
level = report.parent_id.level + 1
res[report.id] = level
return res
def _get_children_by_order(self, cr, uid, ids, context=None):
        '''Returns a flat list of record ids: each requested id followed by all of its
        children, computed recursively and sorted by sequence. Ready for printing.'''
res = []
for id in ids:
res.append(id)
ids2 = self.search(cr, uid, [('parent_id', '=', id)], order='sequence ASC', context=context)
res += self._get_children_by_order(cr, uid, ids2, context=context)
return res
def _get_balance(self, cr, uid, ids, field_names, args, context=None):
'''returns a dictionary with key=the ID of a record and value=the balance amount
computed for this record. If the record is of type :
'accounts' : it's the sum of the linked accounts
            'account_type' : it's the sum of leaf accounts with such an account_type
'account_report' : it's the amount of the related report
'sum' : it's the sum of the children of this record (aka a 'view' record)'''
account_obj = self.pool.get('account.account')
res = {}
for report in self.browse(cr, uid, ids, context=context):
if report.id in res:
continue
res[report.id] = dict((fn, 0.0) for fn in field_names)
if report.type == 'accounts':
# it's the sum of the linked accounts
for a in report.account_ids:
for field in field_names:
res[report.id][field] += getattr(a, field)
elif report.type == 'account_type':
                # it's the sum of the leaf accounts with such an account type
report_types = [x.id for x in report.account_type_ids]
account_ids = account_obj.search(cr, uid, [('user_type','in', report_types), ('type','!=','view')], context=context)
for a in account_obj.browse(cr, uid, account_ids, context=context):
for field in field_names:
res[report.id][field] += getattr(a, field)
elif report.type == 'account_report' and report.account_report_id:
# it's the amount of the linked report
res2 = self._get_balance(cr, uid, [report.account_report_id.id], field_names, False, context=context)
for key, value in res2.items():
for field in field_names:
res[report.id][field] += value[field]
elif report.type == 'sum':
# it's the sum of the children of this account.report
res2 = self._get_balance(cr, uid, [rec.id for rec in report.children_ids], field_names, False, context=context)
for key, value in res2.items():
for field in field_names:
res[report.id][field] += value[field]
return res
_columns = {
'name': fields.char('Report Name', required=True, translate=True),
'parent_id': fields.many2one('account.financial.report', 'Parent'),
'children_ids': fields.one2many('account.financial.report', 'parent_id', 'Account Report'),
'sequence': fields.integer('Sequence'),
'balance': fields.function(_get_balance, 'Balance', multi='balance'),
'debit': fields.function(_get_balance, 'Debit', multi='balance'),
'credit': fields.function(_get_balance, 'Credit', multi="balance"),
'level': fields.function(_get_level, string='Level', store=True, type='integer'),
'type': fields.selection([
('sum','View'),
('accounts','Accounts'),
('account_type','Account Type'),
('account_report','Report Value'),
],'Type'),
'account_ids': fields.many2many('account.account', 'account_account_financial_report', 'report_line_id', 'account_id', 'Accounts'),
'account_report_id': fields.many2one('account.financial.report', 'Report Value'),
'account_type_ids': fields.many2many('account.account.type', 'account_account_financial_report_type', 'report_id', 'account_type_id', 'Account Types'),
'sign': fields.selection([(-1, 'Reverse balance sign'), (1, 'Preserve balance sign')], 'Sign on Reports', required=True, help='For accounts that are typically more debited than credited and that you would like to print as negative amounts in your reports, you should reverse the sign of the balance; e.g.: Expense account. The same applies for accounts that are typically more credited than debited and that you would like to print as positive amounts in your reports; e.g.: Income account.'),
'display_detail': fields.selection([
('no_detail','No detail'),
('detail_flat','Display children flat'),
('detail_with_hierarchy','Display children with hierarchy')
], 'Display details'),
'style_overwrite': fields.selection([
(0, 'Automatic formatting'),
(1,'Main Title 1 (bold, underlined)'),
(2,'Title 2 (bold)'),
(3,'Title 3 (bold, smaller)'),
(4,'Normal Text'),
(5,'Italic Text (smaller)'),
(6,'Smallest Text'),
],'Financial Report Style', help="You can set up here the format you want this record to be displayed. If you leave the automatic formatting, it will be computed based on the financial reports hierarchy (auto-computed field 'level')."),
}
_defaults = {
'type': 'sum',
'display_detail': 'detail_flat',
'sign': 1,
'style_overwrite': 0,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
masamichi/bite-project | deps/gdata-python-client/samples/blogger/oauth-appengine/main.py | 41 | 1765 | __author__ = '[email protected] (Wiktor Gworek)'
import wsgiref.handlers
import atom
import os
import cgi
import gdata.blogger.service
from oauth import OAuthDanceHandler, OAuthHandler, requiresOAuth
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
class MainHandler(OAuthHandler):
"""Main handler. If user is not logged in via OAuth it will display welcome
page. In other case user's blogs on Blogger will be displayed."""
def get(self):
try:
template_values = {'logged': self.client.has_access_token()}
if template_values['logged']:
feed = self.client.blogger.GetBlogFeed()
blogs = []
for entry in feed.entry:
blogs.append({
'id': entry.GetBlogId(),
'title': entry.title.text,
'link': entry.GetHtmlLink().href,
'published': entry.published.text,
'updated': entry.updated.text
})
template_values['blogs'] = blogs
except gdata.service.RequestError, error:
template_values['logged'] = False
path = os.path.join(os.path.dirname(__file__), 'index.html')
self.response.out.write(template.render(path, template_values))
class NewPostHandler(OAuthHandler):
"""Handles AJAX POST request to create a new post on a blog."""
@requiresOAuth
def post(self):
entry = atom.Entry(content=atom.Content(text=self.request.get('body')))
self.client.blogger.AddPost(entry, blog_id=self.request.get('id'))
def main():
application = webapp.WSGIApplication([
(r'/oauth/(.*)', OAuthDanceHandler),
('/new_post', NewPostHandler),
('/', MainHandler),
], debug=True)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()
| apache-2.0 |
AevumDecessus/fragforce.org | ffsfdc/models.py | 2 | 15743 | from django.db import models
from django.utils import timezone
class Hcmeta(models.Model):
hcver = models.IntegerField(blank=True, null=True)
org_id = models.CharField(max_length=50, blank=True, null=True)
details = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_hcmeta'
class SfEventLog(models.Model):
table_name = models.CharField(max_length=128, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
synced_at = models.DateTimeField(blank=True, null=True)
sf_timestamp = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(max_length=20, blank=True, null=True)
record = models.TextField(blank=True, null=True)
processed = models.BooleanField(null=True)
class Meta:
managed = False
db_table = '_sf_event_log'
class TriggerLog(models.Model):
txid = models.BigIntegerField(blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
updated_at = models.DateTimeField(blank=True, null=True)
processed_at = models.DateTimeField(blank=True, null=True)
processed_tx = models.BigIntegerField(blank=True, null=True)
state = models.CharField(max_length=8, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
table_name = models.CharField(max_length=128, blank=True, null=True)
record_id = models.IntegerField(blank=True, null=True)
sfid = models.CharField(max_length=18, blank=True, null=True)
old = models.TextField(blank=True, null=True)
values = models.TextField(blank=True, null=True)
sf_result = models.IntegerField(blank=True, null=True)
sf_message = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_trigger_log'
class TriggerLogArchive(models.Model):
id = models.IntegerField(primary_key=True)
txid = models.BigIntegerField(blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
updated_at = models.DateTimeField(blank=True, null=True)
processed_at = models.DateTimeField(blank=True, null=True)
processed_tx = models.BigIntegerField(blank=True, null=True)
state = models.CharField(max_length=8, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
table_name = models.CharField(max_length=128, blank=True, null=True)
record_id = models.IntegerField(blank=True, null=True)
sfid = models.CharField(max_length=18, blank=True, null=True)
old = models.TextField(blank=True, null=True)
values = models.TextField(blank=True, null=True)
sf_result = models.IntegerField(blank=True, null=True)
sf_message = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_trigger_log_archive'
class SiteAccount(models.Model):
jigsaw = models.CharField(max_length=20, blank=True, null=True)
shippinglongitude = models.FloatField(blank=True, null=True)
shippingstate = models.CharField(max_length=80, blank=True, null=True)
youtubeid = models.CharField(db_column='youtubeid__c', max_length=80, blank=True, null=True)
numberofemployees = models.IntegerField(blank=True, null=True)
parent = models.ForeignKey('SiteAccount', to_field='sfid', db_column='parentid',
on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
recordtypeid = models.CharField(max_length=18, blank=True, null=True)
shippingpostalcode = models.CharField(max_length=20, blank=True, null=True)
billingcity = models.CharField(max_length=40, blank=True, null=True)
billinglatitude = models.FloatField(blank=True, null=True)
accountsource = models.CharField(max_length=40, blank=True, null=True)
shippingcountry = models.CharField(max_length=80, blank=True, null=True)
lastvieweddate = models.DateTimeField(blank=True, null=True)
shippinggeocodeaccuracy = models.CharField(max_length=40, blank=True, null=True)
last_el_update = models.DateTimeField(db_column='last_el_update__c', blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
site_el_raised = models.FloatField(db_column='site_el_raised__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
phone = models.CharField(max_length=40, blank=True, null=True)
masterrecordid = models.CharField(max_length=18, blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
isdeleted = models.BooleanField(null=True)
site_el_goal = models.FloatField(db_column='site_el_goal__c', blank=True, null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
el_id = models.CharField(db_column='el_id__c', max_length=80, blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
shippingstreet = models.CharField(max_length=255, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
billingpostalcode = models.CharField(max_length=20, blank=True, null=True)
billinglongitude = models.FloatField(blank=True, null=True)
twitchid = models.CharField(db_column='twitchid__c', max_length=80, blank=True, null=True)
twitterid = models.CharField(db_column='twitterid__c', max_length=80, blank=True, null=True)
createddate = models.DateTimeField(blank=True, null=True)
billingstate = models.CharField(max_length=80, blank=True, null=True)
supplies = models.TextField(db_column='supplies__c', blank=True, null=True)
jigsawcompanyid = models.CharField(max_length=20, blank=True, null=True)
shippingcity = models.CharField(max_length=40, blank=True, null=True)
shippinglatitude = models.FloatField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
type = models.CharField(max_length=40, blank=True, null=True)
website = models.CharField(max_length=255, blank=True, null=True)
billingcountry = models.CharField(max_length=80, blank=True, null=True)
description = models.TextField(blank=True, null=True)
billinggeocodeaccuracy = models.CharField(max_length=40, blank=True, null=True)
photourl = models.CharField(max_length=255, blank=True, null=True)
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sicdesc = models.CharField(max_length=80, blank=True, null=True)
industry = models.CharField(max_length=40, blank=True, null=True)
billingstreet = models.CharField(max_length=255, blank=True, null=True)
site_email = models.CharField(db_column='site_email__c', max_length=80, blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
site_info = models.TextField(db_column='site_info__c', blank=True, null=True)
nerd_in_chief = models.CharField(db_column='nerd_in_chief__c', max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
# contacturl = models.CharField(db_column='contacturl__c', max_length=1300, blank=True, null=True)
islocked = models.BooleanField(null=True)
loot_guard = models.CharField(db_column='loot_guard__c', max_length=18, blank=True, null=True)
class Meta:
managed = False
db_table = 'account'
def has_events(self):
""" Return True if this account has upcoming events """
return Event.objects.filter(event_start_date__gte=timezone.now(), site=self).count() > 0
def upcoming(self):
return self.events.filter(event_start_date__gte=timezone.now()).order_by('event_start_date').all()
def past(self):
return self.events.filter(event_start_date__lt=timezone.now()).order_by('-event_start_date').all()
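# Illustrative usage sketch (assumes at least one SiteAccount row is present in
# the Heroku Connect-managed table):
# site = SiteAccount.objects.filter(name='Some Site').first()
# if site and site.has_events():
#     for event in site.upcoming():
#         print(event.name, event.event_start_date)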
class Contact(models.Model):
lastname = models.CharField(max_length=80, blank=True, null=True)
account = models.ForeignKey(SiteAccount, to_field='sfid', db_column='accountid', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
name = models.CharField(max_length=121, blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
department = models.CharField(max_length=80, blank=True, null=True)
extra_life_id = models.CharField(db_column='extra_life_id__c', unique=True, max_length=20, blank=True, null=True)
fragforce_org_user = models.CharField(db_column='fragforce_org_user__c', max_length=18, blank=True, null=True)
title = models.CharField(max_length=128, blank=True, null=True)
firstname = models.CharField(max_length=40, blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
def donate_link(self):
if self.extra_life_id:
return "https://www.extra-life.org/index.cfm?fuseaction=donate.participant&participantID=%d" % (
int(self.extra_life_id),
)
raise ValueError("No extra life id set for %r" % self)
class Meta:
managed = False
db_table = 'contact'
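# Illustrative usage sketch: donate_link() builds an Extra Life donation URL and
# raises ValueError when extra_life_id is unset, so guard the call.
# contact = Contact.objects.exclude(extra_life_id__isnull=True).first()
# if contact is not None:
#     print(contact.donate_link())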
class ELHistory(models.Model):
currencyisocode = models.CharField(max_length=3, blank=True, null=True)
contact = models.ForeignKey(Contact, to_field='sfid', db_column='contact__c', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
year = models.CharField(db_column='year__c', max_length=255, blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
raised = models.FloatField(db_column='raised__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
isdeleted = models.BooleanField(null=True)
goal = models.FloatField(db_column='goal__c', blank=True, null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
el_id = models.CharField(db_column='el_id__c', max_length=7, blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
islocked = models.BooleanField(null=True)
createddate = models.DateTimeField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
site = models.ForeignKey(SiteAccount, to_field='sfid', db_column='site__c', on_delete=models.CASCADE, max_length=18,
blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
class Meta:
managed = False
db_table = 'el_history__c'
class Event(models.Model):
lastvieweddate = models.DateTimeField(blank=True, null=True)
volunteerforce_link = models.CharField(db_column='volunteerforce_link__c', max_length=255, blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
event_end_date = models.DateTimeField(db_column='event_end_date__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
isdeleted = models.BooleanField(null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
event_start_date = models.DateTimeField(db_column='event_start_date__c', blank=True, null=True)
createddate = models.DateTimeField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
site = models.ForeignKey(SiteAccount, to_field='sfid', db_column='site__c', on_delete=models.CASCADE, max_length=18,
blank=True, null=True, related_name='events')
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
use_secondary_address = models.BooleanField(db_column='use_secondary_address__c', null=True)
stream_recording_link = models.CharField(db_column='stream_recording_link__c', max_length=255, blank=True,
null=True)
# participant_count = models.FloatField(db_column='participant_count__c', blank=True, null=True)
# prereg_url = models.CharField(db_column='prereg_url__c', max_length=1300, blank=True, null=True)
mayedit = models.BooleanField(null=True)
# open_for_preregistration = models.BooleanField(db_column='open_for_preregistration__c', null=True)
islocked = models.BooleanField(null=True)
# signinurl = models.CharField(db_column='signinurl__c', max_length=1300, blank=True, null=True)
# event_address_lookup = models.CharField(db_column='event_address_lookup__c', max_length=1300, blank=True, null=True)
event_information = models.TextField(db_column='event_information__c', blank=True, null=True)
# open_for_registration = models.BooleanField(db_column='open_for_registration__c', null=True)
# Short description of the event
description = models.TextField(db_column='description__c', blank=True, null=True)
class Meta:
managed = False
db_table = 'fragforce_event__c'
class EventParticipant(models.Model):
contact = models.ForeignKey(Contact, to_field='sfid', db_column='contact__c', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
lastvieweddate = models.DateTimeField(blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
event = models.ForeignKey(Event, to_field='sfid', db_column='fragforce_event__c', on_delete=models.CASCADE,
max_length=18, blank=True,
null=True)
isdeleted = models.BooleanField(null=True)
participant = models.BooleanField(db_column='participant__c', null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
islocked = models.BooleanField(null=True)
createddate = models.DateTimeField(blank=True, null=True)
name = models.CharField(db_column='name__c', max_length=120, blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
class Meta:
managed = False
db_table = 'event_participant__c'
| gpl-2.0 |
googleapis/python-logging | google/cloud/logging_v2/types/__init__.py | 1 | 3610 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .log_entry import (
LogEntry,
LogEntryOperation,
LogEntrySourceLocation,
)
from .logging import (
DeleteLogRequest,
ListLogEntriesRequest,
ListLogEntriesResponse,
ListLogsRequest,
ListLogsResponse,
ListMonitoredResourceDescriptorsRequest,
ListMonitoredResourceDescriptorsResponse,
TailLogEntriesRequest,
TailLogEntriesResponse,
WriteLogEntriesPartialErrors,
WriteLogEntriesRequest,
WriteLogEntriesResponse,
)
from .logging_config import (
BigQueryOptions,
CmekSettings,
CreateBucketRequest,
CreateExclusionRequest,
CreateSinkRequest,
CreateViewRequest,
DeleteBucketRequest,
DeleteExclusionRequest,
DeleteSinkRequest,
DeleteViewRequest,
GetBucketRequest,
GetCmekSettingsRequest,
GetExclusionRequest,
GetSinkRequest,
GetViewRequest,
ListBucketsRequest,
ListBucketsResponse,
ListExclusionsRequest,
ListExclusionsResponse,
ListSinksRequest,
ListSinksResponse,
ListViewsRequest,
ListViewsResponse,
LogBucket,
LogExclusion,
LogSink,
LogView,
UndeleteBucketRequest,
UpdateBucketRequest,
UpdateCmekSettingsRequest,
UpdateExclusionRequest,
UpdateSinkRequest,
UpdateViewRequest,
LifecycleState,
)
from .logging_metrics import (
CreateLogMetricRequest,
DeleteLogMetricRequest,
GetLogMetricRequest,
ListLogMetricsRequest,
ListLogMetricsResponse,
LogMetric,
UpdateLogMetricRequest,
)
__all__ = (
"LogEntry",
"LogEntryOperation",
"LogEntrySourceLocation",
"DeleteLogRequest",
"ListLogEntriesRequest",
"ListLogEntriesResponse",
"ListLogsRequest",
"ListLogsResponse",
"ListMonitoredResourceDescriptorsRequest",
"ListMonitoredResourceDescriptorsResponse",
"TailLogEntriesRequest",
"TailLogEntriesResponse",
"WriteLogEntriesPartialErrors",
"WriteLogEntriesRequest",
"WriteLogEntriesResponse",
"BigQueryOptions",
"CmekSettings",
"CreateBucketRequest",
"CreateExclusionRequest",
"CreateSinkRequest",
"CreateViewRequest",
"DeleteBucketRequest",
"DeleteExclusionRequest",
"DeleteSinkRequest",
"DeleteViewRequest",
"GetBucketRequest",
"GetCmekSettingsRequest",
"GetExclusionRequest",
"GetSinkRequest",
"GetViewRequest",
"ListBucketsRequest",
"ListBucketsResponse",
"ListExclusionsRequest",
"ListExclusionsResponse",
"ListSinksRequest",
"ListSinksResponse",
"ListViewsRequest",
"ListViewsResponse",
"LogBucket",
"LogExclusion",
"LogSink",
"LogView",
"UndeleteBucketRequest",
"UpdateBucketRequest",
"UpdateCmekSettingsRequest",
"UpdateExclusionRequest",
"UpdateSinkRequest",
"UpdateViewRequest",
"LifecycleState",
"CreateLogMetricRequest",
"DeleteLogMetricRequest",
"GetLogMetricRequest",
"ListLogMetricsRequest",
"ListLogMetricsResponse",
"LogMetric",
"UpdateLogMetricRequest",
)
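# Illustrative usage sketch (not part of the generated module): these types are
# proto-plus messages, typically constructed with keyword arguments, e.g.
# from google.cloud.logging_v2.types import ListLogEntriesRequest
# request = ListLogEntriesRequest(resource_names=["projects/my-project"], page_size=10)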
| apache-2.0 |
danalec/dotfiles | sublime/.config/sublime-text-3/Packages/python-markdown/st3/markdown/extensions/smarty.py | 62 | 10360 | # -*- coding: utf-8 -*-
'''
Smarty extension for Python-Markdown
====================================
Adds conversion of ASCII dashes, quotes and ellipses to their HTML
entity equivalents.
See <https://pythonhosted.org/Markdown/extensions/smarty.html>
for documentation.
Author: 2013, Dmitry Shachnev <[email protected]>
All changes Copyright 2013-2014 The Python Markdown Project
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
SmartyPants license:
Copyright (c) 2003 John Gruber <http://daringfireball.net/>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name "SmartyPants" nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
This software is provided by the copyright holders and contributors "as
is" and any express or implied warranties, including, but not limited
to, the implied warranties of merchantability and fitness for a
particular purpose are disclaimed. In no event shall the copyright
owner or contributors be liable for any direct, indirect, incidental,
special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use,
data, or profits; or business interruption) however caused and on any
theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use
of this software, even if advised of the possibility of such damage.
smartypants.py license:
smartypants.py is a derivative work of SmartyPants.
Copyright (c) 2004, 2007 Chad Miller <http://web.chad.org/>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
This software is provided by the copyright holders and contributors "as
is" and any express or implied warranties, including, but not limited
to, the implied warranties of merchantability and fitness for a
particular purpose are disclaimed. In no event shall the copyright
owner or contributors be liable for any direct, indirect, incidental,
special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use,
data, or profits; or business interruption) however caused and on any
theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use
of this software, even if advised of the possibility of such damage.
'''
from __future__ import unicode_literals
from . import Extension
from ..inlinepatterns import HtmlPattern, HTML_RE
from ..odict import OrderedDict
from ..treeprocessors import InlineProcessor
# Constants for quote education.
punctClass = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]"""
endOfWordClass = r"[\s.,;:!?)]"
closeClass = "[^\ \t\r\n\[\{\(\-\u0002\u0003]"
openingQuotesBase = (
'(\s' # a whitespace char
'| ' # or a non-breaking space entity
'|--' # or dashes
'|–|—' # or unicode
'|&[mn]dash;' # or named dash entities
'|–|—' # or decimal entities
')'
)
substitutions = {
'mdash': '—',
'ndash': '–',
'ellipsis': '…',
'left-angle-quote': '«',
'right-angle-quote': '»',
'left-single-quote': '‘',
'right-single-quote': '’',
'left-double-quote': '“',
'right-double-quote': '”',
}
# Special case if the very first character is a quote
# followed by punctuation at a non-word-break. Close the quotes by brute force:
singleQuoteStartRe = r"^'(?=%s\B)" % punctClass
doubleQuoteStartRe = r'^"(?=%s\B)' % punctClass
# Special case for double sets of quotes, e.g.:
# <p>He said, "'Quoted' words in a larger quote."</p>
doubleQuoteSetsRe = r""""'(?=\w)"""
singleQuoteSetsRe = r"""'"(?=\w)"""
# Special case for decade abbreviations (the '80s):
decadeAbbrRe = r"(?<!\w)'(?=\d{2}s)"
# Get most opening double quotes:
openingDoubleQuotesRegex = r'%s"(?=\w)' % openingQuotesBase
# Double closing quotes:
closingDoubleQuotesRegex = r'"(?=\s)'
closingDoubleQuotesRegex2 = '(?<=%s)"' % closeClass
# Get most opening single quotes:
openingSingleQuotesRegex = r"%s'(?=\w)" % openingQuotesBase
# Single closing quotes:
closingSingleQuotesRegex = r"(?<=%s)'(?!\s|s\b|\d)" % closeClass
closingSingleQuotesRegex2 = r"(?<=%s)'(\s|s\b)" % closeClass
# All remaining quotes should be opening ones
remainingSingleQuotesRegex = "'"
remainingDoubleQuotesRegex = '"'
HTML_STRICT_RE = HTML_RE + r'(?!\>)'
class SubstituteTextPattern(HtmlPattern):
def __init__(self, pattern, replace, markdown_instance):
""" Replaces matches with some text. """
HtmlPattern.__init__(self, pattern)
self.replace = replace
self.markdown = markdown_instance
def handleMatch(self, m):
result = ''
for part in self.replace:
if isinstance(part, int):
result += m.group(part)
else:
result += self.markdown.htmlStash.store(part, safe=True)
return result
class SmartyExtension(Extension):
def __init__(self, *args, **kwargs):
self.config = {
'smart_quotes': [True, 'Educate quotes'],
'smart_angled_quotes': [False, 'Educate angled quotes'],
'smart_dashes': [True, 'Educate dashes'],
'smart_ellipses': [True, 'Educate ellipses'],
'substitutions': [{}, 'Overwrite default substitutions'],
}
super(SmartyExtension, self).__init__(*args, **kwargs)
self.substitutions = dict(substitutions)
self.substitutions.update(self.getConfig('substitutions', default={}))
def _addPatterns(self, md, patterns, serie):
for ind, pattern in enumerate(patterns):
pattern += (md,)
pattern = SubstituteTextPattern(*pattern)
after = ('>smarty-%s-%d' % (serie, ind - 1) if ind else '_begin')
name = 'smarty-%s-%d' % (serie, ind)
self.inlinePatterns.add(name, pattern, after)
def educateDashes(self, md):
emDashesPattern = SubstituteTextPattern(
r'(?<!-)---(?!-)', (self.substitutions['mdash'],), md
)
enDashesPattern = SubstituteTextPattern(
r'(?<!-)--(?!-)', (self.substitutions['ndash'],), md
)
self.inlinePatterns.add('smarty-em-dashes', emDashesPattern, '_begin')
self.inlinePatterns.add(
'smarty-en-dashes', enDashesPattern, '>smarty-em-dashes'
)
def educateEllipses(self, md):
ellipsesPattern = SubstituteTextPattern(
r'(?<!\.)\.{3}(?!\.)', (self.substitutions['ellipsis'],), md
)
self.inlinePatterns.add('smarty-ellipses', ellipsesPattern, '_begin')
def educateAngledQuotes(self, md):
leftAngledQuotePattern = SubstituteTextPattern(
r'\<\<', (self.substitutions['left-angle-quote'],), md
)
rightAngledQuotePattern = SubstituteTextPattern(
r'\>\>', (self.substitutions['right-angle-quote'],), md
)
self.inlinePatterns.add(
'smarty-left-angle-quotes', leftAngledQuotePattern, '_begin'
)
self.inlinePatterns.add(
'smarty-right-angle-quotes',
rightAngledQuotePattern,
'>smarty-left-angle-quotes'
)
def educateQuotes(self, md):
lsquo = self.substitutions['left-single-quote']
rsquo = self.substitutions['right-single-quote']
ldquo = self.substitutions['left-double-quote']
rdquo = self.substitutions['right-double-quote']
patterns = (
(singleQuoteStartRe, (rsquo,)),
(doubleQuoteStartRe, (rdquo,)),
(doubleQuoteSetsRe, (ldquo + lsquo,)),
(singleQuoteSetsRe, (lsquo + ldquo,)),
(decadeAbbrRe, (rsquo,)),
(openingSingleQuotesRegex, (2, lsquo)),
(closingSingleQuotesRegex, (rsquo,)),
(closingSingleQuotesRegex2, (rsquo, 2)),
(remainingSingleQuotesRegex, (lsquo,)),
(openingDoubleQuotesRegex, (2, ldquo)),
(closingDoubleQuotesRegex, (rdquo,)),
(closingDoubleQuotesRegex2, (rdquo,)),
(remainingDoubleQuotesRegex, (ldquo,))
)
self._addPatterns(md, patterns, 'quotes')
def extendMarkdown(self, md, md_globals):
configs = self.getConfigs()
self.inlinePatterns = OrderedDict()
if configs['smart_ellipses']:
self.educateEllipses(md)
if configs['smart_quotes']:
self.educateQuotes(md)
if configs['smart_angled_quotes']:
self.educateAngledQuotes(md)
# Override HTML_RE from inlinepatterns.py so that it does not
# process tags with duplicate closing quotes.
md.inlinePatterns["html"] = HtmlPattern(HTML_STRICT_RE, md)
if configs['smart_dashes']:
self.educateDashes(md)
inlineProcessor = InlineProcessor(md)
inlineProcessor.inlinePatterns = self.inlinePatterns
md.treeprocessors.add('smarty', inlineProcessor, '_end')
md.ESCAPED_CHARS.extend(['"', "'"])
def makeExtension(*args, **kwargs):
return SmartyExtension(*args, **kwargs)
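# Illustrative usage sketch (not part of the extension): enabling the extension
# through the public markdown API. The exact import path of this vendored copy
# may differ; with a stock Python-Markdown install it is registered as below.
# import markdown
# html = markdown.markdown('"Hello" -- world...', extensions=['markdown.extensions.smarty'])
# # html is roughly '<p>&ldquo;Hello&rdquo; &ndash; world&hellip;</p>'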
| mit |
mgadi/naemonbox | sources/psdash/pyzmq-13.1.0/zmq/tests/test_context.py | 5 | 6745 | #-----------------------------------------------------------------------------
# Copyright (c) 2010-2012 Brian Granger, Min Ragan-Kelley
#
# This file is part of pyzmq
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import gc
import sys
import time
from threading import Thread, Event
import zmq
from zmq.tests import (
BaseZMQTestCase, have_gevent, GreenTest, skip_green, PYPY, SkipTest,
)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
class TestContext(BaseZMQTestCase):
def test_init(self):
c1 = self.Context()
self.assert_(isinstance(c1, self.Context))
del c1
c2 = self.Context()
self.assert_(isinstance(c2, self.Context))
del c2
c3 = self.Context()
self.assert_(isinstance(c3, self.Context))
del c3
def test_dir(self):
ctx = self.Context()
self.assertTrue('socket' in dir(ctx))
if zmq.zmq_version_info() > (3,):
self.assertTrue('IO_THREADS' in dir(ctx))
ctx.term()
def test_term(self):
c = self.Context()
c.term()
self.assert_(c.closed)
def test_fail_init(self):
self.assertRaisesErrno(zmq.EINVAL, self.Context, -1)
def test_term_hang(self):
rep,req = self.create_bound_pair(zmq.ROUTER, zmq.DEALER)
req.setsockopt(zmq.LINGER, 0)
req.send(b'hello', copy=False)
req.close()
rep.close()
self.context.term()
def test_instance(self):
ctx = self.Context.instance()
c2 = self.Context.instance(io_threads=2)
self.assertTrue(c2 is ctx)
c2.term()
c3 = self.Context.instance()
c4 = self.Context.instance()
self.assertFalse(c3 is c2)
self.assertFalse(c3.closed)
self.assertTrue(c3 is c4)
def test_many_sockets(self):
"""opening and closing many sockets shouldn't cause problems"""
ctx = self.Context()
for i in range(16):
sockets = [ ctx.socket(zmq.REP) for i in range(65) ]
[ s.close() for s in sockets ]
# give the reaper a chance
time.sleep(1e-2)
ctx.term()
def test_sockopts(self):
"""setting socket options with ctx attributes"""
ctx = self.Context()
ctx.linger = 5
self.assertEqual(ctx.linger, 5)
s = ctx.socket(zmq.REQ)
self.assertEqual(s.linger, 5)
self.assertEqual(s.getsockopt(zmq.LINGER), 5)
s.close()
# check that subscribe doesn't get set on sockets that don't subscribe:
ctx.subscribe = b''
s = ctx.socket(zmq.REQ)
s.close()
ctx.term()
def test_destroy(self):
"""Context.destroy should close sockets"""
ctx = self.Context()
sockets = [ ctx.socket(zmq.REP) for i in range(65) ]
# close half of the sockets
[ s.close() for s in sockets[::2] ]
ctx.destroy()
# reaper is not instantaneous
time.sleep(1e-2)
for s in sockets:
self.assertTrue(s.closed)
def test_destroy_linger(self):
"""Context.destroy should set linger on closing sockets"""
req,rep = self.create_bound_pair(zmq.REQ, zmq.REP)
req.send(b'hi')
time.sleep(1e-2)
self.context.destroy(linger=0)
# reaper is not instantaneous
time.sleep(1e-2)
for s in (req,rep):
self.assertTrue(s.closed)
def test_term_noclose(self):
"""Context.term won't close sockets"""
ctx = self.Context()
s = ctx.socket(zmq.REQ)
self.assertFalse(s.closed)
t = Thread(target=ctx.term)
t.start()
t.join(timeout=0.1)
self.assertTrue(t.is_alive(), "Context should be waiting")
s.close()
t.join(timeout=0.1)
self.assertFalse(t.is_alive(), "Context should have closed")
def test_gc(self):
"""test close&term by garbage collection alone"""
if PYPY:
raise SkipTest("GC doesn't work ")
# test credit @dln (GH #137):
def gcf():
def inner():
ctx = self.Context()
s = ctx.socket(zmq.PUSH)
inner()
gc.collect()
t = Thread(target=gcf)
t.start()
t.join(timeout=1)
self.assertFalse(t.is_alive(), "Garbage collection should have cleaned up context")
def test_cyclic_destroy(self):
"""ctx.destroy should succeed when cyclic ref prevents gc"""
# test credit @dln (GH #137):
class CyclicReference(object):
def __init__(self, parent=None):
self.parent = parent
def crash(self, sock):
self.sock = sock
self.child = CyclicReference(self)
def crash_zmq():
ctx = self.Context()
sock = ctx.socket(zmq.PULL)
c = CyclicReference()
c.crash(sock)
ctx.destroy()
crash_zmq()
def test_term_thread(self):
"""ctx.term should not crash active threads (#139)"""
ctx = self.Context()
evt = Event()
evt.clear()
def block():
s = ctx.socket(zmq.REP)
s.bind_to_random_port('tcp://127.0.0.1')
evt.set()
try:
s.recv()
except zmq.ZMQError as e:
self.assertEqual(e.errno, zmq.ETERM)
return
finally:
s.close()
self.fail("recv should have been interrupted with ETERM")
t = Thread(target=block)
t.start()
evt.wait(1)
self.assertTrue(evt.is_set(), "sync event never fired")
time.sleep(0.01)
ctx.term()
t.join(timeout=1)
self.assertFalse(t.is_alive(), "term should have interrupted s.recv()")
if False: # disable green context tests
class TestContextGreen(GreenTest, TestContext):
"""gevent subclass of context tests"""
# skip tests that use real threads:
test_gc = GreenTest.skip_green
test_term_thread = GreenTest.skip_green
test_destroy_linger = GreenTest.skip_green
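# Illustrative sketch (not part of the original test module): the basic Context
# lifecycle that the tests above exercise, assuming libzmq is available.
# Guarded so it never runs during test collection.
if __name__ == "__main__":
    ctx = zmq.Context.instance()      # process-wide singleton, as in test_instance
    sock = ctx.socket(zmq.PUSH)
    sock.linger = 0                   # avoid term() blocking on unsent messages
    sock.close()                      # sockets must be closed for term() to return
    ctx.term()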
| gpl-2.0 |
jimi-c/ansible | lib/ansible/modules/network/netscaler/netscaler_cs_action.py | 72 | 9032 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netscaler_cs_action
short_description: Manage content switching actions
description:
- Manage content switching actions
- This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance
version_added: "2.4.0"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- >-
Name for the content switching action. Must begin with an ASCII alphanumeric or underscore C(_)
character, and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space C( ), colon
C(:), at sign C(@), equal sign C(=), and hyphen C(-) characters. Can be changed after the content
switching action is created.
targetlbvserver:
description:
- "Name of the load balancing virtual server to which the content is switched."
targetvserver:
description:
- "Name of the VPN virtual server to which the content is switched."
targetvserverexpr:
description:
- "Information about this content switching action."
comment:
description:
- "Comments associated with this cs action."
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
# lb_vserver_1 must have been already created with the netscaler_lb_vserver module
- name: Configure netscaler content switching action
delegate_to: localhost
netscaler_cs_action:
nsip: 172.18.0.2
nitro_user: nsroot
nitro_pass: nsroot
validate_certs: no
state: present
name: action-1
targetlbvserver: lb_vserver_1
'''
RETURN = '''
loglines:
description: list of logged messages by the module
returned: always
type: list
sample: "['message 1', 'message 2']"
msg:
description: Message detailing the failure reason
returned: failure
type: string
sample: "Action does not exist"
diff:
description: List of differences between the actual configured object and the configuration specified in the module
returned: failure
type: dictionary
sample: "{ 'targetlbvserver': 'difference. ours: (str) server1 other: (str) server2' }"
'''
import json
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.cs.csaction import csaction
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.netscaler.netscaler import (
ConfigProxy,
get_nitro_client,
netscaler_common_arguments,
log, loglines,
ensure_feature_is_enabled,
get_immutables_intersection
)
def action_exists(client, module):
if csaction.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def action_identical(client, module, csaction_proxy):
if len(diff_list(client, module, csaction_proxy)) == 0:
return True
else:
return False
def diff_list(client, module, csaction_proxy):
action_list = csaction.get_filtered(client, 'name:%s' % module.params['name'])
diff_list = csaction_proxy.diff_object(action_list[0])
if False and 'targetvserverexpr' in diff_list:
json_value = json.loads(action_list[0].targetvserverexpr)
if json_value == module.params['targetvserverexpr']:
del diff_list['targetvserverexpr']
return diff_list
def main():
module_specific_arguments = dict(
name=dict(type='str'),
targetlbvserver=dict(type='str'),
targetvserverexpr=dict(type='str'),
comment=dict(type='str'),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
readwrite_attrs = [
'name',
'targetlbvserver',
'targetvserverexpr',
'comment',
]
readonly_attrs = [
'hits',
'referencecount',
'undefhits',
'builtin',
]
immutable_attrs = [
'name',
'targetvserverexpr',
]
transforms = {
}
# Instantiate config proxy
csaction_proxy = ConfigProxy(
actual=csaction(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
)
try:
ensure_feature_is_enabled(client, 'CS')
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying actions for state present')
if not action_exists(client, module):
if not module.check_mode:
csaction_proxy.add()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not action_identical(client, module, csaction_proxy):
# Check if we try to change value of immutable attributes
immutables_changed = get_immutables_intersection(csaction_proxy, diff_list(client, module, csaction_proxy).keys())
if immutables_changed != []:
module.fail_json(
msg='Cannot update immutable attributes %s' % (immutables_changed,),
diff=diff_list(client, module, csaction_proxy),
**module_result
)
if not module.check_mode:
csaction_proxy.update()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
log('Sanity checks for state present')
if not module.check_mode:
if not action_exists(client, module):
module.fail_json(msg='Content switching action does not exist', **module_result)
if not action_identical(client, module, csaction_proxy):
module.fail_json(
msg='Content switching action differs from configured',
diff=diff_list(client, module, csaction_proxy),
**module_result
)
elif module.params['state'] == 'absent':
log('Applying actions for state absent')
if action_exists(client, module):
if not module.check_mode:
csaction_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
if not module.check_mode:
log('Sanity checks for state absent')
if action_exists(client, module):
module.fail_json(msg='Content switching action still exists', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
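# Illustrative companion to the EXAMPLES block above (not part of the module):
# removing the same action only requires flipping state to absent; host and
# credential values are placeholders.
#
# - name: Remove netscaler content switching action
#   delegate_to: localhost
#   netscaler_cs_action:
#     nsip: 172.18.0.2
#     nitro_user: nsroot
#     nitro_pass: nsroot
#     validate_certs: no
#     state: absent
#     name: action-1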
| gpl-3.0 |
akash1808/nova | nova/tests/functional/v3/test_flavor_access.py | 29 | 4566 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.v3 import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class FlavorAccessSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = 'flavor-access'
# TODO(Park): Overriding '_api_version' till all functional tests
# are merged between v2 and v2.1. After that base class variable
# itself can be changed to 'v2'
_api_version = 'v2'
def _get_flags(self):
f = super(FlavorAccessSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'flavor_access.Flavor_access')
# FlavorAccess extension also needs Flavormanage to be loaded.
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'flavormanage.Flavormanage')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'flavor_disabled.Flavor_disabled')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'flavorextradata.Flavorextradata')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.'
'flavor_swap.Flavor_swap')
return f
def _add_tenant(self):
subs = {
'tenant_id': 'fake_tenant',
'flavor_id': 10,
}
response = self._do_post('flavors/10/action',
'flavor-access-add-tenant-req',
subs)
self._verify_response('flavor-access-add-tenant-resp',
subs, response, 200)
def _create_flavor(self):
subs = {
'flavor_id': 10,
'flavor_name': 'test_flavor'
}
response = self._do_post("flavors",
"flavor-access-create-req",
subs)
subs.update(self._get_regexes())
self._verify_response("flavor-access-create-resp", subs, response, 200)
def test_flavor_access_create(self):
self._create_flavor()
def test_flavor_access_detail(self):
response = self._do_get('flavors/detail')
subs = self._get_regexes()
self._verify_response('flavor-access-detail-resp', subs, response, 200)
def test_flavor_access_list(self):
self._create_flavor()
self._add_tenant()
flavor_id = 10
response = self._do_get('flavors/%s/os-flavor-access' % flavor_id)
subs = {
'flavor_id': flavor_id,
'tenant_id': 'fake_tenant',
}
self._verify_response('flavor-access-list-resp', subs, response, 200)
def test_flavor_access_show(self):
flavor_id = 1
response = self._do_get('flavors/%s' % flavor_id)
subs = {
'flavor_id': flavor_id
}
subs.update(self._get_regexes())
self._verify_response('flavor-access-show-resp', subs, response, 200)
def test_flavor_access_add_tenant(self):
self._create_flavor()
self._add_tenant()
def test_flavor_access_remove_tenant(self):
self._create_flavor()
self._add_tenant()
subs = {
'tenant_id': 'fake_tenant',
}
response = self._do_post('flavors/10/action',
"flavor-access-remove-tenant-req",
subs)
exp_subs = {
"tenant_id": self.api.project_id,
"flavor_id": "10"
}
self._verify_response('flavor-access-remove-tenant-resp',
exp_subs, response, 200)
| apache-2.0 |
wangjun/wakatime | wakatime/packages/pygments_py2/pygments/formatters/terminal.py | 76 | 5401 | # -*- coding: utf-8 -*-
"""
pygments.formatters.terminal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for terminal output with ANSI sequences.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
from pygments.util import get_choice_opt
__all__ = ['TerminalFormatter']
#: Map token types to a tuple of color values for light and dark
#: backgrounds.
TERMINAL_COLORS = {
Token: ('', ''),
Whitespace: ('lightgray', 'darkgray'),
Comment: ('lightgray', 'darkgray'),
Comment.Preproc: ('teal', 'turquoise'),
Keyword: ('darkblue', 'blue'),
Keyword.Type: ('teal', 'turquoise'),
Operator.Word: ('purple', 'fuchsia'),
Name.Builtin: ('teal', 'turquoise'),
Name.Function: ('darkgreen', 'green'),
Name.Namespace: ('_teal_', '_turquoise_'),
Name.Class: ('_darkgreen_', '_green_'),
Name.Exception: ('teal', 'turquoise'),
Name.Decorator: ('darkgray', 'lightgray'),
Name.Variable: ('darkred', 'red'),
Name.Constant: ('darkred', 'red'),
Name.Attribute: ('teal', 'turquoise'),
Name.Tag: ('blue', 'blue'),
String: ('brown', 'brown'),
Number: ('darkblue', 'blue'),
Generic.Deleted: ('red', 'red'),
Generic.Inserted: ('darkgreen', 'green'),
Generic.Heading: ('**', '**'),
Generic.Subheading: ('*purple*', '*fuchsia*'),
Generic.Error: ('red', 'red'),
Error: ('_red_', '_red_'),
}
class TerminalFormatter(Formatter):
r"""
Format tokens with ANSI color sequences, for output in a text console.
Color sequences are terminated at newlines, so that paging the output
works correctly.
The `get_style_defs()` method doesn't do anything special since there is
no support for common styles.
Options accepted:
`bg`
Set to ``"light"`` or ``"dark"`` depending on the terminal's background
(default: ``"light"``).
`colorscheme`
A dictionary mapping token types to (lightbg, darkbg) color names or
``None`` (default: ``None`` = use builtin colorscheme).
`linenos`
Set to ``True`` to have line numbers on the terminal output as well
(default: ``False`` = no line numbers).
"""
name = 'Terminal'
aliases = ['terminal', 'console']
filenames = []
def __init__(self, **options):
Formatter.__init__(self, **options)
self.darkbg = get_choice_opt(options, 'bg',
['light', 'dark'], 'light') == 'dark'
self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
self.linenos = options.get('linenos', False)
self._lineno = 0
def format(self, tokensource, outfile):
# hack: if the output is a terminal and has an encoding set,
# use that to avoid unicode encode problems
if not self.encoding and hasattr(outfile, "encoding") and \
hasattr(outfile, "isatty") and outfile.isatty() and \
sys.version_info < (3,):
self.encoding = outfile.encoding
return Formatter.format(self, tokensource, outfile)
def _write_lineno(self, outfile):
self._lineno += 1
outfile.write("\n%04d: " % self._lineno)
def _format_unencoded_with_lineno(self, tokensource, outfile):
self._write_lineno(outfile)
for ttype, value in tokensource:
if value.endswith("\n"):
self._write_lineno(outfile)
value = value[:-1]
color = self.colorscheme.get(ttype)
while color is None:
ttype = ttype[:-1]
color = self.colorscheme.get(ttype)
if color:
color = color[self.darkbg]
spl = value.split('\n')
for line in spl[:-1]:
self._write_lineno(outfile)
if line:
outfile.write(ansiformat(color, line[:-1]))
if spl[-1]:
outfile.write(ansiformat(color, spl[-1]))
else:
outfile.write(value)
outfile.write("\n")
def format_unencoded(self, tokensource, outfile):
if self.linenos:
self._format_unencoded_with_lineno(tokensource, outfile)
return
for ttype, value in tokensource:
color = self.colorscheme.get(ttype)
while color is None:
ttype = ttype[:-1]
color = self.colorscheme.get(ttype)
if color:
color = color[self.darkbg]
spl = value.split('\n')
for line in spl[:-1]:
if line:
outfile.write(ansiformat(color, line))
outfile.write('\n')
if spl[-1]:
outfile.write(ansiformat(color, spl[-1]))
else:
outfile.write(value)
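# Illustrative usage sketch (not part of the original module), assuming the
# standard pygments entry points are importable alongside this formatter.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.lexers import PythonLexer
    sample = "def greet(name):\n    return 'hello ' + name\n"
    # bg='dark' selects the dark-background column of TERMINAL_COLORS;
    # linenos=True prefixes each line with the "%04d: " counter defined above.
    sys.stdout.write(highlight(sample, PythonLexer(),
                               TerminalFormatter(bg='dark', linenos=True)))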
| bsd-3-clause |
DanielSBrown/osf.io | scripts/migration/migrate_mailing_lists_to_mailchimp_field.py | 40 | 1043 | """
Used to transfer subscriptions that current users might have from their deprecated mailing_lists field
to the new mailchimp_mailing_lists field. After that is done, remove mailing_lists as a User field to clean up.
"""
import logging
import sys
from website import models
from website.app import init_app
from modularodm import Q
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Users will have "mailchimp_mailing_lists" updated from deprecated field "mailing_lists" value')
if dry_run:
logger.warn('Dry_run mode')
for user in get_users_needing_mailing_lists_update():
logger.info('User {0} "mailchimp_mailing_lists" updated'.format(user.username))
if not dry_run:
user.mailchimp_mailing_lists = user.mailing_lists
user.save()
def get_users_needing_mailing_lists_update():
return models.User.find(
Q('mailing_lists', 'ne', {})
)
if __name__ == '__main__':
main()
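# Illustrative invocation (not part of the script); the module path is an
# assumption based on the repository layout shown above. Passing "dry" only
# logs the affected users, as handled in main().
#   python -m scripts.migration.migrate_mailing_lists_to_mailchimp_field
#   python -m scripts.migration.migrate_mailing_lists_to_mailchimp_field dry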
| apache-2.0 |
pombreda/django-hotclub | apps/local_apps/account/templatetags/other_service_tags.py | 12 | 1234 | import re
from django import template
from account.models import other_service
register = template.Library()
class OtherServiceNode(template.Node):
def __init__(self, user, key, asvar):
self.user = user
self.key = key
self.asvar = asvar
def render(self, context):
user = self.user.resolve(context)
key = self.key
value = other_service(user, key)
if self.asvar:
context[self.asvar] = value
return ''
else:
return value
@register.tag(name='other_service')
def other_service_tag(parser, token):
bits = token.split_contents()
if len(bits) == 3: # {% other_service user key %}
user = parser.compile_filter(bits[1])
key = bits[2]
asvar = None
elif len(bits) == 5: # {% other_service user key as var %}
if bits[3] != "as":
raise template.TemplateSyntaxError("3rd argument to %s should be 'as'" % bits[0])
user = parser.compile_filter(bits[1])
key = bits[2]
asvar = bits[4]
else:
raise template.TemplateSyntaxError("wrong number of arguments to %s" % bits[0])
return OtherServiceNode(user, key, asvar)
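# Illustrative template usage (not part of the module); the load name mirrors
# this file's name and "request.user" / "twitter" are placeholder values.
#   {% load other_service_tags %}
#   {% other_service request.user twitter %}
#   {% other_service request.user twitter as handle %} ... {{ handle }}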
| mit |
RO-ny9/python-for-android | python3-alpha/extra_modules/gdata/contacts/service.py | 120 | 17345 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ContactsService extends the GDataService for Google Contacts operations.
ContactsService: Provides methods to query feeds and manipulate items.
Extends GDataService.
DictionaryToParamList: Function which converts a dictionary into a list of
URL arguments (represented as strings). This is a
utility function used in CRUD operations.
"""
__author__ = 'dbrattli (Dag Brattli)'
import gdata
import gdata.calendar
import gdata.service
DEFAULT_BATCH_URL = ('http://www.google.com/m8/feeds/contacts/default/full'
'/batch')
DEFAULT_PROFILES_BATCH_URL = ('http://www.google.com'
'/m8/feeds/profiles/default/full/batch')
GDATA_VER_HEADER = 'GData-Version'
class Error(Exception):
pass
class RequestError(Error):
pass
class ContactsService(gdata.service.GDataService):
"""Client for the Google Contacts service."""
def __init__(self, email=None, password=None, source=None,
server='www.google.com', additional_headers=None,
contact_list='default', **kwargs):
"""Creates a client for the Contacts service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
source: string (optional) The name of the user's application.
server: string (optional) The name of the server to which a connection
will be opened. Default value: 'www.google.com'.
contact_list: string (optional) The name of the default contact list to
use when no URI is specified to the methods of the service.
Default value: 'default' (the logged in user's contact list).
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
self.contact_list = contact_list
gdata.service.GDataService.__init__(
self, email=email, password=password, service='cp', source=source,
server=server, additional_headers=additional_headers, **kwargs)
def GetFeedUri(self, kind='contacts', contact_list=None, projection='full',
scheme=None):
"""Builds a feed URI.
Args:
kind: The type of feed to return, typically 'groups' or 'contacts'.
Default value: 'contacts'.
contact_list: The contact list to return a feed for.
Default value: self.contact_list.
projection: The projection to apply to the feed contents, for example
'full', 'base', 'base/12345', 'full/batch'. Default value: 'full'.
scheme: The URL scheme such as 'http' or 'https', None to return a
relative URI without hostname.
Returns:
A feed URI using the given kind, contact list, and projection.
Example: '/m8/feeds/contacts/default/full'.
"""
contact_list = contact_list or self.contact_list
if kind == 'profiles':
contact_list = 'domain/%s' % contact_list
prefix = scheme and '%s://%s' % (scheme, self.server) or ''
return '%s/m8/feeds/%s/%s/%s' % (prefix, kind, contact_list, projection)
def GetContactsFeed(self, uri=None):
uri = uri or self.GetFeedUri()
return self.Get(uri, converter=gdata.contacts.ContactsFeedFromString)
def GetContact(self, uri):
return self.Get(uri, converter=gdata.contacts.ContactEntryFromString)
def CreateContact(self, new_contact, insert_uri=None, url_params=None,
escape_params=True):
"""Adds an new contact to Google Contacts.
Args:
new_contact: atom.Entry or subclass A new contact which is to be added to
Google Contacts.
insert_uri: the URL to post new contacts to the feed
url_params: dict (optional) Additional URL parameters to be included
in the insertion request.
escape_params: boolean (optional) If true, the url_parameters will be
escaped before they are included in the request.
Returns:
On successful insert, an entry containing the contact created
On failure, a RequestError is raised of the form:
{'status': HTTP status code from server,
'reason': HTTP reason from the server,
'body': HTTP body of the server's response}
"""
insert_uri = insert_uri or self.GetFeedUri()
return self.Post(new_contact, insert_uri, url_params=url_params,
escape_params=escape_params,
converter=gdata.contacts.ContactEntryFromString)
def UpdateContact(self, edit_uri, updated_contact, url_params=None,
escape_params=True):
"""Updates an existing contact.
Args:
edit_uri: string The edit link URI for the element being updated
updated_contact: string, atom.Entry or subclass containing
the Atom Entry which will replace the contact which is
stored at the edit_url
url_params: dict (optional) Additional URL parameters to be included
in the update request.
escape_params: boolean (optional) If true, the url_parameters will be
escaped before they are included in the request.
Returns:
On successful update, a httplib.HTTPResponse containing the server's
response to the PUT request.
On failure, a RequestError is raised of the form:
{'status': HTTP status code from server,
'reason': HTTP reason from the server,
'body': HTTP body of the server's response}
"""
return self.Put(updated_contact, self._CleanUri(edit_uri),
url_params=url_params,
escape_params=escape_params,
converter=gdata.contacts.ContactEntryFromString)
def DeleteContact(self, edit_uri, extra_headers=None,
url_params=None, escape_params=True):
"""Removes an contact with the specified ID from Google Contacts.
Args:
edit_uri: string The edit URL of the entry to be deleted. Example:
'/m8/feeds/contacts/default/full/xxx/yyy'
url_params: dict (optional) Additional URL parameters to be included
in the deletion request.
escape_params: boolean (optional) If true, the url_parameters will be
escaped before they are included in the request.
Returns:
On successful delete, a httplib.HTTPResponse containing the server's
response to the DELETE request.
On failure, a RequestError is raised of the form:
{'status': HTTP status code from server,
'reason': HTTP reason from the server,
'body': HTTP body of the server's response}
"""
return self.Delete(self._CleanUri(edit_uri),
url_params=url_params, escape_params=escape_params)
def GetGroupsFeed(self, uri=None):
uri = uri or self.GetFeedUri('groups')
return self.Get(uri, converter=gdata.contacts.GroupsFeedFromString)
def CreateGroup(self, new_group, insert_uri=None, url_params=None,
escape_params=True):
insert_uri = insert_uri or self.GetFeedUri('groups')
return self.Post(new_group, insert_uri, url_params=url_params,
escape_params=escape_params,
converter=gdata.contacts.GroupEntryFromString)
def UpdateGroup(self, edit_uri, updated_group, url_params=None,
escape_params=True):
return self.Put(updated_group, self._CleanUri(edit_uri),
url_params=url_params,
escape_params=escape_params,
converter=gdata.contacts.GroupEntryFromString)
def DeleteGroup(self, edit_uri, extra_headers=None,
url_params=None, escape_params=True):
return self.Delete(self._CleanUri(edit_uri),
url_params=url_params, escape_params=escape_params)
def ChangePhoto(self, media, contact_entry_or_url, content_type=None,
content_length=None):
"""Change the photo for the contact by uploading a new photo.
Performs a PUT against the photo edit URL to send the binary data for the
photo.
Args:
media: filename, file-like-object, or a gdata.MediaSource object to send.
contact_entry_or_url: ContactEntry or str If it is a ContactEntry, this
method will search for an edit photo link URL and
perform a PUT to the URL.
content_type: str (optional) the mime type for the photo data. This is
necessary if media is a file or file name, but if media
is a MediaSource object then the media object can contain
the mime type. If media_type is set, it will override the
mime type in the media object.
content_length: int or str (optional) Specifying the content length is
only required if media is a file-like object. If media
is a filename, the length is determined using
os.path.getsize. If media is a MediaSource object, it is
assumed that it already contains the content length.
"""
if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
url = contact_entry_or_url.GetPhotoEditLink().href
else:
url = contact_entry_or_url
if isinstance(media, gdata.MediaSource):
payload = media
# If the media object is a file-like object, then use it as the file
# handle in the in the MediaSource.
elif hasattr(media, 'read'):
payload = gdata.MediaSource(file_handle=media,
content_type=content_type, content_length=content_length)
# Assume that the media object is a file name.
else:
payload = gdata.MediaSource(content_type=content_type,
content_length=content_length, file_path=media)
return self.Put(payload, url)
  def GetPhoto(self, contact_entry_or_url):
    """Retrieves the binary data for the contact's profile photo as a string.
    Args:
      contact_entry_or_url: a gdata.contacts.ContactEntry object or a string
containing the photo link's URL. If the contact entry does not
contain a photo link, the image will not be fetched and this method
will return None.
"""
# TODO: add the ability to write out the binary image data to a file,
# reading and writing a chunk at a time to avoid potentially using up
# large amounts of memory.
url = None
if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
photo_link = contact_entry_or_url.GetPhotoLink()
if photo_link:
url = photo_link.href
else:
url = contact_entry_or_url
if url:
return self.Get(url, converter=str)
else:
return None
def DeletePhoto(self, contact_entry_or_url):
url = None
if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
url = contact_entry_or_url.GetPhotoEditLink().href
else:
url = contact_entry_or_url
if url:
self.Delete(url)
def GetProfilesFeed(self, uri=None):
"""Retrieves a feed containing all domain's profiles.
Args:
uri: string (optional) the URL to retrieve the profiles feed,
for example /m8/feeds/profiles/default/full
Returns:
On success, a ProfilesFeed containing the profiles.
On failure, raises a RequestError.
"""
uri = uri or self.GetFeedUri('profiles')
return self.Get(uri,
converter=gdata.contacts.ProfilesFeedFromString)
def GetProfile(self, uri):
"""Retrieves a domain's profile for the user.
Args:
uri: string the URL to retrieve the profiles feed,
for example /m8/feeds/profiles/default/full/username
Returns:
On success, a ProfileEntry containing the profile for the user.
On failure, raises a RequestError
"""
return self.Get(uri,
converter=gdata.contacts.ProfileEntryFromString)
def UpdateProfile(self, edit_uri, updated_profile, url_params=None,
escape_params=True):
"""Updates an existing profile.
Args:
edit_uri: string The edit link URI for the element being updated
updated_profile: string atom.Entry or subclass containing
the Atom Entry which will replace the profile which is
stored at the edit_url.
url_params: dict (optional) Additional URL parameters to be included
in the update request.
escape_params: boolean (optional) If true, the url_params will be
escaped before they are included in the request.
Returns:
On successful update, a httplib.HTTPResponse containing the server's
response to the PUT request.
On failure, raises a RequestError.
"""
return self.Put(updated_profile, self._CleanUri(edit_uri),
url_params=url_params, escape_params=escape_params,
converter=gdata.contacts.ProfileEntryFromString)
def ExecuteBatch(self, batch_feed, url,
converter=gdata.contacts.ContactsFeedFromString):
"""Sends a batch request feed to the server.
Args:
batch_feed: gdata.contacts.ContactFeed A feed containing batch
request entries. Each entry contains the operation to be performed
on the data contained in the entry. For example an entry with an
operation type of insert will be used as if the individual entry
had been inserted.
url: str The batch URL to which these operations should be applied.
converter: Function (optional) The function used to convert the server's
response to an object. The default value is ContactsFeedFromString.
Returns:
The results of the batch request's execution on the server. If the
default converter is used, this is stored in a ContactsFeed.
"""
return self.Post(batch_feed, url, converter=converter)
def ExecuteBatchProfiles(self, batch_feed, url,
converter=gdata.contacts.ProfilesFeedFromString):
"""Sends a batch request feed to the server.
Args:
batch_feed: gdata.profiles.ProfilesFeed A feed containing batch
request entries. Each entry contains the operation to be performed
on the data contained in the entry. For example an entry with an
operation type of insert will be used as if the individual entry
had been inserted.
url: string The batch URL to which these operations should be applied.
converter: Function (optional) The function used to convert the server's
response to an object. The default value is
gdata.profiles.ProfilesFeedFromString.
Returns:
The results of the batch request's execution on the server. If the
default converter is used, this is stored in a ProfilesFeed.
"""
return self.Post(batch_feed, url, converter=converter)
def _CleanUri(self, uri):
"""Sanitizes a feed URI.
Args:
uri: The URI to sanitize, can be relative or absolute.
Returns:
The given URI without its http://server prefix, if any.
Keeps the leading slash of the URI.
"""
url_prefix = 'http://%s' % self.server
if uri.startswith(url_prefix):
uri = uri[len(url_prefix):]
return uri
class ContactsQuery(gdata.service.Query):
def __init__(self, feed=None, text_query=None, params=None,
categories=None, group=None):
self.feed = feed or '/m8/feeds/contacts/default/full'
if group:
self._SetGroup(group)
gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
params=params, categories=categories)
def _GetGroup(self):
if 'group' in self:
return self['group']
else:
return None
def _SetGroup(self, group_id):
self['group'] = group_id
group = property(_GetGroup, _SetGroup,
doc='The group query parameter to find only contacts in this group')
class GroupsQuery(gdata.service.Query):
def __init__(self, feed=None, text_query=None, params=None,
categories=None):
self.feed = feed or '/m8/feeds/groups/default/full'
gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
params=params, categories=categories)
class ProfilesQuery(gdata.service.Query):
"""Constructs a query object for the profiles feed."""
def __init__(self, feed=None, text_query=None, params=None,
categories=None):
self.feed = feed or '/m8/feeds/profiles/default/full'
gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
params=params, categories=categories)
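# Illustrative usage sketch (not part of the original module); the e-mail,
# password and source values below are placeholders.
if __name__ == '__main__':
  client = ContactsService(email='[email protected]', password='secret',
                           source='example-app')
  client.ProgrammaticLogin()      # inherited from gdata.service.GDataService
  feed = client.GetContactsFeed()
  for entry in feed.entry:
    print(entry.title.text)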
| apache-2.0 |
40223136/w11-2 | static/Brython3.1.0-20150301-090019/Lib/multiprocessing/process.py | 694 | 2304 | #
# Module providing the `Process` class which emulates `threading.Thread`
#
# multiprocessing/process.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = ['Process', 'current_process', 'active_children']
#
# Imports
#
import os
import sys
import signal
import itertools
from _weakrefset import WeakSet
#for brython
from _multiprocessing import Process
#
#
#
try:
ORIGINAL_DIR = os.path.abspath(os.getcwd())
except OSError:
ORIGINAL_DIR = None
#
# Public functions
#
def current_process():
'''
Return process object representing the current process
'''
return _current_process
def active_children():
'''
Return list of process objects corresponding to live child processes
'''
_cleanup()
return list(_current_process._children)
#
#
#
def _cleanup():
# check for processes which have finished
for p in list(_current_process._children):
if p._popen.poll() is not None:
_current_process._children.discard(p)
#
# The `Process` class
#
# brython note: class Process is defined in /usr/libs/_multiprocessing.js
#
# We subclass bytes to avoid accidental transmission of auth keys over network
#
class AuthenticationString(bytes):
def __reduce__(self):
from .forking import Popen
if not Popen.thread_is_spawning():
raise TypeError(
'Pickling an AuthenticationString object is '
'disallowed for security reasons'
)
return AuthenticationString, (bytes(self),)
#
# Create object representing the main process
#
class _MainProcess(Process):
def __init__(self):
self._identity = ()
self._daemonic = False
self._name = 'MainProcess'
self._parent_pid = None
self._popen = None
self._counter = itertools.count(1)
self._children = set()
self._authkey = AuthenticationString(os.urandom(32))
self._tempdir = None
_current_process = _MainProcess()
del _MainProcess
#
# Give names to some return codes
#
_exitcode_to_name = {}
for name, signum in list(signal.__dict__.items()):
if name[:3]=='SIG' and '_' not in name:
_exitcode_to_name[-signum] = name
# For debug and leak testing
_dangling = WeakSet()
| gpl-3.0 |
cherusk/ansible | lib/ansible/plugins/cache/jsonfile.py | 36 | 1681 | # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
DOCUMENTATION:
cache: jsonfile
    short_description: File backed, JSON formatted.
    description:
        - File backed cache that uses JSON as a format; the files are per host.
version_added: "1.9"
author: Brian Coca (@bcoca)
'''
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import codecs
try:
import simplejson as json
except ImportError:
import json
from ansible.parsing.utils.jsonify import jsonify
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by json files.
"""
def _load(self, filepath):
# Valid JSON is always UTF-8 encoded.
with codecs.open(filepath, 'r', encoding='utf-8') as f:
return json.load(f)
def _dump(self, value, filepath):
with codecs.open(filepath, 'w', encoding='utf-8') as f:
f.write(jsonify(value, format=True))
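# Illustrative configuration sketch (not part of the plugin); enabling it is
# done from ansible.cfg, and the connection path below is a placeholder.
#   [defaults]
#   fact_caching = jsonfile
#   fact_caching_connection = /tmp/ansible_fact_cache
#   fact_caching_timeout = 86400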
| gpl-3.0 |
openregister/openregister-python | openregister/item.py | 2 | 1777 | from copy import copy
from .datatypes.digest import git_hash, base32_encode
class Item(object):
"""An Item, a content addressable set of attributes."""
def __init__(self, **kwds):
self.__dict__.update(kwds)
def __getitem__(self, key, default=None):
try:
return self.__dict__[key]
except KeyError:
return default
def __setitem__(self, key, value):
if not value:
if key in self.__dict__:
self.__dict__.__delitem__(key)
return None
self.__dict__[key] = value
return value
def get(self, key, default=None):
return self.__getitem__(key, default)
def set(self, key, value):
return self.__setitem__(key, value)
@property
def hash(self):
"""The git hash-object value of for the Item."""
return git_hash(self.json.encode("utf-8"))
@property
def hashkey(self):
"""The hash value as a RFC 3548 Base 32 encoded string."""
return base32_encode(self.hash)
@property
def keys(self):
return sorted(list(self.primitive.keys()))
@property
def values(self):
return (self.__dict__[key] for key in self.keys)
@property
def primitive(self):
"""Python primitive representation."""
dict = {}
for key, value in self.__dict__.items():
if not key.startswith('_'):
dict[key] = copy(value)
for key in dict:
if isinstance(dict[key], (set)):
dict[key] = sorted(list(dict[key]))
return dict
@primitive.setter
def primitive(self, dictionary):
"""Item from Python primitive."""
self.__dict__ = {k: v for k, v in dictionary.items() if v}
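# Illustrative usage sketch (not part of the module):
if __name__ == '__main__':
    item = Item(street='high street', tags={'b', 'a'})
    item['name'] = 'foo'          # __setitem__ silently drops falsy values
    print(item.keys)              # ['name', 'street', 'tags']
    print(item.primitive)         # sets are rendered as sorted lists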
| mit |
rversteegen/commandergenius | project/jni/python/src/Demo/pdist/makechangelog.py | 43 | 2989 | #! /usr/bin/env python
"""Turn a pile of RCS log output into ChangeLog file entries.
"""
import sys
import string
import re
import getopt
import time
def main():
args = sys.argv[1:]
opts, args = getopt.getopt(args, 'p:')
prefix = ''
for o, a in opts:
        if o == '-p': prefix = a
f = sys.stdin
allrevs = []
while 1:
file = getnextfile(f)
if not file: break
revs = []
while 1:
rev = getnextrev(f, file)
if not rev:
break
revs.append(rev)
if revs:
allrevs[len(allrevs):] = revs
allrevs.sort()
allrevs.reverse()
for rev in allrevs:
formatrev(rev, prefix)
parsedateprog = re.compile(
'^date: ([0-9]+)/([0-9]+)/([0-9]+) ' +
'([0-9]+):([0-9]+):([0-9]+); author: ([^ ;]+)')
authormap = {
'guido': 'Guido van Rossum <[email protected]>',
'jack': 'Jack Jansen <[email protected]>',
'sjoerd': 'Sjoerd Mullender <[email protected]>',
}
def formatrev(rev, prefix):
dateline, file, revline, log = rev
if parsedateprog.match(dateline) >= 0:
fields = parsedateprog.group(1, 2, 3, 4, 5, 6)
author = parsedateprog.group(7)
if authormap.has_key(author): author = authormap[author]
tfields = map(string.atoi, fields) + [0, 0, 0]
tfields[5] = tfields[5] - time.timezone
t = time.mktime(tuple(tfields))
print time.ctime(t), '', author
words = string.split(log)
words[:0] = ['*', prefix + file + ':']
maxcol = 72-8
col = maxcol
for word in words:
if col > 0 and col + len(word) >= maxcol:
print
print '\t' + word,
col = -1
else:
print word,
col = col + 1 + len(word)
print
print
startprog = re.compile("^Working file: (.*)$")
def getnextfile(f):
while 1:
line = f.readline()
if not line: return None
if startprog.match(line) >= 0:
file = startprog.group(1)
# Skip until first revision
while 1:
line = f.readline()
if not line: return None
if line[:10] == '='*10: return None
if line[:10] == '-'*10: break
## print "Skipped", line,
return file
## else:
## print "Ignored", line,
def getnextrev(f, file):
# This is called when we are positioned just after a '---' separator
revline = f.readline()
dateline = f.readline()
log = ''
while 1:
line = f.readline()
if not line: break
if line[:10] == '='*10:
# Ignore the *last* log entry for each file since it
# is the revision since which we are logging.
return None
if line[:10] == '-'*10: break
log = log + line
return dateline, file, revline, log
if __name__ == '__main__':
main()
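# Illustrative invocation (not part of the script); it reads RCS "rlog" output
# from stdin, so a typical pipeline would be something like:
#   rlog RCS/*,v | python makechangelog.py -p Lib/ > ChangeLog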
| lgpl-2.1 |
2014c2g9/c2g9 | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/atexit.py | 743 | 1049 | """allow programmer to define multiple exit functions to be executed upon normal program termination.
Two public functions, register and unregister, are defined.
"""
class __loader__(object):
pass
def _clear(*args,**kw):
"""_clear() -> None
Clear the list of previously registered exit functions."""
pass
def _run_exitfuncs(*args,**kw):
"""_run_exitfuncs() -> None
Run all registered exit functions."""
pass
def register(*args,**kw):
"""register(func, *args, **kwargs) -> func
Register a function to be executed upon normal program termination
func - function to be called at exit
args - optional arguments to pass to func
kwargs - optional keyword arguments to pass to func
func is returned to facilitate usage as a decorator."""
pass
def unregister(*args,**kw):
"""unregister(func) -> None
    Unregister an exit function which was previously registered using
atexit.register
func - function to be unregistered"""
pass
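# Illustrative usage of the documented contract (not part of the stub); the
# bodies above are placeholders, so this only shows the intended call shape.
#   import atexit
#   def goodbye(name):
#       print("bye", name)
#   atexit.register(goodbye, "world")   # runs at normal interpreter exit
#   atexit.unregister(goodbye)          # removes it again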
| gpl-2.0 |
edum1978/eduengage | boilerplate/external/babel/messages/tests/checkers.py | 31 | 12764 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
from datetime import datetime
import time
import unittest
from StringIO import StringIO
from babel import __version__ as VERSION
from babel.core import Locale, UnknownLocaleError
from babel.dates import format_datetime
from babel.messages import checkers
from babel.messages.plurals import PLURALS
from babel.messages.pofile import read_po
from babel.util import LOCALTZ
class CheckersTestCase(unittest.TestCase):
# the last msgstr[idx] is always missing except for singular plural forms
def test_1_num_plurals_checkers(self):
for _locale in [p for p in PLURALS if PLURALS[p][0] == 1]:
try:
locale = Locale.parse(_locale)
except UnknownLocaleError:
# Just an alias? Not what we're testing here, let's continue
continue
po_file = (ur"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
""" % dict(locale = _locale,
english_name = locale.english_name,
version = VERSION,
year = time.strftime('%Y'),
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale),
num_plurals = PLURALS[_locale][0],
plural_expr = PLURALS[_locale][0])).encode('utf-8')
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def test_2_num_plurals_checkers(self):
# in this testcase we add an extra msgstr[idx], we should be
# disregarding it
for _locale in [p for p in PLURALS if PLURALS[p][0] == 2]:
if _locale in ['nn', 'no']:
_locale = 'nn_NO'
num_plurals = PLURALS[_locale.split('_')[0]][0]
plural_expr = PLURALS[_locale.split('_')[0]][1]
else:
num_plurals = PLURALS[_locale][0]
plural_expr = PLURALS[_locale][1]
try:
locale = Locale(_locale)
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale)
except UnknownLocaleError:
# Just an alias? Not what we're testing here, let's continue
continue
po_file = (ur"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
""" % dict(locale = _locale,
english_name = locale.english_name,
version = VERSION,
year = time.strftime('%Y'),
date = date,
num_plurals = num_plurals,
plural_expr = plural_expr)).encode('utf-8')
# we should be adding the missing msgstr[0]
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def test_3_num_plurals_checkers(self):
for _locale in [p for p in PLURALS if PLURALS[p][0] == 3]:
po_file = r"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % dict(locale = _locale,
english_name = Locale.parse(_locale).english_name,
version = VERSION,
year = time.strftime('%Y'),
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale),
num_plurals = PLURALS[_locale][0],
plural_expr = PLURALS[_locale][0])
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def test_4_num_plurals_checkers(self):
for _locale in [p for p in PLURALS if PLURALS[p][0] == 4]:
po_file = r"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
""" % dict(locale = _locale,
english_name = Locale.parse(_locale).english_name,
version = VERSION,
year = time.strftime('%Y'),
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale),
num_plurals = PLURALS[_locale][0],
plural_expr = PLURALS[_locale][0])
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def test_5_num_plurals_checkers(self):
for _locale in [p for p in PLURALS if PLURALS[p][0] == 5]:
po_file = r"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
msgstr[3] ""
""" % dict(locale = _locale,
english_name = Locale.parse(_locale).english_name,
version = VERSION,
year = time.strftime('%Y'),
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale),
num_plurals = PLURALS[_locale][0],
plural_expr = PLURALS[_locale][0])
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def test_6_num_plurals_checkers(self):
for _locale in [p for p in PLURALS if PLURALS[p][0] == 6]:
po_file = r"""\
# %(english_name)s translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: %(locale)s <[email protected]>\n"
"Plural-Forms: nplurals=%(num_plurals)s; plural=%(plural_expr)s\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator comment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
msgstr[3] ""
msgstr[4] ""
""" % dict(locale = _locale,
english_name = Locale.parse(_locale).english_name,
version = VERSION,
year = time.strftime('%Y'),
date = format_datetime(datetime.now(LOCALTZ),
'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale=_locale),
num_plurals = PLURALS[_locale][0],
plural_expr = PLURALS[_locale][0])
# This test will fail for revisions <= 406 because so far
# catalog.num_plurals was neglected
catalog = read_po(StringIO(po_file), _locale)
message = catalog['foobar']
checkers.num_plurals(catalog, message)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(CheckersTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| lgpl-3.0 |
kaiyou/docker-py | tests/unit/api_network_test.py | 4 | 6282 | import json
import six
from .api_test import BaseAPIClientTest, url_prefix, response
from ..helpers import requires_api_version
from docker.types import IPAMConfig, IPAMPool
try:
from unittest import mock
except ImportError:
import mock
class NetworkTest(BaseAPIClientTest):
@requires_api_version('1.21')
def test_list_networks(self):
networks = [
{
"name": "none",
"id": "8e4e55c6863ef424",
"type": "null",
"endpoints": []
},
{
"name": "host",
"id": "062b6d9ea7913fde",
"type": "host",
"endpoints": []
},
]
get = mock.Mock(return_value=response(
status_code=200, content=json.dumps(networks).encode('utf-8')))
with mock.patch('docker.api.client.APIClient.get', get):
self.assertEqual(self.client.networks(), networks)
self.assertEqual(get.call_args[0][0], url_prefix + 'networks')
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertFalse(filters)
self.client.networks(names=['foo'])
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertEqual(filters, {'name': ['foo']})
self.client.networks(ids=['123'])
filters = json.loads(get.call_args[1]['params']['filters'])
self.assertEqual(filters, {'id': ['123']})
@requires_api_version('1.21')
def test_create_network(self):
network_data = {
"id": 'abc12345',
"warning": "",
}
network_response = response(status_code=200, content=network_data)
post = mock.Mock(return_value=network_response)
with mock.patch('docker.api.client.APIClient.post', post):
result = self.client.create_network('foo')
self.assertEqual(result, network_data)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/create')
self.assertEqual(
json.loads(post.call_args[1]['data']),
{"Name": "foo"})
opts = {
'com.docker.network.bridge.enable_icc': False,
'com.docker.network.bridge.enable_ip_masquerade': False,
}
self.client.create_network('foo', 'bridge', opts)
self.assertEqual(
json.loads(post.call_args[1]['data']),
{"Name": "foo", "Driver": "bridge", "Options": opts})
ipam_pool_config = IPAMPool(subnet="192.168.52.0/24",
gateway="192.168.52.254")
ipam_config = IPAMConfig(pool_configs=[ipam_pool_config])
self.client.create_network("bar", driver="bridge",
ipam=ipam_config)
self.assertEqual(
json.loads(post.call_args[1]['data']),
{
"Name": "bar",
"Driver": "bridge",
"IPAM": {
"Driver": "default",
"Config": [{
"IPRange": None,
"Gateway": "192.168.52.254",
"Subnet": "192.168.52.0/24",
"AuxiliaryAddresses": None,
}],
}
})
@requires_api_version('1.21')
def test_remove_network(self):
network_id = 'abc12345'
delete = mock.Mock(return_value=response(status_code=200))
with mock.patch('docker.api.client.APIClient.delete', delete):
self.client.remove_network(network_id)
args = delete.call_args
self.assertEqual(args[0][0],
url_prefix + 'networks/{0}'.format(network_id))
@requires_api_version('1.21')
def test_inspect_network(self):
network_id = 'abc12345'
network_name = 'foo'
network_data = {
six.u('name'): network_name,
six.u('id'): network_id,
six.u('driver'): 'bridge',
six.u('containers'): {},
}
network_response = response(status_code=200, content=network_data)
get = mock.Mock(return_value=network_response)
with mock.patch('docker.api.client.APIClient.get', get):
result = self.client.inspect_network(network_id)
self.assertEqual(result, network_data)
args = get.call_args
self.assertEqual(args[0][0],
url_prefix + 'networks/{0}'.format(network_id))
@requires_api_version('1.21')
def test_connect_container_to_network(self):
network_id = 'abc12345'
container_id = 'def45678'
post = mock.Mock(return_value=response(status_code=201))
with mock.patch('docker.api.client.APIClient.post', post):
self.client.connect_container_to_network(
{'Id': container_id},
network_id,
aliases=['foo', 'bar'],
links=[('baz', 'quux')]
)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/{0}/connect'.format(network_id))
self.assertEqual(
json.loads(post.call_args[1]['data']),
{
'Container': container_id,
'EndpointConfig': {
'Aliases': ['foo', 'bar'],
'Links': ['baz:quux'],
},
})
@requires_api_version('1.21')
def test_disconnect_container_from_network(self):
network_id = 'abc12345'
container_id = 'def45678'
post = mock.Mock(return_value=response(status_code=201))
with mock.patch('docker.api.client.APIClient.post', post):
self.client.disconnect_container_from_network(
{'Id': container_id}, network_id)
self.assertEqual(
post.call_args[0][0],
url_prefix + 'networks/{0}/disconnect'.format(network_id))
self.assertEqual(
json.loads(post.call_args[1]['data']),
{'Container': container_id})
| apache-2.0 |
huiren/ece511 | ext/ply/doc/makedoc.py | 177 | 5862 | #!/usr/local/bin/python
###############################################################################
# Takes a chapter as input and adds internal links and numbering to all
# of the H1, H2, H3, H4 and H5 sections.
#
# Every heading HTML tag (H1, H2 etc) is given an autogenerated name to link
# to. However, if the name is not an autogenerated name from a previous run,
# it will be kept. If it is autogenerated, it might change on subsequent runs
# of this program. Thus if you want to create links to one of the headings,
# then change the heading link name to something that does not look like an
# autogenerated link name.
###############################################################################
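# Illustrative example (not part of the original script): for an input file
# named "ply.html", a plain heading such as
#     <H2>Lex</H2>
# is rewritten on output to something like
#     <H2><a name="ply_nn3"></a>3. Lex</H2>
# (assuming it is the third heading encountered and the third H2 section),
# and a matching <li><a href="#ply_nn3">Lex</a> entry is added to the
# generated index.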
import sys
import re
import string
###############################################################################
# Functions
###############################################################################
# Regexs for <a name="..."></a>
alink = re.compile(r"<a *name *= *\"(.*)\"></a>", re.IGNORECASE)
heading = re.compile(r"(_nn\d)", re.IGNORECASE)
def getheadingname(m):
autogeneratedheading = True;
if m.group(1) != None:
amatch = alink.match(m.group(1))
if amatch:
# A non-autogenerated heading - keep it
headingname = amatch.group(1)
autogeneratedheading = heading.match(headingname)
if autogeneratedheading:
# The heading name was either non-existent or autogenerated,
# We can create a new heading / change the existing heading
headingname = "%s_nn%d" % (filenamebase, nameindex)
return headingname
###############################################################################
# Main program
###############################################################################
if len(sys.argv) != 2:
print "usage: makedoc.py filename"
sys.exit(1)
filename = sys.argv[1]
filenamebase = string.split(filename,".")[0]
section = 0
subsection = 0
subsubsection = 0
subsubsubsection = 0
nameindex = 0
name = ""
# Regexs for <h1>,... <h5> sections
h1 = re.compile(r".*?<H1>(<a.*a>)*[\d\.\s]*(.*?)</H1>", re.IGNORECASE)
h2 = re.compile(r".*?<H2>(<a.*a>)*[\d\.\s]*(.*?)</H2>", re.IGNORECASE)
h3 = re.compile(r".*?<H3>(<a.*a>)*[\d\.\s]*(.*?)</H3>", re.IGNORECASE)
h4 = re.compile(r".*?<H4>(<a.*a>)*[\d\.\s]*(.*?)</H4>", re.IGNORECASE)
h5 = re.compile(r".*?<H5>(<a.*a>)*[\d\.\s]*(.*?)</H5>", re.IGNORECASE)
data = open(filename).read() # Read data
open(filename+".bak","w").write(data) # Make backup
lines = data.splitlines()
result = [ ] # This is the result of postprocessing the file
index = "<!-- INDEX -->\n<div class=\"sectiontoc\">\n" # index contains the index for adding at the top of the file. Also printed to stdout.
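# skip is set while we are inside a previously generated <!-- INDEX --> block,
# whose old contents are dropped and replaced by a single @INDEX@ placeholder;
# skipspace is set after a rewritten heading so that any blank lines that
# followed it are collapsed to exactly two.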
skip = 0
skipspace = 0
for s in lines:
if s == "<!-- INDEX -->":
if not skip:
result.append("@INDEX@")
skip = 1
else:
skip = 0
continue;
if skip:
continue
if not s and skipspace:
continue
if skipspace:
result.append("")
result.append("")
skipspace = 0
m = h2.match(s)
if m:
prevheadingtext = m.group(2)
nameindex += 1
section += 1
headingname = getheadingname(m)
result.append("""<H2><a name="%s"></a>%d. %s</H2>""" % (headingname,section, prevheadingtext))
if subsubsubsection:
index += "</ul>\n"
if subsubsection:
index += "</ul>\n"
if subsection:
index += "</ul>\n"
if section == 1:
index += "<ul>\n"
index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
subsection = 0
subsubsection = 0
subsubsubsection = 0
skipspace = 1
continue
m = h3.match(s)
if m:
prevheadingtext = m.group(2)
nameindex += 1
subsection += 1
headingname = getheadingname(m)
result.append("""<H3><a name="%s"></a>%d.%d %s</H3>""" % (headingname,section, subsection, prevheadingtext))
if subsubsubsection:
index += "</ul>\n"
if subsubsection:
index += "</ul>\n"
if subsection == 1:
index += "<ul>\n"
index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
subsubsection = 0
skipspace = 1
continue
m = h4.match(s)
if m:
prevheadingtext = m.group(2)
nameindex += 1
subsubsection += 1
subsubsubsection = 0
headingname = getheadingname(m)
result.append("""<H4><a name="%s"></a>%d.%d.%d %s</H4>""" % (headingname,section, subsection, subsubsection, prevheadingtext))
if subsubsubsection:
index += "</ul>\n"
if subsubsection == 1:
index += "<ul>\n"
index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
skipspace = 1
continue
m = h5.match(s)
if m:
prevheadingtext = m.group(2)
nameindex += 1
subsubsubsection += 1
headingname = getheadingname(m)
result.append("""<H5><a name="%s"></a>%d.%d.%d.%d %s</H5>""" % (headingname,section, subsection, subsubsection, subsubsubsection, prevheadingtext))
if subsubsubsection == 1:
index += "<ul>\n"
index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
skipspace = 1
continue
result.append(s)
if subsubsubsection:
index += "</ul>\n"
if subsubsection:
index += "</ul>\n"
if subsection:
index += "</ul>\n"
if section:
index += "</ul>\n"
index += "</div>\n<!-- INDEX -->\n"
data = "\n".join(result)
data = data.replace("@INDEX@",index) + "\n";
# Write the file back out
open(filename,"w").write(data)
| bsd-3-clause |
bratsche/Neutron-Drive | google_appengine/lib/django_1_3/django/contrib/localflavor/pl/forms.py | 273 | 5444 | """
Polish-specific form helpers
"""
import re
from django.forms import ValidationError
from django.forms.fields import Select, RegexField
from django.utils.translation import ugettext_lazy as _
from django.core.validators import EMPTY_VALUES
class PLProvinceSelect(Select):
"""
    A select widget with a list of Polish administrative provinces as choices.
"""
def __init__(self, attrs=None):
from pl_voivodeships import VOIVODESHIP_CHOICES
super(PLProvinceSelect, self).__init__(attrs, choices=VOIVODESHIP_CHOICES)
class PLCountySelect(Select):
"""
    A select widget with a list of Polish administrative units as choices.
"""
def __init__(self, attrs=None):
from pl_administrativeunits import ADMINISTRATIVE_UNIT_CHOICES
super(PLCountySelect, self).__init__(attrs, choices=ADMINISTRATIVE_UNIT_CHOICES)
class PLPESELField(RegexField):
"""
A form field that validates as Polish Identification Number (PESEL).
Checks the following rules:
        * the length consists of 11 digits
* has a valid checksum
The algorithm is documented at http://en.wikipedia.org/wiki/PESEL.
"""
default_error_messages = {
'invalid': _(u'National Identification Number consists of 11 digits.'),
'checksum': _(u'Wrong checksum for the National Identification Number.'),
}
def __init__(self, *args, **kwargs):
super(PLPESELField, self).__init__(r'^\d{11}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLPESELField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3, 1)
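        # PESEL check: the weighted sum of all 11 digits (the final check
        # digit carries weight 1) must be divisible by 10.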
result = 0
for i in range(len(number)):
result += int(number[i]) * multiple_table[i]
return result % 10 == 0
class PLNIPField(RegexField):
"""
A form field that validates as Polish Tax Number (NIP).
Valid forms are: XXX-XXX-YY-YY or XX-XX-YYY-YYY.
Checksum algorithm based on documentation at
http://wipos.p.lodz.pl/zylla/ut/nip-rego.html
"""
default_error_messages = {
'invalid': _(u'Enter a tax number field (NIP) in the format XXX-XXX-XX-XX or XX-XX-XXX-XXX.'),
'checksum': _(u'Wrong checksum for the Tax Number (NIP).'),
}
def __init__(self, *args, **kwargs):
super(PLNIPField, self).__init__(r'^\d{3}-\d{3}-\d{2}-\d{2}$|^\d{2}-\d{2}-\d{3}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLNIPField, self).clean(value)
if value in EMPTY_VALUES:
return u''
value = re.sub("[-]", "", value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (6, 5, 7, 2, 3, 4, 5, 6, 7)
result = 0
for i in range(len(number)-1):
result += int(number[i]) * multiple_table[i]
result %= 11
if result == int(number[-1]):
return True
else:
return False
class PLREGONField(RegexField):
"""
    A form field that validates that its input is a REGON number.
    A valid REGON number consists of 9 or 14 digits.
See http://www.stat.gov.pl/bip/regon_ENG_HTML.htm for more information.
"""
default_error_messages = {
'invalid': _(u'National Business Register Number (REGON) consists of 9 or 14 digits.'),
'checksum': _(u'Wrong checksum for the National Business Register Number (REGON).'),
}
def __init__(self, *args, **kwargs):
super(PLREGONField, self).__init__(r'^\d{9,14}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLREGONField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
weights = (
(8, 9, 2, 3, 4, 5, 6, 7, -1),
(2, 4, 8, 5, 0, 9, 7, 3, 6, 1, 2, 4, 8, -1),
(8, 9, 2, 3, 4, 5, 6, 7, -1, 0, 0, 0, 0, 0),
)
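        # Keep only the weight table(s) matching the number's length: one
        # 9-element table for 9-digit numbers, two 14-element tables for
        # 14-digit numbers (the full number and its leading 9 digits).
        # The check-digit position carries weight -1, so each weighted sum
        # must satisfy sum % 11 % 10 == 0.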
weights = [table for table in weights if len(table) == len(number)]
for table in weights:
checksum = sum([int(n) * w for n, w in zip(number, table)])
if checksum % 11 % 10:
return False
return bool(weights)
class PLPostalCodeField(RegexField):
"""
A form field that validates as Polish postal code.
    Valid code is XX-XXX where X is a digit.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XX-XXX.'),
}
def __init__(self, *args, **kwargs):
super(PLPostalCodeField, self).__init__(r'^\d{2}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
| bsd-3-clause |
lmazuel/azure-sdk-for-python | azure-mgmt-web/azure/mgmt/web/models/deleted_site.py | 1 | 2033 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class DeletedSite(Model):
"""A deleted app.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Numeric id for the deleted site
:type id: int
:ivar deleted_timestamp: Time in UTC when the app was deleted.
:vartype deleted_timestamp: str
:ivar subscription: Subscription containing the deleted site
:vartype subscription: str
:ivar resource_group: ResourceGroup that contained the deleted site
:vartype resource_group: str
:ivar name: Name of the deleted site
:vartype name: str
:ivar slot: Slot of the deleted site
:vartype slot: str
"""
_validation = {
'deleted_timestamp': {'readonly': True},
'subscription': {'readonly': True},
'resource_group': {'readonly': True},
'name': {'readonly': True},
'slot': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'deleted_timestamp': {'key': 'deletedTimestamp', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'slot': {'key': 'slot', 'type': 'str'},
}
def __init__(self, id=None):
super(DeletedSite, self).__init__()
self.id = id
self.deleted_timestamp = None
self.subscription = None
self.resource_group = None
self.name = None
self.slot = None
| mit |
tux-00/ansible | lib/ansible/modules/cloud/cloudstack/cs_loadbalancer_rule.py | 54 | 11383 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Darren Worrall <[email protected]>
# (c) 2015, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_loadbalancer_rule
short_description: Manages load balancer rules on Apache CloudStack based clouds.
description:
- Add, update and remove load balancer rules.
version_added: '2.0'
author:
- "Darren Worrall (@dazworrall)"
- "René Moser (@resmo)"
options:
name:
description:
- The name of the load balancer rule.
required: true
description:
description:
- The description of the load balancer rule.
required: false
default: null
algorithm:
description:
- Load balancer algorithm
- Required when using C(state=present).
required: false
choices: [ 'source', 'roundrobin', 'leastconn' ]
default: 'source'
private_port:
description:
      - The private port of the private IP address/virtual machine where the network traffic will be load balanced to.
      - Required when using C(state=present).
      - Can not be changed once the rule exists due to API limitation.
required: false
default: null
public_port:
description:
      - The public port from which the network traffic will be load balanced.
      - Required when using C(state=present).
      - Can not be changed once the rule exists due to API limitation.
required: true
default: null
ip_address:
description:
      - Public IP address from which the network traffic will be load balanced.
required: true
aliases: [ 'public_ip' ]
open_firewall:
description:
- Whether the firewall rule for public port should be created, while creating the new rule.
- Use M(cs_firewall) for managing firewall rules.
required: false
default: false
cidr:
description:
- CIDR (full notation) to be used for firewall rule if required.
required: false
default: null
protocol:
description:
- The protocol to be used on the load balancer
required: false
default: null
project:
description:
- Name of the project the load balancer IP address is related to.
required: false
default: null
state:
description:
- State of the rule.
required: true
default: 'present'
choices: [ 'present', 'absent' ]
domain:
description:
- Domain the rule is related to.
required: false
default: null
account:
description:
- Account the rule is related to.
required: false
default: null
zone:
description:
- Name of the zone in which the rule should be created.
- If not set, default zone is used.
required: false
default: null
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create a load balancer rule
- local_action:
module: cs_loadbalancer_rule
name: balance_http
public_ip: 1.2.3.4
algorithm: leastconn
public_port: 80
private_port: 8080
# Update the algorithm of an existing load balancer rule
- local_action:
module: cs_loadbalancer_rule
name: balance_http
public_ip: 1.2.3.4
algorithm: roundrobin
public_port: 80
private_port: 8080
# Delete a load balancer rule
- local_action:
module: cs_loadbalancer_rule
name: balance_http
public_ip: 1.2.3.4
state: absent
'''
RETURN = '''
---
id:
description: UUID of the rule.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
zone:
description: Name of zone the rule is related to.
returned: success
type: string
sample: ch-gva-2
project:
description: Name of project the rule is related to.
returned: success
type: string
sample: Production
account:
description: Account the rule is related to.
returned: success
type: string
sample: example account
domain:
description: Domain the rule is related to.
returned: success
type: string
sample: example domain
algorithm:
description: Load balancer algorithm used.
returned: success
type: string
sample: "source"
cidr:
description: CIDR to forward traffic from.
returned: success
type: string
sample: ""
name:
description: Name of the rule.
returned: success
type: string
sample: "http-lb"
description:
description: Description of the rule.
returned: success
type: string
sample: "http load balancer rule"
protocol:
description: Protocol of the rule.
returned: success
type: string
sample: "tcp"
public_port:
description: Public port.
returned: success
type: string
sample: 80
private_port:
    description: Private port.
returned: success
type: string
sample: 80
public_ip:
description: Public IP address.
returned: success
type: string
sample: "1.2.3.4"
tags:
description: List of resource tags associated with the rule.
returned: success
type: dict
sample: '[ { "key": "foo", "value": "bar" } ]'
state:
description: State of the rule.
returned: success
type: string
sample: "Add"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
CloudStackException,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackLBRule(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackLBRule, self).__init__(module)
self.returns = {
'publicip': 'public_ip',
'algorithm': 'algorithm',
'cidrlist': 'cidr',
'protocol': 'protocol',
}
# these values will be casted to int
self.returns_to_int = {
'publicport': 'public_port',
'privateport': 'private_port',
}
def get_rule(self, **kwargs):
rules = self.cs.listLoadBalancerRules(**kwargs)
if rules:
return rules['loadbalancerrule'][0]
def _get_common_args(self):
return {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'zoneid': self.get_zone(key='id') if self.module.params.get('zone') else None,
'publicipid': self.get_ip_address(key='id'),
'name': self.module.params.get('name'),
}
def present_lb_rule(self):
missing_params = []
for required_params in [
'algorithm',
'private_port',
'public_port',
]:
if not self.module.params.get(required_params):
missing_params.append(required_params)
if missing_params:
self.module.fail_json(msg="missing required arguments: %s" % ','.join(missing_params))
args = self._get_common_args()
rule = self.get_rule(**args)
if rule:
rule = self._update_lb_rule(rule)
else:
rule = self._create_lb_rule(rule)
if rule:
rule = self.ensure_tags(resource=rule, resource_type='LoadBalancer')
return rule
def _create_lb_rule(self, rule):
self.result['changed'] = True
if not self.module.check_mode:
args = self._get_common_args()
args.update({
'algorithm': self.module.params.get('algorithm'),
'privateport': self.module.params.get('private_port'),
'publicport': self.module.params.get('public_port'),
'cidrlist': self.module.params.get('cidr'),
'description': self.module.params.get('description'),
'protocol': self.module.params.get('protocol'),
})
res = self.cs.createLoadBalancerRule(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if poll_async:
rule = self.poll_job(res, 'loadbalancer')
return rule
def _update_lb_rule(self, rule):
args = {
'id': rule['id'],
'algorithm': self.module.params.get('algorithm'),
'description': self.module.params.get('description'),
}
if self.has_changed(args, rule):
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.updateLoadBalancerRule(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if poll_async:
rule = self.poll_job(res, 'loadbalancer')
return rule
def absent_lb_rule(self):
args = self._get_common_args()
rule = self.get_rule(**args)
if rule:
self.result['changed'] = True
if rule and not self.module.check_mode:
res = self.cs.deleteLoadBalancerRule(id=rule['id'])
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if poll_async:
res = self.poll_job(res, 'loadbalancer')
return rule
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
description=dict(),
algorithm=dict(choices=['source', 'roundrobin', 'leastconn'], default='source'),
private_port=dict(type='int'),
public_port=dict(type='int'),
protocol=dict(),
state=dict(choices=['present', 'absent'], default='present'),
ip_address=dict(required=True, aliases=['public_ip']),
cidr=dict(),
project=dict(),
open_firewall=dict(type='bool', default=False),
tags=dict(type='list', aliases=['tag']),
zone=dict(),
domain=dict(),
account=dict(),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
try:
acs_lb_rule = AnsibleCloudStackLBRule(module)
state = module.params.get('state')
if state in ['absent']:
rule = acs_lb_rule.absent_lb_rule()
else:
rule = acs_lb_rule.present_lb_rule()
result = acs_lb_rule.get_result(rule)
except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
JimCircadian/ansible | lib/ansible/modules/storage/infinidat/infini_export_client.py | 43 | 5489 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Gregory Shulov ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: infini_export_client
version_added: 2.3
short_description: Create, Delete or Modify NFS Client(s) for existing exports on Infinibox
description:
    - This module creates, deletes or modifies NFS client(s) for existing exports on Infinibox.
author: Gregory Shulov (@GR360RY)
options:
client:
description:
- Client IP or Range. Ranges can be defined as follows
192.168.0.1-192.168.0.254.
aliases: ['name']
required: true
state:
description:
- Creates/Modifies client when present and removes when absent.
required: false
default: "present"
choices: [ "present", "absent" ]
access_mode:
description:
- Read Write or Read Only Access.
choices: [ "RW", "RO" ]
default: RW
required: false
no_root_squash:
description:
- Don't squash root user to anonymous. Will be set to "no" on creation if not specified explicitly.
type: bool
default: no
required: false
export:
description:
- Name of the export.
required: true
extends_documentation_fragment:
- infinibox
requirements:
- munch
'''
EXAMPLES = '''
- name: Make sure nfs client 10.0.0.1 is configured for export. Allow root access
infini_export_client:
client: 10.0.0.1
access_mode: RW
no_root_squash: yes
export: /data
user: admin
password: secret
system: ibox001
- name: Add multiple clients with RO access. Squash root privileges
infini_export_client:
client: "{{ item }}"
access_mode: RO
no_root_squash: no
export: /data
user: admin
password: secret
system: ibox001
with_items:
- 10.0.0.2
- 10.0.0.3
'''
RETURN = '''
'''
try:
from munch import Munch, unmunchify
HAS_MUNCH = True
except ImportError:
HAS_MUNCH = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.infinibox import HAS_INFINISDK, api_wrapper, get_system, infinibox_argument_spec
def transform(d):
return frozenset(d.items())
@api_wrapper
def get_export(module, system):
    """Return export if found. Fail module if not found"""
try:
export = system.exports.get(export_path=module.params['export'])
except:
module.fail_json(msg="Export with export path {} not found".format(module.params['export']))
return export
@api_wrapper
def update_client(module, export):
"""Update export client list"""
changed = False
client = module.params['client']
access_mode = module.params['access_mode']
no_root_squash = module.params['no_root_squash']
client_list = export.get_permissions()
client_not_in_list = True
for index, item in enumerate(client_list):
if item.client == client:
client_not_in_list = False
if item.access != access_mode:
item.access = access_mode
changed = True
if item.no_root_squash is not no_root_squash:
item.no_root_squash = no_root_squash
changed = True
# If access_mode and/or no_root_squash not passed as arguments to the module,
# use access_mode with RW value and set no_root_squash to False
if client_not_in_list:
changed = True
client_list.append(Munch(client=client, access=access_mode, no_root_squash=no_root_squash))
if changed:
for index, item in enumerate(client_list):
client_list[index] = unmunchify(item)
if not module.check_mode:
export.update_permissions(client_list)
module.exit_json(changed=changed)
@api_wrapper
def delete_client(module, export):
    """Remove client from export client list"""
changed = False
client = module.params['client']
client_list = export.get_permissions()
for index, item in enumerate(client_list):
if item.client == client:
changed = True
del client_list[index]
if changed:
for index, item in enumerate(client_list):
client_list[index] = unmunchify(item)
if not module.check_mode:
export.update_permissions(client_list)
module.exit_json(changed=changed)
def main():
argument_spec = infinibox_argument_spec()
argument_spec.update(
dict(
client=dict(required=True),
access_mode=dict(choices=['RO', 'RW'], default='RW'),
no_root_squash=dict(type='bool', default='no'),
state=dict(default='present', choices=['present', 'absent']),
export=dict(required=True)
)
)
module = AnsibleModule(argument_spec, supports_check_mode=True)
if not HAS_INFINISDK:
module.fail_json(msg='infinisdk is required for this module')
if not HAS_MUNCH:
module.fail_json(msg='the python munch library is required for this module')
system = get_system(module)
export = get_export(module, system)
if module.params['state'] == 'present':
update_client(module, export)
else:
delete_client(module, export)
if __name__ == '__main__':
main()
| gpl-3.0 |
timpaul/remember | node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)
| mit |
Elettronik/SickRage | tests/config_tests.py | 12 | 25664 | # coding=utf-8
"""
Test sickbeard.config's classes and methods
Classes:
ConfigMigrator
migrate_config
_migrate_v1
_name_to_pattern
_migrate_v2
_migrate_v3
_migrate_v4
_migrate_v5
_migrate_v6
_migrate_v7
_migrate_v8
_migrate_v9
_migrate_v10
Methods
change_https_cert
change_https_key
change_unrar_tool
change_sickrage_background
change_custom_css
change_log_dir
change_nzb_dir
change_torrent_dir
change_tv_download_dir
change_unpack_dir
change_postprocessor_frequency
change_daily_search_frequency
change_backlog_frequency
change_update_frequency
change_showupdate_hour
change_subtitle_finder_frequency
change_version_notify
change_download_propers
change_use_trakt
change_use_subtitles
change_process_automatically
check_section
checkbox_to_value
clean_host
clean_hosts
clean_url
min_max
check_setting_int
check_setting_float
check_setting_str
check_setting_bool
"""
# pylint: disable=line-too-long
import logging
import os.path
import sys
import unittest
import mock
from collections import namedtuple
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from sickbeard import config, scheduler
from configobj import ConfigObj
from rarfile import RarExecError
import sickbeard
class ConfigTestBasic(unittest.TestCase):
"""
Test basic methods in sickbeard.config
"""
def test_check_section(self):
"""
Test check_section
"""
CFG = ConfigObj('config.ini', encoding='UTF-8')
self.assertFalse(config.check_section(CFG, 'General'))
self.assertTrue(config.check_section(CFG, 'General'))
def test_checkbox_to_value(self):
"""
Test checkbox_to_value
"""
self.assertTrue(config.checkbox_to_value(1))
self.assertTrue(config.checkbox_to_value(['option', 'True']))
self.assertEqual(config.checkbox_to_value('0', 'yes', 'no'), 'no')
def test_clean_host(self):
"""
Test clean_host
"""
self.assertEqual(config.clean_host('http://127.0.0.1:8080'), '127.0.0.1:8080')
self.assertEqual(config.clean_host('https://mail.google.com/mail'), 'mail.google.com')
self.assertEqual(config.clean_host('http://localhost:8081/home/displayShow?show=80379#season-10'),
'localhost:8081')
self.assertEqual(config.clean_host('http://testme.co.uk', 9000), 'testme.co.uk:9000') # default port
self.assertEqual(config.clean_host('www.google.com/search'), 'www.google.com')
self.assertEqual(config.clean_host(''), '') # empty host
def test_clean_hosts(self):
"""
Test clean_hosts
"""
dirty_hosts = 'http://127.0.0.1:8080,https://mail.google.com/mail,' \
'http://localhost:8081/home/displayShow?show=80379#season-10,' \
'www.google.com/search,'
clean_result = '127.0.0.1:8080,mail.google.com:5050,localhost:8081,www.google.com:5050'
self.assertEqual(config.clean_hosts(dirty_hosts, '5050'), clean_result)
def test_clean_url(self):
"""
Test cleaning of urls
"""
log = logging.getLogger(__name__)
test = namedtuple('test', 'expected_result dirty clean')
url_tests = [
test(True, "https://subdomain.domain.tld/endpoint", "https://subdomain.domain.tld/endpoint"), # does not add a final /
test(True, "http://www.example.com/folder/", "http://www.example.com/folder/"), # does not remove the final /
test(True, "google.com/xml.rpc", "http://google.com/xml.rpc"), # add scheme if missing
test(True, "google.com", "http://google.com/"), # add scheme if missing and final / if its just the domain
test(True, "scgi:///home/user/.config/path/socket", "scgi:///home/user/.config/path/socket"), # scgi identified as scheme
test(True, None, ''), # None URL returns empty string
test(False, "https://subdomain.domain.tld/endpoint", "http://subdomain.domain.tld/endpoint"), # does not change schemes from https to http
test(False, "http://subdomain.domain.tld/endpoint", "https://subdomain.domain.tld/endpoint"), # ...or vice versa
test(False, "google.com/xml.rpc", "google.com/xml.rpc"), # scheme is always added
test(False, "google.com", "https://google.com/"), # does not default to https
test(False, "http://www.example.com/folder/", "http://www.example.com/folder"), # does not strip final /
test(False, "scgi:///home/user/.config/path/socket", "scgi:///home/user/.config/path/socket/"), # does not add a final /
            test(AttributeError, 1, 1), # non-string URL raises AttributeError
]
for test_url in url_tests:
if issubclass(type(Exception), type(test_url.expected_result)):
with self.assertRaises(test_url.expected_result):
self.assertEqual(config.clean_url(test_url.dirty), test_url.clean)
elif test_url.expected_result is True:
self.assertEqual(config.clean_url(test_url.dirty), test_url.clean)
elif test_url.expected_result is False:
self.assertNotEqual(config.clean_url(test_url.dirty), test_url.clean)
else:
log.error('Test not defined for %s', test_url)
def test_min_max(self):
"""
Test min_max
"""
self.assertEqual(config.min_max('100', default=50, low=50, high=200), 100)
self.assertEqual(config.min_max('25', default=50, low=50, high=200), 50)
self.assertEqual(config.min_max('250', default=50, low=50, high=200), 200)
def test_check_setting_int(self):
"""
Test check_setting_int
"""
# setup
CFG = ConfigObj('config.ini', encoding='UTF-8')
config.check_section(CFG, 'General')
CFG['General']['indexer_timeout'] = 60
CFG['General']['use_icacls'] = 'True'
CFG['General']['use_nzbs'] = 'False'
CFG['General']['status_default'] = None
# normal
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 30), 60)
self.assertEqual(CFG['General']['indexer_timeout'], 60)
# force min/max
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 150, 100, 200), 150)
self.assertEqual(CFG['General']['indexer_timeout'], 150)
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 250, 200, 300, False), 200)
self.assertEqual(CFG['General']['indexer_timeout'], 200)
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 90, 50, 100), 90)
self.assertEqual(CFG['General']['indexer_timeout'], 90)
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 20, 10, 30, False), 30)
self.assertEqual(CFG['General']['indexer_timeout'], 30)
# true/false => int
self.assertEqual(config.check_setting_int(CFG, 'General', 'use_icacls', 1), 1)
self.assertEqual(CFG['General']['use_icacls'], 'True')
self.assertEqual(config.check_setting_int(CFG, 'General', 'use_nzbs', 0), 0)
self.assertEqual(CFG['General']['use_nzbs'], 'False')
# None value type + silent off
self.assertEqual(config.check_setting_int(CFG, 'General', 'status_default', 5, silent=False), 5)
self.assertEqual(CFG['General']['status_default'], 5)
# unmatched section
self.assertEqual(config.check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1), 1)
self.assertEqual(CFG['Subtitles']['subtitles_finder_frequency'], 1)
# wrong def_val/min/max type
self.assertEqual(config.check_setting_int(CFG, 'General', 'indexer_timeout', 'ba', 'min', 'max'), 30)
self.assertEqual(CFG['General']['indexer_timeout'], 30)
def test_check_setting_float(self):
"""
Test check_setting_float
"""
# setup
CFG = ConfigObj('config.ini', encoding='UTF-8')
config.check_section(CFG, 'General')
CFG['General']['fanart_background_opacity'] = 0.5
CFG['General']['log_size'] = None
# normal
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 0.4), 0.5)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.5)
# force min/max
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 0.7, 0.6, 1.0), 0.7)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.7)
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 0.7, 0.8, 1.0, False), 0.8)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.8)
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 0.3, 0.1, 0.4), 0.3)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.3)
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 0.1, 0.1, 0.2, False), 0.2)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.2)
# None value type + silent off
self.assertEqual(config.check_setting_float(CFG, 'General', 'log_size', 10.0, silent=False), 10.0)
self.assertEqual(CFG['General']['log_size'], 10.0)
# unmatched section
self.assertEqual(config.check_setting_float(CFG, 'Kodi', 'log_size', 2.5), 2.5)
self.assertEqual(CFG['Kodi']['log_size'], 2.5)
# wrong def_val/min/max type
self.assertEqual(config.check_setting_float(CFG, 'General', 'fanart_background_opacity', 'ba', 'min', 'max'), 0.2)
self.assertEqual(CFG['General']['fanart_background_opacity'], 0.2)
def test_check_setting_str(self):
"""
Test check_setting_str
"""
# setup
CFG = ConfigObj('config.ini', encoding='UTF-8')
config.check_section(CFG, 'General')
CFG['General']['process_method'] = "copy"
CFG['General']['git_password'] = "SFa342FHb_"
CFG['General']['extra_scripts'] = None
# normal
self.assertEqual(config.check_setting_str(CFG, 'General', 'process_method', 'move'), 'copy')
self.assertEqual(config.check_setting_str(CFG, 'General', 'git_password', '', silent=False, censor_log=True),
'SFa342FHb_')
# None value type
self.assertEqual(config.check_setting_str(CFG, 'General', 'extra_scripts', ''), '')
# unmatched section
self.assertEqual(config.check_setting_str(CFG, 'Subtitles', 'subtitles_languages', 'eng'), 'eng')
# wrong def_val type
self.assertEqual(config.check_setting_str(CFG, 'General', 'process_method', ['fail']), 'copy')
def test_check_setting_bool(self):
"""
Test check_setting_bool
"""
# setup
CFG = ConfigObj('config.ini', encoding='UTF-8')
config.check_section(CFG, 'General')
CFG['General']['debug'] = True
CFG['General']['season_folders_default'] = False
CFG['General']['dbdebug'] = None
# normal
self.assertTrue(config.check_setting_bool(CFG, 'General', 'debug'))
self.assertFalse(config.check_setting_bool(CFG, 'General', 'season_folders_default', def_val=True))
# None value type
self.assertFalse(config.check_setting_bool(
CFG, 'General', 'dbdebug', False))
# unmatched item
self.assertTrue(config.check_setting_bool(CFG, 'General', 'git_reset', def_val=True))
# unmatched section
self.assertFalse(config.check_setting_bool(CFG, 'Subtitles', 'use_subtitles', def_val=False))
# wrong def_val type, silent = off
self.assertTrue(config.check_setting_bool(
CFG, 'General', 'debug', def_val=['fail'], silent=False))
class ConfigTestChanges(unittest.TestCase):
"""
Test change methods in sickbeard.config
"""
def test_change_https_cert(self):
"""
Test change_https_cert
"""
sickbeard.HTTPS_CERT = 'server.crt' # Initialize
self.assertTrue(config.change_https_cert(''))
self.assertTrue(config.change_https_cert('server.crt'))
self.assertFalse(config.change_https_cert('/:/server.crt')) # INVALID
sickbeard.HTTPS_CERT = ''
def test_change_https_key(self):
"""
Test change_https_key
"""
sickbeard.HTTPS_KEY = 'server.key' # Initialize
self.assertTrue(config.change_https_key(''))
self.assertTrue(config.change_https_key('server.key'))
self.assertFalse(config.change_https_key('/:/server.key')) # INVALID
sickbeard.HTTPS_KEY = ''
@mock.patch('platform.system', mock.MagicMock(return_value="Windows"))
@mock.patch('sickbeard.helpers.download_file', mock.MagicMock(return_value=True))
@mock.patch('sickbeard.helpers.extractZip', mock.MagicMock(return_value=True))
def test_change_unrar_tool(self):
"""
Test change_unrar_tool
"""
custom_check_mock = mock.patch('rarfile.custom_check', mock.MagicMock())
custom_check_mock.new.side_effect = [RarExecError(), True]
with custom_check_mock,\
mock.patch('os.path.exists', mock.MagicMock(return_value=True)),\
mock.patch('os.path.getsize', mock.MagicMock(return_value=447440)),\
mock.patch('os.remove'):
self.assertTrue(config.change_unrar_tool('unrar', 'bsdtar'))
my_environ = mock.patch.dict(os.environ,
{'ProgramFiles': 'C:\\Program Files (x86)\\'}, clear=True)
with my_environ:
self.assertFalse(config.change_unrar_tool('unrar', 'bsdtar'))
sickbeard.PROG_DIR = 'C:\\SickRage'
my_environ = mock.patch.dict(os.environ,
{'ProgramFiles': 'C:\\Program Files (x86)\\',
'ProgramFiles(x86)': 'C:\\Program Files (x86)\\',
'ProgramW6432': 'C:\\Program Files\\'}, clear=True)
custom_check_mock.new.side_effect = [RarExecError(), RarExecError(), True, True, True, True]
isfile_mock = mock.patch('os.path.isfile', mock.MagicMock())
isfile_mock.new.side_effect = [True, False, True]
with custom_check_mock, isfile_mock, my_environ:
self.assertTrue(config.change_unrar_tool('unrar', 'bsdtar'))
def test_change_sickrage_background(self):
"""
Test change_sickrage_background
"""
sickbeard.SICKRAGE_BACKGROUND_PATH = '' # Initialize
self.assertTrue(config.change_sickrage_background(__file__))
self.assertFalse(config.change_sickrage_background('not_real.jpg'))
self.assertTrue(config.change_sickrage_background(''))
def test_change_custom_css(self):
"""
Test change_custom_css
"""
sickbeard.CUSTOM_CSS_PATH = '' # Initialize
self.assertFalse(config.change_custom_css(__file__)) # not a css file
self.assertFalse(config.change_custom_css('not_real.jpg')) # doesn't exist
self.assertFalse(config.change_custom_css('sickrage_tests')) # isn't a file
css_file = os.path.join(os.path.dirname(__file__), 'custom.css')
with open(css_file, 'w') as f:
f.write('table.main {\n width: 100%;\n}')
self.assertTrue(config.change_custom_css(css_file)) # real
os.remove(css_file)
self.assertTrue(config.change_custom_css('')) # empty
def test_change_log_dir(self):
"""
Test change_log_dir
"""
sickbeard.DATA_DIR = os.path.dirname(__file__)
sickbeard.ACTUAL_LOG_DIR = ''
sickbeard.LOG_DIR = os.path.join(sickbeard.DATA_DIR, sickbeard.ACTUAL_LOG_DIR)
sickbeard.WEB_LOG = False
self.assertFalse(config.change_log_dir('/:/Logs', True))
self.assertTrue(config.change_log_dir('Logs', True))
def test_change_nzb_dir(self):
"""
Test change_nzb_dir
"""
sickbeard.NZB_DIR = ''
self.assertTrue(config.change_nzb_dir('cache'))
self.assertFalse(config.change_nzb_dir('/:/NZB_Downloads')) # INVALID
self.assertTrue(config.change_nzb_dir(''))
def test_change_torrent_dir(self):
"""
Test change_torrent_dir
"""
sickbeard.TORRENT_DIR = ''
self.assertTrue(config.change_torrent_dir('cache'))
self.assertFalse(config.change_torrent_dir('/:/Downloads')) # INVALID
self.assertTrue(config.change_torrent_dir(''))
def test_change_tv_download_dir(self):
"""
Test change_tv_download_dir
"""
sickbeard.TV_DOWNLOAD_DIR = ''
self.assertTrue(config.change_tv_download_dir('cache'))
self.assertFalse(config.change_tv_download_dir('/:/Downloads/Completed')) # INVALID
self.assertTrue(config.change_tv_download_dir(''))
def test_change_unpack_dir(self):
"""
Test change_unpack_dir
"""
sickbeard.UNPACK_DIR = ''
self.assertTrue(config.change_unpack_dir('cache'))
self.assertFalse(config.change_unpack_dir('/:/Extract')) # INVALID
self.assertTrue(config.change_unpack_dir(''))
def test_change_auto_pp_freq(self):
"""
Test change_postprocessor_frequency
"""
sickbeard.autoPostProcessorScheduler = scheduler.Scheduler(lambda:None) # dummy
config.change_postprocessor_frequency(0)
self.assertEqual(sickbeard.AUTOPOSTPROCESSOR_FREQUENCY, sickbeard.MIN_AUTOPOSTPROCESSOR_FREQUENCY)
config.change_postprocessor_frequency('s')
self.assertEqual(sickbeard.AUTOPOSTPROCESSOR_FREQUENCY, sickbeard.DEFAULT_AUTOPOSTPROCESSOR_FREQUENCY)
config.change_postprocessor_frequency(60)
self.assertEqual(sickbeard.AUTOPOSTPROCESSOR_FREQUENCY, 60)
def test_change_daily_search_freq(self):
"""
Test change_daily_search_frequency
"""
sickbeard.dailySearchScheduler = scheduler.Scheduler(lambda:None) # dummy
config.change_daily_search_frequency(0)
self.assertEqual(sickbeard.DAILYSEARCH_FREQUENCY, sickbeard.MIN_DAILYSEARCH_FREQUENCY)
config.change_daily_search_frequency('s')
self.assertEqual(sickbeard.DAILYSEARCH_FREQUENCY, sickbeard.DEFAULT_DAILYSEARCH_FREQUENCY)
config.change_daily_search_frequency(60)
self.assertEqual(sickbeard.DAILYSEARCH_FREQUENCY, 60)
def test_change_backlog_freq(self):
"""
Test change_backlog_frequency
"""
sickbeard.backlogSearchScheduler = scheduler.Scheduler(lambda:None) # dummy
sickbeard.DAILYSEARCH_FREQUENCY = sickbeard.DEFAULT_DAILYSEARCH_FREQUENCY # needed
config.change_backlog_frequency(0)
self.assertEqual(sickbeard.BACKLOG_FREQUENCY, sickbeard.MIN_BACKLOG_FREQUENCY)
config.change_backlog_frequency('s')
self.assertEqual(sickbeard.BACKLOG_FREQUENCY, sickbeard.MIN_BACKLOG_FREQUENCY)
config.change_backlog_frequency(1440)
self.assertEqual(sickbeard.BACKLOG_FREQUENCY, 1440)
def test_change_update_freq(self):
"""
Test change_update_frequency
"""
sickbeard.versionCheckScheduler = scheduler.Scheduler(lambda:None) # dummy
config.change_update_frequency(0)
self.assertEqual(sickbeard.UPDATE_FREQUENCY, sickbeard.MIN_UPDATE_FREQUENCY)
config.change_update_frequency('s')
self.assertEqual(sickbeard.UPDATE_FREQUENCY, sickbeard.DEFAULT_UPDATE_FREQUENCY)
config.change_update_frequency(60)
self.assertEqual(sickbeard.UPDATE_FREQUENCY, 60)
def test_change_show_update_hour(self):
"""
Test change_showupdate_hour
"""
sickbeard.showUpdateScheduler = scheduler.Scheduler(lambda:None) # dummy
config.change_showupdate_hour(-2)
self.assertEqual(sickbeard.SHOWUPDATE_HOUR, 0)
config.change_showupdate_hour('s')
self.assertEqual(sickbeard.SHOWUPDATE_HOUR, sickbeard.DEFAULT_SHOWUPDATE_HOUR)
config.change_showupdate_hour(60)
self.assertEqual(sickbeard.SHOWUPDATE_HOUR, 0)
config.change_showupdate_hour(12)
self.assertEqual(sickbeard.SHOWUPDATE_HOUR, 12)
def test_change_sub_finder_freq(self):
"""
Test change_subtitle_finder_frequency
"""
config.change_subtitle_finder_frequency('')
self.assertEqual(sickbeard.SUBTITLES_FINDER_FREQUENCY, 1)
config.change_subtitle_finder_frequency('s')
self.assertEqual(sickbeard.SUBTITLES_FINDER_FREQUENCY, 1)
config.change_subtitle_finder_frequency(8)
self.assertEqual(sickbeard.SUBTITLES_FINDER_FREQUENCY, 8)
def test_change_version_notify(self):
"""
Test change_version_notify
"""
class dummy_action(object): # needed for *scheduler.action.forceRun()
def __init__(self):
self.amActive = False
sickbeard.versionCheckScheduler = scheduler.Scheduler(dummy_action()) # dummy
sickbeard.VERSION_NOTIFY = True
config.change_version_notify(True) # no change
self.assertTrue(sickbeard.VERSION_NOTIFY)
config.change_version_notify('stop') # = defaults to False
self.assertFalse(sickbeard.VERSION_NOTIFY and sickbeard.versionCheckScheduler.enable)
config.change_version_notify('on')
self.assertTrue(sickbeard.VERSION_NOTIFY and sickbeard.versionCheckScheduler.enable)
def test_change_download_propers(self):
"""
Test change_download_propers
"""
sickbeard.properFinderScheduler = scheduler.Scheduler(lambda:None) # dummy
sickbeard.DOWNLOAD_PROPERS = True
config.change_download_propers(True) # no change
self.assertTrue(sickbeard.DOWNLOAD_PROPERS)
config.change_download_propers('stop') # = defaults to False
self.assertFalse(sickbeard.DOWNLOAD_PROPERS and sickbeard.properFinderScheduler.enable)
config.change_download_propers('on')
self.assertTrue(sickbeard.DOWNLOAD_PROPERS and sickbeard.properFinderScheduler.enable)
def test_change_use_trakt(self):
"""
Test change_use_trakt
"""
sickbeard.traktCheckerScheduler = scheduler.Scheduler(lambda:None) # dummy
sickbeard.USE_TRAKT = True
config.change_use_trakt(True) # no change
self.assertTrue(sickbeard.USE_TRAKT)
config.change_use_trakt('stop') # = defaults to False
self.assertFalse(sickbeard.USE_TRAKT and sickbeard.traktCheckerScheduler.enable)
config.change_use_trakt('on')
self.assertTrue(sickbeard.USE_TRAKT and sickbeard.traktCheckerScheduler.enable)
def test_change_use_subtitles(self):
"""
Test change_use_subtitles
"""
sickbeard.subtitlesFinderScheduler = scheduler.Scheduler(lambda:None) # dummy
sickbeard.USE_SUBTITLES = True
config.change_use_subtitles(True) # no change
self.assertTrue(sickbeard.USE_SUBTITLES)
config.change_use_subtitles('stop') # = defaults to False
self.assertFalse(sickbeard.USE_SUBTITLES and sickbeard.subtitlesFinderScheduler.enable)
config.change_use_subtitles('on')
self.assertTrue(sickbeard.USE_SUBTITLES and sickbeard.subtitlesFinderScheduler.enable)
def test_change_process_auto(self):
"""
Test change_process_automatically
"""
sickbeard.autoPostProcessorScheduler = scheduler.Scheduler(lambda:None) # dummy
sickbeard.PROCESS_AUTOMATICALLY = True
config.change_process_automatically(True) # no change
self.assertTrue(sickbeard.PROCESS_AUTOMATICALLY)
config.change_process_automatically('stop') # = defaults to False
self.assertFalse(sickbeard.PROCESS_AUTOMATICALLY and sickbeard.autoPostProcessorScheduler.enable)
config.change_process_automatically('on')
self.assertTrue(sickbeard.PROCESS_AUTOMATICALLY and sickbeard.autoPostProcessorScheduler.enable)
class ConfigTestMigrator(unittest.TestCase):
"""
Test the sickbeard.config.ConfigMigrator class
"""
@unittest.expectedFailure # Not fully implemented
def test_config_migrator(self):
"""
Test migrate_config
"""
# TODO: Assert the 'too-advanced-config-version' error
CFG = ConfigObj('config.ini', encoding='UTF-8')
config.check_section(CFG, 'General')
CFG['General']['config_version'] = 0
sickbeard.CONFIG_VERSION = 11
sickbeard.CONFIG_FILE = 'config.ini'
migrator = config.ConfigMigrator(CFG)
migrator.migrate_config()
if __name__ == '__main__':
logging.basicConfig(stream=sys.stderr)
logging.getLogger(__name__).setLevel(logging.DEBUG)
SUITE = unittest.TestLoader().loadTestsFromTestCase(ConfigTestBasic)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(ConfigTestChanges)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(ConfigTestMigrator)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| gpl-3.0 |
sorenk/ansible | test/units/modules/network/nxos/test_nxos_acl_interface.py | 57 | 2910 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_acl_interface
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosAclInterfaceModule(TestNxosModule):
module = nxos_acl_interface
def setUp(self):
super(TestNxosAclInterfaceModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_acl_interface.run_commands')
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch('ansible.modules.network.nxos.nxos_acl_interface.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestNxosAclInterfaceModule, self).tearDown()
self.mock_run_commands.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, device=''):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
command = item['command']
except ValueError:
command = item
filename = '%s.txt' % str(command).split(' | ')[0].replace(' ', '_')
output.append(load_fixture('nxos_acl_interface', filename))
return output
self.run_commands.side_effect = load_from_file
self.load_config.return_value = None
def test_nxos_acl_interface(self):
set_module_args(dict(name='ANSIBLE', interface='ethernet1/41', direction='egress'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['interface ethernet1/41', 'ip access-group ANSIBLE out'])
def test_nxos_acl_interface_remove(self):
set_module_args(dict(name='copp-system-p-acl-bgp', interface='ethernet1/41',
direction='egress', state='absent'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['interface ethernet1/41', 'no ip access-group copp-system-p-acl-bgp out'])
| gpl-3.0 |
40223110/2015CDAFinal_test2 | static/Brython3.1.0-20150301-090019/Lib/_sysconfigdata.py | 731 | 18167 | build_time_vars={'HAVE_SYS_WAIT_H': 1, 'HAVE_UTIL_H': 0, 'HAVE_SYMLINKAT': 1, 'HAVE_LIBSENDFILE': 0, 'SRCDIRS': 'Parser Grammar Objects Python Modules Mac', 'SIZEOF_OFF_T': 8, 'BASECFLAGS': '-Wno-unused-result', 'HAVE_UTIME_H': 1, 'EXTRAMACHDEPPATH': '', 'HAVE_SYS_TIME_H': 1, 'CFLAGSFORSHARED': '-fPIC', 'HAVE_HYPOT': 1, 'PGSRCS': '\\', 'HAVE_LIBUTIL_H': 0, 'HAVE_COMPUTED_GOTOS': 1, 'HAVE_LUTIMES': 1, 'HAVE_MAKEDEV': 1, 'HAVE_REALPATH': 1, 'HAVE_LINUX_TIPC_H': 1, 'MULTIARCH': 'i386-linux-gnu', 'HAVE_GETWD': 1, 'HAVE_GCC_ASM_FOR_X64': 0, 'HAVE_INET_PTON': 1, 'HAVE_GETHOSTBYNAME_R_6_ARG': 1, 'SIZEOF__BOOL': 1, 'HAVE_ZLIB_COPY': 1, 'ASDLGEN': 'python3.3 ../Parser/asdl_c.py', 'GRAMMAR_INPUT': '../Grammar/Grammar', 'HOST_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_SCHED_RR_GET_INTERVAL': 1, 'HAVE_BLUETOOTH_H': 0, 'HAVE_MKFIFO': 1, 'TIMEMODULE_LIB': 0, 'LIBM': '-lm', 'PGENOBJS': '\\ \\', 'PYTHONFRAMEWORK': '', 'GETPGRP_HAVE_ARG': 0, 'HAVE_MMAP': 1, 'SHLIB_SUFFIX': '.so', 'SIZEOF_FLOAT': 4, 'HAVE_RENAMEAT': 1, 'HAVE_LANGINFO_H': 1, 'HAVE_STDLIB_H': 1, 'PY_CORE_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE', 'HAVE_BROKEN_PIPE_BUF': 0, 'HAVE_CONFSTR': 1, 'HAVE_SIGTIMEDWAIT': 1, 'HAVE_FTELLO': 1, 'READELF': 'readelf', 'HAVE_SIGALTSTACK': 1, 'TESTTIMEOUT': 3600, 'PYTHONPATH': ':plat-i386-linux-gnu', 'SIZEOF_WCHAR_T': 4, 'LIBOBJS': '', 'HAVE_SYSCONF': 1, 'MAKESETUP': '../Modules/makesetup', 'HAVE_UTIMENSAT': 1, 'HAVE_FCHOWNAT': 1, 'HAVE_WORKING_TZSET': 1, 'HAVE_FINITE': 1, 'HAVE_ASINH': 1, 'HAVE_SETEUID': 1, 'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'HAVE_SETGROUPS': 1, 'PARSER_OBJS': '\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o', 'HAVE_MBRTOWC': 1, 'SIZEOF_INT': 4, 'HAVE_STDARG_PROTOTYPES': 1, 'TM_IN_SYS_TIME': 0, 'HAVE_SYS_TIMES_H': 1, 'HAVE_LCHOWN': 1, 'HAVE_SSIZE_T': 1, 'HAVE_PAUSE': 1, 'SYSLIBS': '-lm', 'POSIX_SEMAPHORES_NOT_ENABLED': 0, 'HAVE_DEVICE_MACROS': 1, 'BLDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBSUBDIRS': 'tkinter tkinter/test tkinter/test/test_tkinter \\', 'HAVE_SYS_UN_H': 1, 'HAVE_SYS_STAT_H': 1, 'VPATH': '..', 'INCLDIRSTOMAKE': '/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m', 'HAVE_BROKEN_SEM_GETVALUE': 0, 'HAVE_TIMEGM': 1, 'PACKAGE_VERSION': 0, 'MAJOR_IN_SYSMACROS': 0, 'HAVE_ATANH': 1, 'HAVE_GAI_STRERROR': 1, 'HAVE_SYS_POLL_H': 1, 'SIZEOF_PTHREAD_T': 4, 'SIZEOF_FPOS_T': 16, 'HAVE_CTERMID': 1, 'HAVE_TMPFILE': 1, 'HAVE_SETUID': 1, 'CXX': 'i686-linux-gnu-g++ -pthread', 'srcdir': '..', 'HAVE_UINT32_T': 1, 'HAVE_ADDRINFO': 1, 'HAVE_GETSPENT': 1, 'SIZEOF_DOUBLE': 8, 'HAVE_INT32_T': 1, 'LIBRARY_OBJS_OMIT_FROZEN': '\\', 'HAVE_FUTIMES': 1, 'CONFINCLUDEPY': '/usr/include/python3.3m', 'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1, 'LIBFFI_INCLUDEDIR': '', 'HAVE_SETGID': 1, 'HAVE_UINT64_T': 1, 'EXEMODE': 755, 'UNIVERSALSDK': '', 'HAVE_LIBDL': 1, 'HAVE_GETNAMEINFO': 1, 'HAVE_STDINT_H': 1, 'COREPYTHONPATH': ':plat-i386-linux-gnu', 'HAVE_SOCKADDR_STORAGE': 1, 'HAVE_WAITID': 1, 'EXTRAPLATDIR': '@EXTRAPLATDIR@', 
'HAVE_ACCEPT4': 1, 'RUNSHARED': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:', 'EXE': '', 'HAVE_SIGACTION': 1, 'HAVE_CHOWN': 1, 'HAVE_GETLOGIN': 1, 'HAVE_TZNAME': 0, 'PACKAGE_NAME': 0, 'HAVE_GETPGID': 1, 'HAVE_GLIBC_MEMMOVE_BUG': 0, 'BUILD_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_LINUX_CAN_H': 1, 'DYNLOADFILE': 'dynload_shlib.o', 'HAVE_PWRITE': 1, 'BUILDEXE': '', 'HAVE_OPENPTY': 1, 'HAVE_LOCKF': 1, 'HAVE_COPYSIGN': 1, 'HAVE_PREAD': 1, 'HAVE_DLOPEN': 1, 'HAVE_SYS_KERN_CONTROL_H': 0, 'PY_FORMAT_LONG_LONG': '"ll"', 'HAVE_TCSETPGRP': 1, 'HAVE_SETSID': 1, 'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0, 'HAVE_STRING_H': 1, 'LDLIBRARY': 'libpython3.3m.so', 'INSTALL_SCRIPT': '/usr/bin/install -c', 'HAVE_SYS_XATTR_H': 1, 'HAVE_CURSES_IS_TERM_RESIZED': 1, 'HAVE_TMPNAM_R': 1, 'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */", 'WANT_SIGFPE_HANDLER': 1, 'HAVE_INT64_T': 1, 'HAVE_STAT_TV_NSEC': 1, 'HAVE_SYS_MKDEV_H': 0, 'HAVE_BROKEN_POLL': 0, 'HAVE_IF_NAMEINDEX': 1, 'HAVE_GETPWENT': 1, 'PSRCS': '\\', 'RANLIB': 'ranlib', 'HAVE_WCSCOLL': 1, 'WITH_NEXT_FRAMEWORK': 0, 'ASDLGEN_FILES': '../Parser/asdl.py ../Parser/asdl_c.py', 'HAVE_RL_PRE_INPUT_HOOK': 1, 'PACKAGE_URL': 0, 'SHLIB_EXT': 0, 'HAVE_SYS_LOADAVG_H': 0, 'HAVE_LIBIEEE': 0, 'HAVE_SEM_OPEN': 1, 'HAVE_TERM_H': 1, 'IO_OBJS': '\\', 'IO_H': 'Modules/_io/_iomodule.h', 'HAVE_STATVFS': 1, 'VERSION': '3.3', 'HAVE_GETC_UNLOCKED': 1, 'MACHDEPS': 'plat-i386-linux-gnu @EXTRAPLATDIR@', 'SUBDIRSTOO': 'Include Lib Misc', 'HAVE_SETREUID': 1, 'HAVE_ERFC': 1, 'HAVE_SETRESUID': 1, 'LINKFORSHARED': '-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions', 'HAVE_SYS_TYPES_H': 1, 'HAVE_GETPAGESIZE': 1, 'HAVE_SETEGID': 1, 'HAVE_PTY_H': 1, 'HAVE_STRUCT_STAT_ST_FLAGS': 0, 'HAVE_WCHAR_H': 1, 'HAVE_FSEEKO': 1, 'Py_ENABLE_SHARED': 1, 'HAVE_SIGRELSE': 1, 'HAVE_PTHREAD_INIT': 0, 'FILEMODE': 644, 'HAVE_SYS_RESOURCE_H': 1, 'HAVE_READLINKAT': 1, 'PYLONG_BITS_IN_DIGIT': 0, 'LINKCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SETLOCALE': 1, 'HAVE_CHROOT': 1, 'HAVE_OPENAT': 1, 'HAVE_FEXECVE': 1, 'LDCXXSHARED': 'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions', 'DIST': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy', 'HAVE_MKNOD': 1, 'PY_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_BROKEN_MBSTOWCS': 0, 'LIBRARY_OBJS': '\\', 'HAVE_LOG1P': 1, 'SIZEOF_VOID_P': 4, 'HAVE_FCHOWN': 1, 'PYTHONFRAMEWORKPREFIX': '', 'HAVE_LIBDLD': 0, 'HAVE_TGAMMA': 1, 'HAVE_ERRNO_H': 1, 'HAVE_IO_H': 0, 'OTHER_LIBTOOL_OPT': '', 'HAVE_POLL_H': 1, 'PY_CPPFLAGS': '-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2', 'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax', 'GRAMMAR_H': 'Include/graminit.h', 'TANH_PRESERVES_ZERO_SIGN': 1, 'HAVE_GETLOADAVG': 1, 'UNICODE_DEPS': '\\ \\', 'HAVE_GETCWD': 1, 'MANDIR': '/usr/share/man', 'MACHDESTLIB': '/usr/lib/python3.3', 'GRAMMAR_C': 'Python/graminit.c', 'PGOBJS': '\\', 'HAVE_DEV_PTMX': 1, 'HAVE_UINTPTR_T': 1, 'HAVE_SCHED_SETAFFINITY': 1, 'PURIFY': '', 'HAVE_DECL_ISINF': 1, 'HAVE_RL_CALLBACK': 1, 'HAVE_WRITEV': 1, 'HAVE_GETHOSTBYNAME_R_5_ARG': 0, 'HAVE_SYS_AUDIOIO_H': 0, 'EXT_SUFFIX': '.cpython-33m.so', 'SIZEOF_LONG_LONG': 8, 'DLINCLDIR': '.', 'HAVE_PATHCONF': 1, 'HAVE_UNLINKAT': 1, 'MKDIR_P': '/bin/mkdir -p', 'HAVE_ALTZONE': 0, 'SCRIPTDIR': '/usr/lib', 'OPCODETARGETGEN_FILES': '\\', 'HAVE_GETSPNAM': 1, 'HAVE_SYS_TERMIO_H': 0, 'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE': 0, 'HAVE_PTHREAD_H': 1, 'Py_DEBUG': 0, 'HAVE_STRUCT_STAT_ST_BLOCKS': 1, 'X87_DOUBLE_ROUNDING': 1, 'SIZEOF_TIME_T': 4, 'HAVE_DYNAMIC_LOADING': 1, 'HAVE_DIRECT_H': 0, 'SRC_GDB_HOOKS': '../Tools/gdb/libpython.py', 'HAVE_GETADDRINFO': 1, 'HAVE_BROKEN_NICE': 0, 'HAVE_DIRENT_H': 1, 'HAVE_WCSXFRM': 1, 'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1, 'HAVE_FSTATVFS': 1, 'PYTHON': 'python', 'HAVE_OSX105_SDK': 0, 'BINDIR': '/usr/bin', 'TESTPYTHON': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python', 'ARFLAGS': 'rc', 'PLATDIR': 'plat-i386-linux-gnu', 'HAVE_ASM_TYPES_H': 1, 'PY3LIBRARY': 'libpython3.so', 'HAVE_PLOCK': 0, 'FLOCK_NEEDS_LIBBSD': 0, 'WITH_TSC': 0, 'HAVE_LIBREADLINE': 1, 'MACHDEP': 'linux', 'HAVE_SELECT': 1, 'LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_HSTRERROR': 1, 'SOABI': 'cpython-33m', 'HAVE_GETTIMEOFDAY': 1, 'HAVE_LIBRESOLV': 0, 'HAVE_UNSETENV': 1, 'HAVE_TM_ZONE': 1, 'HAVE_GETPGRP': 1, 'HAVE_FLOCK': 1, 'HAVE_SYS_BSDTTY_H': 0, 'SUBDIRS': '', 'PYTHONFRAMEWORKINSTALLDIR': '', 'PACKAGE_BUGREPORT': 0, 'HAVE_CLOCK': 1, 'HAVE_GETPEERNAME': 1, 'SIZEOF_PID_T': 4, 'HAVE_CONIO_H': 0, 'HAVE_FSTATAT': 1, 'HAVE_NETPACKET_PACKET_H': 1, 'HAVE_WAIT3': 1, 'DESTPATH': '', 'HAVE_STAT_TV_NSEC2': 0, 'HAVE_GETRESGID': 1, 'HAVE_UCS4_TCL': 0, 'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0, 'HAVE_TIMES': 1, 'HAVE_UNAME': 1, 'HAVE_ERF': 1, 'SIZEOF_SHORT': 2, 'HAVE_NCURSES_H': 1, 'HAVE_SYS_SENDFILE_H': 1, 'HAVE_CTERMID_R': 0, 'HAVE_TMPNAM': 1, 'prefix': '/usr', 'HAVE_NICE': 1, 'WITH_THREAD': 1, 'LN': 'ln', 'TESTRUNNER': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py', 'HAVE_SIGINTERRUPT': 1, 'HAVE_SETPGID': 1, 'RETSIGTYPE': 'void', 'HAVE_SCHED_GET_PRIORITY_MAX': 1, 'HAVE_SYS_SYS_DOMAIN_H': 0, 'HAVE_SYS_DIR_H': 0, 'HAVE__GETPTY': 0, 'HAVE_BLUETOOTH_BLUETOOTH_H': 1, 'HAVE_BIND_TEXTDOMAIN_CODESET': 1, 'HAVE_POLL': 1, 'PYTHON_OBJS': '\\', 'HAVE_WAITPID': 1, 'USE_INLINE': 1, 'HAVE_FUTIMENS': 1, 'USE_COMPUTED_GOTOS': 1, 'MAINCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SOCKETPAIR': 1, 'HAVE_PROCESS_H': 0, 'HAVE_SETVBUF': 1, 'HAVE_FDOPENDIR': 1, 'CONFINCLUDEDIR': '/usr/include', 'BINLIBDEST': '/usr/lib/python3.3', 'HAVE_SYS_IOCTL_H': 1, 'HAVE_SYSEXITS_H': 1, 'LDLAST': '', 'HAVE_SYS_FILE_H': 1, 'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1, 'HAVE_RL_COMPLETION_MATCHES': 1, 'HAVE_TCGETPGRP': 1, 'SIZEOF_SIZE_T': 4, 'HAVE_EPOLL_CREATE1': 1, 'HAVE_SYS_SELECT_H': 1, 'HAVE_CLOCK_GETTIME': 1, 'CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HAVE_SNPRINTF': 1, 'BLDLIBRARY': '-lpython3.3m', 'PARSER_HEADERS': '\\', 
'SO': '.so', 'LIBRARY': 'libpython3.3m.a', 'HAVE_FPATHCONF': 1, 'HAVE_TERMIOS_H': 1, 'HAVE_BROKEN_PTHREAD_SIGMASK': 0, 'AST_H': 'Include/Python-ast.h', 'HAVE_GCC_UINT128_T': 0, 'HAVE_ACOSH': 1, 'MODOBJS': 'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o', 'AST_C': 'Python/Python-ast.c', 'HAVE_SYS_NDIR_H': 0, 'DESTDIRS': '/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload', 'HAVE_SIGNAL_H': 1, 'PACKAGE_TARNAME': 0, 'HAVE_GETPRIORITY': 1, 'INCLUDEDIR': '/usr/include', 'HAVE_INTTYPES_H': 1, 'SIGNAL_OBJS': '', 'HAVE_READV': 1, 'HAVE_SETHOSTNAME': 1, 'MODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'CC': 'i686-linux-gnu-gcc -pthread', 'HAVE_LCHMOD': 0, 'SIZEOF_UINTPTR_T': 4, 'LIBPC': '/usr/lib/i386-linux-gnu/pkgconfig', 'BYTESTR_DEPS': '\\', 'HAVE_MKDIRAT': 1, 'LIBPL': '/usr/lib/python3.3/config-3.3m-i386-linux-gnu', 'HAVE_SHADOW_H': 1, 'HAVE_SYS_EVENT_H': 0, 'INSTALL': '/usr/bin/install -c', 'HAVE_GCC_ASM_FOR_X87': 1, 'HAVE_BROKEN_UNSETENV': 0, 'BASECPPFLAGS': '', 'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0, 'HAVE_STRUCT_STAT_ST_RDEV': 1, 'HAVE_SEM_UNLINK': 1, 'BUILDPYTHON': 'python', 'HAVE_RL_CATCH_SIGNAL': 1, 'HAVE_DECL_TZNAME': 0, 'RESSRCDIR': 'Mac/Resources/framework', 'HAVE_PTHREAD_SIGMASK': 1, 'HAVE_UTIMES': 1, 'DISTDIRS': 'Include Lib Misc Ext-dummy', 'HAVE_FDATASYNC': 1, 'HAVE_USABLE_WCHAR_T': 0, 'PY_FORMAT_SIZE_T': '"z"', 'HAVE_SCHED_SETSCHEDULER': 1, 'VA_LIST_IS_ARRAY': 0, 'HAVE_LINUX_NETLINK_H': 1, 'HAVE_SETREGID': 1, 'HAVE_STROPTS_H': 1, 'LDVERSION': '3.3m', 'abs_builddir': '/build/buildd/python3.3-3.3.1/build-shared', 'SITEPATH': '', 'HAVE_GETHOSTBYNAME': 0, 'HAVE_SIGPENDING': 1, 'HAVE_KQUEUE': 0, 'HAVE_SYNC': 1, 'HAVE_GETSID': 1, 'HAVE_ROUND': 1, 'HAVE_STRFTIME': 1, 'AST_H_DIR': 'Include', 'HAVE_PIPE2': 1, 'AST_C_DIR': 'Python', 'TESTPYTHONOPTS': '', 'HAVE_DEV_PTC': 0, 'GETTIMEOFDAY_NO_TZ': 0, 'HAVE_NET_IF_H': 1, 'HAVE_SENDFILE': 1, 'HAVE_SETPGRP': 1, 'HAVE_SEM_GETVALUE': 1, 'CONFIGURE_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'DLLLIBRARY': '', 'PYTHON_FOR_BUILD': './python -E', 'SETPGRP_HAVE_ARG': 0, 'HAVE_INET_ATON': 1, 'INSTALL_SHARED': '/usr/bin/install -c -m 555', 'WITH_DOC_STRINGS': 1, 'OPCODETARGETS_H': '\\', 'HAVE_INITGROUPS': 1, 'HAVE_LINKAT': 1, 'BASEMODLIBS': '', 'SGI_ABI': '', 'HAVE_SCHED_SETPARAM': 1, 'OPT': '-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes', 'HAVE_POSIX_FADVISE': 1, 'datarootdir': '/usr/share', 'HAVE_MEMRCHR': 1, 'HGTAG': '', 'HAVE_MEMMOVE': 1, 'HAVE_GETRESUID': 1, 'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0, 
'HAVE_LSTAT': 1, 'AR': 'ar', 'HAVE_WAIT4': 1, 'HAVE_SYS_MODEM_H': 0, 'INSTSONAME': 'libpython3.3m.so.1.0', 'HAVE_SYS_STATVFS_H': 1, 'HAVE_LGAMMA': 1, 'HAVE_PROTOTYPES': 1, 'HAVE_SYS_UIO_H': 1, 'MAJOR_IN_MKDEV': 0, 'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\', 'HAVE_SYS_DEVPOLL_H': 0, 'HAVE_CHFLAGS': 0, 'HAVE_FSYNC': 1, 'HAVE_FCHMOD': 1, 'INCLUDEPY': '/usr/include/python3.3m', 'HAVE_SEM_TIMEDWAIT': 1, 'LDLIBRARYDIR': '', 'HAVE_STRUCT_TM_TM_ZONE': 1, 'HAVE_CURSES_H': 1, 'TIME_WITH_SYS_TIME': 1, 'HAVE_DUP2': 1, 'ENABLE_IPV6': 1, 'WITH_VALGRIND': 0, 'HAVE_SETITIMER': 1, 'THREADOBJ': 'Python/thread.o', 'LOCALMODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'HAVE_MEMORY_H': 1, 'HAVE_GETITIMER': 1, 'HAVE_C99_BOOL': 1, 'INSTALL_DATA': '/usr/bin/install -c -m 644', 'PGEN': 'Parser/pgen', 'HAVE_GRP_H': 1, 'HAVE_WCSFTIME': 1, 'AIX_GENUINE_CPLUSPLUS': 0, 'HAVE_LIBINTL_H': 1, 'SHELL': '/bin/sh', 'HAVE_UNISTD_H': 1, 'EXTRATESTOPTS': '', 'HAVE_EXECV': 1, 'HAVE_FSEEK64': 0, 'MVWDELCH_IS_EXPRESSION': 1, 'DESTSHARED': '/usr/lib/python3.3/lib-dynload', 'OPCODETARGETGEN': '\\', 'LIBDEST': '/usr/lib/python3.3', 'CCSHARED': '-fPIC', 'HAVE_EXPM1': 1, 'HAVE_DLFCN_H': 1, 'exec_prefix': '/usr', 'HAVE_READLINK': 1, 'WINDOW_HAS_FLAGS': 1, 'HAVE_FTELL64': 0, 'HAVE_STRLCPY': 0, 'MACOSX_DEPLOYMENT_TARGET': '', 'HAVE_SYS_SYSCALL_H': 1, 'DESTLIB': '/usr/lib/python3.3', 'LDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HGVERSION': '', 'PYTHON_HEADERS': '\\', 'HAVE_STRINGS_H': 1, 'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1, 'HAVE_POSIX_FALLOCATE': 1, 'HAVE_DIRFD': 1, 'HAVE_LOG2': 1, 'HAVE_GETPID': 1, 'HAVE_ALARM': 1, 'MACHDEP_OBJS': '', 'HAVE_SPAWN_H': 1, 'HAVE_FORK': 1, 'HAVE_SETRESGID': 1, 'HAVE_FCHMODAT': 1, 'HAVE_CLOCK_GETRES': 1, 'MACHDEPPATH': ':plat-i386-linux-gnu', 'STDC_HEADERS': 1, 'HAVE_SETPRIORITY': 1, 'LIBC': '', 'HAVE_SYS_EPOLL_H': 1, 'HAVE_SYS_UTSNAME_H': 1, 'HAVE_PUTENV': 1, 'HAVE_CURSES_RESIZE_TERM': 1, 'HAVE_FUTIMESAT': 1, 'WITH_DYLD': 0, 'INSTALL_PROGRAM': '/usr/bin/install -c', 'LIBS': '-lpthread -ldl -lutil', 'HAVE_TRUNCATE': 1, 'TESTOPTS': '', 'PROFILE_TASK': '../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck', 'HAVE_CURSES_RESIZETERM': 1, 'ABIFLAGS': 'm', 'HAVE_GETGROUPLIST': 1, 'OBJECT_OBJS': '\\', 'HAVE_MKNODAT': 1, 'HAVE_ST_BLOCKS': 1, 'HAVE_STRUCT_STAT_ST_GEN': 0, 'SYS_SELECT_WITH_SYS_TIME': 1, 'SHLIBS': '-lpthread -ldl -lutil', 'HAVE_GETGROUPS': 1, 'MODULE_OBJS': '\\', 'PYTHONFRAMEWORKDIR': 'no-framework', 'HAVE_FCNTL_H': 1, 'HAVE_LINK': 1, 'HAVE_SIGWAIT': 1, 'HAVE_GAMMA': 1, 'HAVE_SYS_LOCK_H': 0, 'HAVE_FORKPTY': 1, 'HAVE_SOCKADDR_SA_LEN': 0, 'HAVE_TEMPNAM': 1, 'HAVE_STRUCT_STAT_ST_BLKSIZE': 1, 'HAVE_MKFIFOAT': 1, 'HAVE_SIGWAITINFO': 1, 'HAVE_FTIME': 1, 'HAVE_EPOLL': 1, 'HAVE_SYS_SOCKET_H': 1, 'HAVE_LARGEFILE_SUPPORT': 1, 'CONFIGURE_CFLAGS': '-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security', 'HAVE_PTHREAD_DESTRUCTOR': 0, 'CONFIGURE_CPPFLAGS': '-D_FORTIFY_SOURCE=2', 'HAVE_SYMLINK': 1, 'HAVE_LONG_LONG': 1, 'HAVE_IEEEFP_H': 0, 'LIBDIR': '/usr/lib', 'HAVE_PTHREAD_KILL': 1, 'TESTPATH': '', 'HAVE_STRDUP': 1, 'POBJS': '\\', 'NO_AS_NEEDED': '-Wl,--no-as-needed', 'HAVE_LONG_DOUBLE': 1, 'HGBRANCH': '', 'DISTFILES': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1, 
'HAVE_FACCESSAT': 1, 'AST_ASDL': '../Parser/Python.asdl', 'CPPFLAGS': '-I. -IInclude -I../Include -D_FORTIFY_SOURCE=2', 'HAVE_MKTIME': 1, 'HAVE_NDIR_H': 0, 'PY_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBOBJDIR': 'Python/', 'HAVE_LINUX_CAN_RAW_H': 1, 'HAVE_GETHOSTBYNAME_R_3_ARG': 0, 'PACKAGE_STRING': 0, 'GNULD': 'yes', 'LOG1P_DROPS_ZERO_SIGN': 0, 'HAVE_FTRUNCATE': 1, 'WITH_LIBINTL': 0, 'HAVE_MREMAP': 1, 'HAVE_DECL_ISNAN': 1, 'HAVE_KILLPG': 1, 'SIZEOF_LONG': 4, 'HAVE_DECL_ISFINITE': 1, 'HAVE_IPA_PURE_CONST_BUG': 0, 'WITH_PYMALLOC': 1, 'abs_srcdir': '/build/buildd/python3.3-3.3.1/build-shared/..', 'HAVE_FCHDIR': 1, 'HAVE_BROKEN_POSIX_SEMAPHORES': 0, 'AC_APPLE_UNIVERSAL_BUILD': 0, 'PGENSRCS': '\\ \\', 'DIRMODE': 755, 'HAVE_GETHOSTBYNAME_R': 1, 'HAVE_LCHFLAGS': 0, 'HAVE_SYS_PARAM_H': 1, 'SIZEOF_LONG_DOUBLE': 12, 'CONFIG_ARGS': "'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'", 'HAVE_SCHED_H': 1, 'HAVE_KILL': 1}
| gpl-3.0 |
semplice/alan2 | alan-menu-updater.py | 1 | 2227 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# alan2 - An openbox menu builder
# Copyright (C) 2013 Semplice Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Eugenio "g7" Paolantonio <[email protected]>
#
import quickstart.translations
tr = quickstart.translations.Translation("alan2")
tr.install()
import alan.core.main as main
import alan.core.config as config
import argparse
import os
# Create and parse arguments
parser = argparse.ArgumentParser()
parser.add_argument(
"extension",
help="the extension to process"
)
parser.add_argument(
"-i", "--directory",
help="directory where to look for configuration files (default: ~/.config)",
default="~/.config"
)
parser.add_argument(
"-p", "--profile",
help="the profile to use"
)
parser.add_argument(
"-t", "--target",
help="the target file. Defaults to ~/.config/alan-menus/<extension>.xml"
)
args = parser.parse_args()
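# Example invocation (the extension name "mymenu" is hypothetical):
#   alan-menu-updater.py mymenu -p default
# which, with no -t given, writes the generated menu to ~/.config/alan-menus/mymenu.xml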
## Welcome to alan2!
DIRECTORY = os.path.expanduser(args.directory)
DEFAULT_PATH=os.path.join(DIRECTORY, "alan-menus")
# Generate directory if it doesn't exist
if not os.path.exists(DEFAULT_PATH):
os.makedirs(DEFAULT_PATH)
# If target is not specified, use our default
if not args.target:
args.target = os.path.join(DEFAULT_PATH, "%s.xml" % args.extension)
# Get extension configuration
configuration = config.Configuration(args.extension, DIRECTORY, args.profile)
# Import extension
extension_module = main.import_extension(args.extension)
# Get extension object
extension = extension_module.Extension(configuration=configuration)
# Generate menu
extension.generate()
# Write menu
extension.write_menu(args.target)
| gpl-3.0 |
shaon/eutester | testcases/cloud_admin/testcase_template.py | 6 | 1143 | #!/usr/bin/python
from eucaops import Eucaops
from eutester.eutestcase import EutesterTestCase
class SampleTest(EutesterTestCase):
def __init__(self):
self.setuptestcase()
self.setup_parser()
self.get_args()
# Setup basic eutester object
self.tester = Eucaops( config_file=self.args.config,password=self.args.password)
def clean_method(self):
pass
def MyTest(self):
"""
This is where the test description goes
"""
for machine in self.tester.get_component_machines("clc"):
machine.sys("ifconfig")
if __name__ == "__main__":
testcase = SampleTest()
### Use the list of tests passed from config/command line to determine what subset of tests to run
### or use a predefined list
list = testcase.args.tests or ["MyTest"]
### Convert test suite methods to EutesterUnitTest objects
unit_list = [ ]
for test in list:
unit_list.append( testcase.create_testunit_by_name(test) )
### Run the EutesterUnitTest objects
result = testcase.run_test_case_list(unit_list,clean_on_exit=True)
exit(result) | bsd-2-clause |
theochem/horton | scripts/horton-esp-test.py | 4 | 3918 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import sys, argparse, os, numpy as np
from horton import log, __version__
from horton.scripts.common import parse_h5, store_args, check_output, \
write_script_output
from horton.scripts.espfit import load_charges, load_cost
# All floating-point errors, except underflows, are *not* fine.
np.seterr(divide='raise', over='raise', invalid='raise')
def parse_args():
parser = argparse.ArgumentParser(prog='horton-esp-test.py',
description='Test how well charges reproduce the ESP.')
parser.add_argument('-V', '--version', action='version',
version="%%(prog)s (HORTON version %s)" % __version__)
parser.add_argument('cost',
help='The location of the cost function in the form '
'"file.h5:group/cost". This argument must be the same as the '
'output argument of the script horton-esp-cost.py.')
parser.add_argument('charges', type=str,
help='The atomic charges to be used in the form '
'"file.h5:group/charges". ')
parser.add_argument('output', type=str,
help='The output destination in the form file.h5:group. The colon and '
'the group name are optional. When omitted, the root group of the '
'HDF5 file is used.')
parser.add_argument('--overwrite', default=False, action='store_true',
help='Overwrite existing output in the HDF5 file')
parser.add_argument('--qtot', '-q', default=None, type=float,
help='The total charge of the system. When given, the charges from the '
'HDF5 file are corrected.')
return parser.parse_args()
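# Example invocation (file and group names are hypothetical):
#   horton-esp-test.py cost.h5:espfit/cost charges.h5:espfit/charges results.h5:espfit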
def main():
args = parse_args()
fn_h5, grp_name = parse_h5(args.output, 'output')
# check if the group is already present (and not empty) in the output file
if check_output(fn_h5, grp_name, args.overwrite):
return
# Load the cost function from the HDF5 file
cost, used_volume = load_cost(args.cost)
# Load the charges from the HDF5 file
charges = load_charges(args.charges)
# Fix total charge if requested
if args.qtot is not None:
charges -= (charges.sum() - args.qtot)/len(charges)
# Store parameters in output
results = {}
results['qtot'] = charges.sum()
# Fitness of the charges
results['cost'] = cost.value_charges(charges)
if results['cost'] < 0:
results['rmsd'] = 0.0
else:
results['rmsd'] = (results['cost']/used_volume)**0.5
# Worst case stuff
results['cost_worst'] = cost.worst(0.0)
if results['cost_worst'] < 0:
results['rmsd_worst'] = 0.0
else:
results['rmsd_worst'] = (results['cost_worst']/used_volume)**0.5
# Write some things on screen
if log.do_medium:
log('RMSD charges: %10.5e' % np.sqrt((charges**2).mean()))
log('RMSD ESP: %10.5e' % results['rmsd'])
log('Worst RMSD ESP: %10.5e' % results['rmsd_worst'])
log.hline()
# Store the results in an HDF5 file
write_script_output(fn_h5, grp_name, results, args)
if __name__ == '__main__':
main()
| gpl-3.0 |
cynnjjs/HangingMobile | revision2.py | 1 | 7778 | #!/usr/bin/python
# ---------------------------------------------------------------------------
# File: revision2.py
# Author: Yining Chen
# Modified from IBM's cplex mixed integer programming example mipex1.py
# ---------------------------------------------------------------------------
# Vector x: 2*n*n+7*n total
# 0 to (n*n-1): x(i->j); n*n entries
# (n*n) to (2*n*n-1): f(i->j); n*n entries
# (2*n*n) to (2*n*n-1+n): x extern n entries
# (2*n*n+n) to (2*n*n-1+7*n): f extern (+x, -x, +y, -y, +z, -z) 6*n entries
# ---------------------------------------------------------------------------
# Equations: 2*n*n+3*n total
# x, y, z directions equilibrium * n balls 3*n entries
# f(i)-x(i) if-else clause n*n+n entries
# f(a->b) = f(b->a) n*(n-1)/2 entries
# x(a->b) = x(b->a) n*(n-1)/2 entries
from __future__ import print_function
import sys
import cplex
import math
from cplex.exceptions import CplexError
# constants
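# m2 serves as a "big-M" coefficient below: constraints of the form
# f - m2*x <= verysmall force a force variable f to zero whenever the
# corresponding binary selection variable x is zero.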
m1 = 9999
m2 = 8888
verysmall = 0.0
# inputs
n = 4
my_balls_x = [0.0, 0.0, 0.0, 0.0]
my_balls_y = [0.0, 1.0, 0.0, 1.0]
my_balls_z = [1.0, 1.0, 0.0, 0.0]
my_balls_g = [1.0, 1.0, 1.0, 1.0]
# fill in my_obj
len_sum = 0.0;
my_obj=[0.0 for x in range(2*n*n+7*n)]
my_colnames=["" for x in range(2*n*n+7*n)]
for i in range(0,n):
for j in range(0,n):
# my_obj for each edge (i->j) is -edge_length
my_obj[i*n+j] = -math.sqrt((my_balls_x[i]-my_balls_x[j])*(my_balls_x[i]-my_balls_x[j])+(my_balls_y[i]-my_balls_y[j])*(my_balls_y[i]-my_balls_y[j])+(my_balls_z[i]-my_balls_z[j])*(my_balls_z[i]-my_balls_z[j]))
# summing up all edge_lengths
len_sum = len_sum - my_obj[i*n+j]
my_colnames[i*n+j]="x("+str(i)+","+str(j)+")"
m = n*n -1
for i in range(n):
for j in range(0,n):
m+=1
my_colnames[m]="f("+str(i)+","+str(j)+")"
for i in range(n):
m = m+1
# my_obj for each external edge is -len_sum-1.0
my_obj[m]= -len_sum-1.0
my_colnames[m]="xex("+str(i)+")"
for i in range(n*6):
m = m+1
my_colnames[m]="fex("+str(i/6)+","+str(i%6+1)+")"
# fill in my_ub, my_lb, my_ctype
my_ctype = ""
my_ub=[0.0 for x in range(2*n*n+7*n)]
my_lb=[0.0 for x in range(2*n*n+7*n)]
for i in range(0,n):
for j in range(0,n):
# x(i->j) is either 0 or 1 when i!=j
# x(i->i) has to be 0
if i!=j:
my_ub[i*n+j] = 1.1
else:
my_ub[i*n+j] = 0.1
my_lb[i*n+j] = -0.1
my_ctype = my_ctype + "I"
m = n*n -1
for i in range(0, n*n):
m = m+1
# each f is non-negative and has no upper bound
my_ub[m] = cplex.infinity
my_lb[m] = 0.0
my_ctype = my_ctype + "C"
for i in range(0, n):
m = m+1
# x_external(i) is either 0 or 1
my_ub[m] = 1.1
my_lb[m] = -0.1
my_ctype = my_ctype + "I"
for i in range(0, n*6):
m = m+1
# each f_external is non-negative and has no upper bound
my_ub[m] = cplex.infinity
my_lb[m] = 0.0
my_ctype = my_ctype + "C"
# fill in my_rhs, my_sense, my_rownames
my_sense = ""
my_rhs = [0.0 for x in range(2*n*n+3*n)]
my_rownames = ["r" for x in range(2*n*n+3*n)]
for i in range(2*n*n+3*n):
my_rownames[i]="r("+str(i)+")"
for i in range(n):
# equilibrium in x, y, z directions
my_rhs[i*3] = 0.0
my_rhs[i*3+1] = 0.0
my_rhs[i*3+2] = my_balls_g[i]
my_sense = my_sense + "EEE"
m = n*3-1
for i in range(n*n+n):
# when x(i) is 0, f(i) has to be 0
m = m+1
my_rhs[m] = verysmall
my_sense = my_sense + "L"
for i in range(n*(n-1)):
# Newton's third law
m = m+1
my_rhs[m] = 0.0
my_sense = my_sense + "E"
def populatebyrow(prob):
prob.objective.set_sense(prob.objective.sense.maximize)
prob.variables.add(obj=my_obj, lb=my_lb, ub=my_ub, types=my_ctype,
names=my_colnames)
# fill in rows
rows=[[[] for x in range(2)] for x in range(2*n*n+3*n)]
# 3*n equilibrium
for i in range(0,n):
        # rows[i*3]: [[(n + 1) entries],[(n + 1) entries]], equilibrium in x direction
        # rows[i*3+1]: [[(n + 1) entries],[(n + 1) entries]], equilibrium in y direction
        # rows[i*3+2]: [[(n + 1) entries],[(n + 1) entries]], equilibrium in z direction
rows[i*3][0]=[]
rows[i*3][1]=[]
rows[i*3+1][0]=[]
rows[i*3+1][1]=[]
rows[i*3+2][0]=[]
rows[i*3+2][1]=[]
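        # each selected edge contributes a force f(j,i) along the unit vector
        # from ball i towards ball j; my_obj[j*n+i] holds the negative edge
        # length, hence the minus sign in the coefficients below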
for j in range(0,n):
if i!=j :
rows[i*3][0]+=[my_colnames[n*n + j*n+i]]
rows[i*3][1]+=[-(my_balls_x[j]-my_balls_x[i])/my_obj[j*n+i]]
rows[i*3+1][0]+=[my_colnames[n*n + j*n+i]]
rows[i*3+1][1]+=[-(my_balls_y[j]-my_balls_y[i])/my_obj[j*n+i]]
rows[i*3+2][0]+=[my_colnames[n*n + j*n+i]]
rows[i*3+2][1]+=[-(my_balls_z[j]-my_balls_z[i])/my_obj[j*n+i]]
# add +x, -x
rows[i*3][0]+=[my_colnames[2*n*n+n+i*6],my_colnames[2*n*n+n+i*6+1]]
rows[i*3][1]+=[1.0, -1.0]
# add +y, -y
rows[i*3+1][0]+=[my_colnames[2*n*n+n+i*6+2],my_colnames[2*n*n+n+i*6+3]]
rows[i*3+1][1]+=[1.0, -1.0]
# add +z, -z
rows[i*3+2][0]+=[my_colnames[2*n*n+n+i*6+4],my_colnames[2*n*n+n+i*6+5]]
rows[i*3+2][1]+=[1.0, -1.0]
# when x(i) is 0, f(i) has to be 0 for internal fs
m = n*3-1
for i in range(0,n):
for j in range(0,n):
m+=1
rows[m][0]=[my_colnames[n*n + i*n+j], my_colnames[i*n+j]]
rows[m][1]=[1.0,-m2]
# when x(i) is 0, f(i) has to be 0 for external fs
for i in range(n):
m+=1
rows[m][0]=[my_colnames[2*n*n+n+i*6], my_colnames[2*n*n+n+i*6+1],my_colnames[2*n*n+n+i*6+2],my_colnames[2*n*n+n+i*6+3],my_colnames[2*n*n+n+i*6+4],my_colnames[2*n*n+n+i*6+5],my_colnames[2*n*n+i]]
rows[m][1]=[1.0,1.0,1.0,1.0,1.0,1.0,-m2]
# f(a,b)=f(b,a)
for i in range(0,n-1):
for j in range(i+1,n):
m+=1
rows[m][0]=[my_colnames[n*n + j*n+i], my_colnames[n*n + i*n+j]]
rows[m][1]=[1.0,-1.0]
# x(a,b)=x(b,a)
for i in range(0,n-1):
for j in range(i+1,n):
m+=1
rows[m][0]=[my_colnames[j*n+i], my_colnames[i*n+j]]
rows[m][1]=[1.0,-1.0]
print("Constraints Printout:")
for i in range(2*n*n+7*n):
print("Column ",i,my_lb[i],"<=",my_colnames[i],"<=",my_ub[i],"weight =",my_obj[i],"type =",my_ctype[i])
print()
print("Equations Printout:")
for i in range(2*n*n+3*n):
print(i,rows[i],my_sense[i],my_rhs[i])
print()
prob.linear_constraints.add(lin_expr=rows, senses=my_sense,
rhs=my_rhs, names=my_rownames)
def main():
try:
my_prob = cplex.Cplex()
handle = populatebyrow(my_prob)
my_prob.solve()
except CplexError as exc:
print(exc)
return
print()
# solution.get_status() returns an integer code
print("Solution status = ", my_prob.solution.get_status(), ":", end=' ')
# the following line prints the corresponding string
print(my_prob.solution.status[my_prob.solution.get_status()])
print("Solution value = ", my_prob.solution.get_objective_value())
numcols = my_prob.variables.get_num()
numrows = my_prob.linear_constraints.get_num()
slack = my_prob.solution.get_linear_slacks()
x = my_prob.solution.get_values()
for j in range(numrows):
print("Row %d: Slack = %10f" % (j, slack[j]))
for j in range(numcols):
print("Column %d %s: Value = %10f" % (j, my_colnames[j],x[j]))
if __name__ == "__main__":
main()
| mit |
paulrouget/servo | tests/wpt/web-platform-tests/tools/third_party/py/testing/path/common.py | 55 | 16410 | import py
import sys
import pytest
class CommonFSTests(object):
def test_constructor_equality(self, path1):
p = path1.__class__(path1)
assert p == path1
def test_eq_nonstring(self, path1):
p1 = path1.join('sampledir')
p2 = path1.join('sampledir')
assert p1 == p2
def test_new_identical(self, path1):
assert path1 == path1.new()
def test_join(self, path1):
p = path1.join('sampledir')
strp = str(p)
assert strp.endswith('sampledir')
assert strp.startswith(str(path1))
def test_join_normalized(self, path1):
newpath = path1.join(path1.sep+'sampledir')
strp = str(newpath)
assert strp.endswith('sampledir')
assert strp.startswith(str(path1))
newpath = path1.join((path1.sep*2) + 'sampledir')
strp = str(newpath)
assert strp.endswith('sampledir')
assert strp.startswith(str(path1))
def test_join_noargs(self, path1):
newpath = path1.join()
assert path1 == newpath
def test_add_something(self, path1):
p = path1.join('sample')
p = p + 'dir'
assert p.check()
assert p.exists()
assert p.isdir()
assert not p.isfile()
def test_parts(self, path1):
newpath = path1.join('sampledir', 'otherfile')
par = newpath.parts()[-3:]
assert par == [path1, path1.join('sampledir'), newpath]
revpar = newpath.parts(reverse=True)[:3]
assert revpar == [newpath, path1.join('sampledir'), path1]
def test_common(self, path1):
other = path1.join('sampledir')
x = other.common(path1)
assert x == path1
#def test_parents_nonexisting_file(self, path1):
# newpath = path1 / 'dirnoexist' / 'nonexisting file'
# par = list(newpath.parents())
# assert par[:2] == [path1 / 'dirnoexist', path1]
def test_basename_checks(self, path1):
newpath = path1.join('sampledir')
assert newpath.check(basename='sampledir')
assert newpath.check(notbasename='xyz')
assert newpath.basename == 'sampledir'
def test_basename(self, path1):
newpath = path1.join('sampledir')
assert newpath.check(basename='sampledir')
assert newpath.basename, 'sampledir'
def test_dirname(self, path1):
newpath = path1.join('sampledir')
assert newpath.dirname == str(path1)
def test_dirpath(self, path1):
newpath = path1.join('sampledir')
assert newpath.dirpath() == path1
def test_dirpath_with_args(self, path1):
newpath = path1.join('sampledir')
assert newpath.dirpath('x') == path1.join('x')
def test_newbasename(self, path1):
newpath = path1.join('samplefile')
newbase = newpath.new(basename="samplefile2")
assert newbase.basename == "samplefile2"
assert newbase.dirpath() == newpath.dirpath()
def test_not_exists(self, path1):
assert not path1.join('does_not_exist').check()
assert path1.join('does_not_exist').check(exists=0)
def test_exists(self, path1):
assert path1.join("samplefile").check()
assert path1.join("samplefile").check(exists=1)
assert path1.join("samplefile").exists()
assert path1.join("samplefile").isfile()
assert not path1.join("samplefile").isdir()
def test_dir(self, path1):
#print repr(path1.join("sampledir"))
assert path1.join("sampledir").check(dir=1)
assert path1.join('samplefile').check(notdir=1)
assert not path1.join("samplefile").check(dir=1)
assert path1.join("samplefile").exists()
assert not path1.join("samplefile").isdir()
assert path1.join("samplefile").isfile()
def test_fnmatch_file(self, path1):
assert path1.join("samplefile").check(fnmatch='s*e')
assert path1.join("samplefile").fnmatch('s*e')
assert not path1.join("samplefile").fnmatch('s*x')
assert not path1.join("samplefile").check(fnmatch='s*x')
#def test_fnmatch_dir(self, path1):
# pattern = path1.sep.join(['s*file'])
# sfile = path1.join("samplefile")
# assert sfile.check(fnmatch=pattern)
def test_relto(self, path1):
l=path1.join("sampledir", "otherfile")
assert l.relto(path1) == l.sep.join(["sampledir", "otherfile"])
assert l.check(relto=path1)
assert path1.check(notrelto=l)
assert not path1.check(relto=l)
def test_bestrelpath(self, path1):
curdir = path1
sep = curdir.sep
s = curdir.bestrelpath(curdir)
assert s == "."
s = curdir.bestrelpath(curdir.join("hello", "world"))
assert s == "hello" + sep + "world"
s = curdir.bestrelpath(curdir.dirpath().join("sister"))
assert s == ".." + sep + "sister"
assert curdir.bestrelpath(curdir.dirpath()) == ".."
assert curdir.bestrelpath("hello") == "hello"
def test_relto_not_relative(self, path1):
l1=path1.join("bcde")
l2=path1.join("b")
assert not l1.relto(l2)
assert not l2.relto(l1)
@py.test.mark.xfail("sys.platform.startswith('java')")
def test_listdir(self, path1):
l = path1.listdir()
assert path1.join('sampledir') in l
assert path1.join('samplefile') in l
py.test.raises(py.error.ENOTDIR,
"path1.join('samplefile').listdir()")
def test_listdir_fnmatchstring(self, path1):
l = path1.listdir('s*dir')
assert len(l)
assert l[0], path1.join('sampledir')
def test_listdir_filter(self, path1):
l = path1.listdir(lambda x: x.check(dir=1))
assert path1.join('sampledir') in l
assert not path1.join('samplefile') in l
def test_listdir_sorted(self, path1):
l = path1.listdir(lambda x: x.check(basestarts="sample"), sort=True)
assert path1.join('sampledir') == l[0]
assert path1.join('samplefile') == l[1]
assert path1.join('samplepickle') == l[2]
def test_visit_nofilter(self, path1):
l = []
for i in path1.visit():
l.append(i.relto(path1))
assert "sampledir" in l
assert path1.sep.join(["sampledir", "otherfile"]) in l
def test_visit_norecurse(self, path1):
l = []
for i in path1.visit(None, lambda x: x.basename != "sampledir"):
l.append(i.relto(path1))
assert "sampledir" in l
assert not path1.sep.join(["sampledir", "otherfile"]) in l
@pytest.mark.parametrize('fil', ['*dir', u'*dir',
pytest.mark.skip("sys.version_info <"
" (3,6)")(b'*dir')])
def test_visit_filterfunc_is_string(self, path1, fil):
l = []
for i in path1.visit(fil):
l.append(i.relto(path1))
assert len(l), 2
assert "sampledir" in l
assert "otherdir" in l
@py.test.mark.xfail("sys.platform.startswith('java')")
def test_visit_ignore(self, path1):
p = path1.join('nonexisting')
assert list(p.visit(ignore=py.error.ENOENT)) == []
def test_visit_endswith(self, path1):
l = []
for i in path1.visit(lambda x: x.check(endswith="file")):
l.append(i.relto(path1))
assert path1.sep.join(["sampledir", "otherfile"]) in l
assert "samplefile" in l
def test_endswith(self, path1):
assert path1.check(notendswith='.py')
x = path1.join('samplefile')
assert x.check(endswith='file')
def test_cmp(self, path1):
path1 = path1.join('samplefile')
path2 = path1.join('samplefile2')
assert (path1 < path2) == ('samplefile' < 'samplefile2')
assert not (path1 < path1)
def test_simple_read(self, path1):
x = path1.join('samplefile').read('r')
assert x == 'samplefile\n'
def test_join_div_operator(self, path1):
newpath = path1 / '/sampledir' / '/test//'
newpath2 = path1.join('sampledir', 'test')
assert newpath == newpath2
def test_ext(self, path1):
newpath = path1.join('sampledir.ext')
assert newpath.ext == '.ext'
newpath = path1.join('sampledir')
assert not newpath.ext
def test_purebasename(self, path1):
newpath = path1.join('samplefile.py')
assert newpath.purebasename == 'samplefile'
def test_multiple_parts(self, path1):
newpath = path1.join('samplefile.py')
dirname, purebasename, basename, ext = newpath._getbyspec(
'dirname,purebasename,basename,ext')
assert str(path1).endswith(dirname) # be careful with win32 'drive'
assert purebasename == 'samplefile'
assert basename == 'samplefile.py'
assert ext == '.py'
def test_dotted_name_ext(self, path1):
newpath = path1.join('a.b.c')
ext = newpath.ext
assert ext == '.c'
assert newpath.ext == '.c'
def test_newext(self, path1):
newpath = path1.join('samplefile.py')
newext = newpath.new(ext='.txt')
assert newext.basename == "samplefile.txt"
assert newext.purebasename == "samplefile"
def test_readlines(self, path1):
fn = path1.join('samplefile')
contents = fn.readlines()
assert contents == ['samplefile\n']
def test_readlines_nocr(self, path1):
fn = path1.join('samplefile')
contents = fn.readlines(cr=0)
assert contents == ['samplefile', '']
def test_file(self, path1):
assert path1.join('samplefile').check(file=1)
def test_not_file(self, path1):
assert not path1.join("sampledir").check(file=1)
assert path1.join("sampledir").check(file=0)
def test_non_existent(self, path1):
assert path1.join("sampledir.nothere").check(dir=0)
assert path1.join("sampledir.nothere").check(file=0)
assert path1.join("sampledir.nothere").check(notfile=1)
assert path1.join("sampledir.nothere").check(notdir=1)
assert path1.join("sampledir.nothere").check(notexists=1)
assert not path1.join("sampledir.nothere").check(notfile=0)
# pattern = path1.sep.join(['s*file'])
# sfile = path1.join("samplefile")
# assert sfile.check(fnmatch=pattern)
def test_size(self, path1):
url = path1.join("samplefile")
assert url.size() > len("samplefile")
def test_mtime(self, path1):
url = path1.join("samplefile")
assert url.mtime() > 0
def test_relto_wrong_type(self, path1):
py.test.raises(TypeError, "path1.relto(42)")
def test_load(self, path1):
p = path1.join('samplepickle')
obj = p.load()
assert type(obj) is dict
assert obj.get('answer',None) == 42
def test_visit_filesonly(self, path1):
l = []
for i in path1.visit(lambda x: x.check(file=1)):
l.append(i.relto(path1))
assert not "sampledir" in l
assert path1.sep.join(["sampledir", "otherfile"]) in l
def test_visit_nodotfiles(self, path1):
l = []
for i in path1.visit(lambda x: x.check(dotfile=0)):
l.append(i.relto(path1))
assert "sampledir" in l
assert path1.sep.join(["sampledir", "otherfile"]) in l
assert not ".dotfile" in l
def test_visit_breadthfirst(self, path1):
l = []
for i in path1.visit(bf=True):
l.append(i.relto(path1))
for i, p in enumerate(l):
if path1.sep in p:
for j in range(i, len(l)):
assert path1.sep in l[j]
break
else:
py.test.fail("huh")
def test_visit_sort(self, path1):
l = []
for i in path1.visit(bf=True, sort=True):
l.append(i.relto(path1))
for i, p in enumerate(l):
if path1.sep in p:
break
assert l[:i] == sorted(l[:i])
assert l[i:] == sorted(l[i:])
def test_endswith(self, path1):
def chk(p):
return p.check(endswith="pickle")
assert not chk(path1)
assert not chk(path1.join('samplefile'))
assert chk(path1.join('somepickle'))
def test_copy_file(self, path1):
otherdir = path1.join('otherdir')
initpy = otherdir.join('__init__.py')
copied = otherdir.join('copied')
initpy.copy(copied)
try:
assert copied.check()
s1 = initpy.read()
s2 = copied.read()
assert s1 == s2
finally:
if copied.check():
copied.remove()
def test_copy_dir(self, path1):
otherdir = path1.join('otherdir')
copied = path1.join('newdir')
try:
otherdir.copy(copied)
assert copied.check(dir=1)
assert copied.join('__init__.py').check(file=1)
s1 = otherdir.join('__init__.py').read()
s2 = copied.join('__init__.py').read()
assert s1 == s2
finally:
if copied.check(dir=1):
copied.remove(rec=1)
def test_remove_file(self, path1):
d = path1.ensure('todeleted')
assert d.check()
d.remove()
assert not d.check()
def test_remove_dir_recursive_by_default(self, path1):
d = path1.ensure('to', 'be', 'deleted')
assert d.check()
p = path1.join('to')
p.remove()
assert not p.check()
def test_ensure_dir(self, path1):
b = path1.ensure_dir("001", "002")
assert b.basename == "002"
assert b.isdir()
def test_mkdir_and_remove(self, path1):
tmpdir = path1
py.test.raises(py.error.EEXIST, tmpdir.mkdir, 'sampledir')
new = tmpdir.join('mktest1')
new.mkdir()
assert new.check(dir=1)
new.remove()
new = tmpdir.mkdir('mktest')
assert new.check(dir=1)
new.remove()
assert tmpdir.join('mktest') == new
def test_move_file(self, path1):
p = path1.join('samplefile')
newp = p.dirpath('moved_samplefile')
p.move(newp)
try:
assert newp.check(file=1)
assert not p.check()
finally:
dp = newp.dirpath()
if hasattr(dp, 'revert'):
dp.revert()
else:
newp.move(p)
assert p.check()
def test_move_dir(self, path1):
source = path1.join('sampledir')
dest = path1.join('moveddir')
source.move(dest)
assert dest.check(dir=1)
assert dest.join('otherfile').check(file=1)
assert not source.join('sampledir').check()
def test_fspath_protocol_match_strpath(self, path1):
assert path1.__fspath__() == path1.strpath
def test_fspath_func_match_strpath(self, path1):
try:
from os import fspath
except ImportError:
from py._path.common import fspath
assert fspath(path1) == path1.strpath
@py.test.mark.skip("sys.version_info < (3,6)")
def test_fspath_open(self, path1):
f = path1.join('opentestfile')
open(f)
@py.test.mark.skip("sys.version_info < (3,6)")
def test_fspath_fsencode(self, path1):
from os import fsencode
assert fsencode(path1) == fsencode(path1.strpath)
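# setuptestfs populates the sample tree (samplefile, samplepickle, sampledir,
# otherdir with a small package, execfile) that the CommonFSTests cases above
# operate on.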
def setuptestfs(path):
if path.join('samplefile').check():
return
#print "setting up test fs for", repr(path)
samplefile = path.ensure('samplefile')
samplefile.write('samplefile\n')
execfile = path.ensure('execfile')
execfile.write('x=42')
execfilepy = path.ensure('execfile.py')
execfilepy.write('x=42')
d = {1:2, 'hello': 'world', 'answer': 42}
path.ensure('samplepickle').dump(d)
sampledir = path.ensure('sampledir', dir=1)
sampledir.ensure('otherfile')
otherdir = path.ensure('otherdir', dir=1)
otherdir.ensure('__init__.py')
module_a = otherdir.ensure('a.py')
module_a.write('from .b import stuff as result\n')
module_b = otherdir.ensure('b.py')
module_b.write('stuff="got it"\n')
module_c = otherdir.ensure('c.py')
module_c.write('''import py;
import otherdir.a
value = otherdir.a.result
''')
module_d = otherdir.ensure('d.py')
module_d.write('''import py;
from otherdir import a
value2 = a.result
''')
| mpl-2.0 |
hynnet/openwrt-mt7620 | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/macurl2path.py | 332 | 3275 | """Macintosh-specific module for conversion between pathnames and URLs.
Do not import directly; use urllib instead."""
import urllib
import os
__all__ = ["url2pathname","pathname2url"]
def url2pathname(pathname):
"""OS-specific conversion from a relative URL of the 'file' scheme
to a file system path; not recommended for general use."""
#
# XXXX The .. handling should be fixed...
#
tp = urllib.splittype(pathname)[0]
if tp and tp != 'file':
raise RuntimeError, 'Cannot convert non-local URL to pathname'
# Turn starting /// into /, an empty hostname means current host
if pathname[:3] == '///':
pathname = pathname[2:]
elif pathname[:2] == '//':
raise RuntimeError, 'Cannot convert non-local URL to pathname'
components = pathname.split('/')
# Remove . and embedded ..
i = 0
while i < len(components):
if components[i] == '.':
del components[i]
elif components[i] == '..' and i > 0 and \
components[i-1] not in ('', '..'):
del components[i-1:i+1]
i = i-1
elif components[i] == '' and i > 0 and components[i-1] != '':
del components[i]
else:
i = i+1
if not components[0]:
# Absolute unix path, don't start with colon
rv = ':'.join(components[1:])
else:
# relative unix path, start with colon. First replace
# leading .. by empty strings (giving ::file)
i = 0
while i < len(components) and components[i] == '..':
components[i] = ''
i = i + 1
rv = ':' + ':'.join(components)
# and finally unquote slashes and other funny characters
return urllib.unquote(rv)
def pathname2url(pathname):
"""OS-specific conversion from a file system path to a relative URL
of the 'file' scheme; not recommended for general use."""
if '/' in pathname:
raise RuntimeError, "Cannot convert pathname containing slashes"
components = pathname.split(':')
# Remove empty first and/or last component
if components[0] == '':
del components[0]
if components[-1] == '':
del components[-1]
# Replace empty string ('::') by .. (will result in '/../' later)
for i in range(len(components)):
if components[i] == '':
components[i] = '..'
# Truncate names longer than 31 bytes
components = map(_pncomp2url, components)
if os.path.isabs(pathname):
return '/' + '/'.join(components)
else:
return '/'.join(components)
def _pncomp2url(component):
component = urllib.quote(component[:31], safe='') # We want to quote slashes
return component
def test():
for url in ["index.html",
"bar/index.html",
"/foo/bar/index.html",
"/foo/bar/",
"/"]:
print '%r -> %r' % (url, url2pathname(url))
for path in ["drive:",
"drive:dir:",
"drive:dir:file",
"drive:file",
"file",
":file",
":dir:",
":dir:file"]:
print '%r -> %r' % (path, pathname2url(path))
if __name__ == '__main__':
test()
| gpl-2.0 |
jzoldak/edx-platform | lms/djangoapps/certificates/tests/factories.py | 23 | 3036 | # Factories are self documenting
# pylint: disable=missing-docstring
from uuid import uuid4
from factory.django import DjangoModelFactory
from certificates.models import (
GeneratedCertificate, CertificateStatuses, CertificateHtmlViewConfiguration, CertificateWhitelist,
CertificateInvalidation,
)
from student.models import LinkedInAddToProfileConfiguration
class GeneratedCertificateFactory(DjangoModelFactory):
class Meta(object):
model = GeneratedCertificate
course_id = None
status = CertificateStatuses.unavailable
mode = GeneratedCertificate.MODES.honor
name = ''
verify_uuid = uuid4().hex
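# Sketch of typical test usage (the field values shown are hypothetical):
#   cert = GeneratedCertificateFactory.create(
#       user=user, course_id=course_key,
#       status=CertificateStatuses.downloadable)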
class CertificateWhitelistFactory(DjangoModelFactory):
class Meta(object):
model = CertificateWhitelist
course_id = None
whitelist = True
notes = 'Test Notes'
class CertificateInvalidationFactory(DjangoModelFactory):
class Meta(object):
model = CertificateInvalidation
notes = 'Test Notes'
active = True
class CertificateHtmlViewConfigurationFactory(DjangoModelFactory):
class Meta(object):
model = CertificateHtmlViewConfiguration
enabled = True
configuration = """{
"default": {
"accomplishment_class_append": "accomplishment-certificate",
"platform_name": "edX",
"company_about_url": "http://www.edx.org/about-us",
"company_privacy_url": "http://www.edx.org/edx-privacy-policy",
"company_tos_url": "http://www.edx.org/edx-terms-service",
"company_verified_certificate_url": "http://www.edx.org/verified-certificate",
"document_stylesheet_url_application": "/static/certificates/sass/main-ltr.css",
"logo_src": "/static/certificates/images/logo-edx.png",
"logo_url": "http://www.edx.org"
},
"honor": {
"certificate_type": "Honor Code",
"certificate_title": "Certificate of Achievement",
"logo_url": "http://www.edx.org/honor_logo.png"
},
"verified": {
"certificate_type": "Verified",
"certificate_title": "Verified Certificate of Achievement"
},
"xseries": {
"certificate_title": "XSeries Certificate of Achievement",
"certificate_type": "XSeries"
},
"microsites": {
"test-site": {
"company_about_url": "http://www.test-site.org/about-us",
"company_privacy_url": "http://www.test-site.org/edx-privacy-policy",
"company_tos_url": "http://www.test-site.org/edx-terms-service"
}
}
}"""
class LinkedInAddToProfileConfigurationFactory(DjangoModelFactory):
class Meta(object):
model = LinkedInAddToProfileConfiguration
enabled = True
company_identifier = "0_0dPSPyS070e0HsE9HNz_13_d11_"
trk_partner_name = 'unittest'
| agpl-3.0 |
ssorgatem/qiime | scripts/parallel_multiple_rarefactions.py | 15 | 4939 | #!/usr/bin/env python
# File created on 14 Jul 2012
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2011, The QIIME project"
__credits__ = ["Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Greg Caporaso"
__email__ = "[email protected]"
from qiime.util import (parse_command_line_parameters, make_option)
from os.path import split, splitext, join
from qiime.util import get_options_lookup
from qiime.parallel.multiple_rarefactions import ParallelMultipleRarefactions
options_lookup = get_options_lookup()
script_info = {}
script_info['brief_description'] = """Parallel multiple file rarefaction"""
script_info[
'script_description'] = """This script performs like the multiple_rarefactions.py script, but is intended to make use of multicore/multiprocessor environments to perform analyses in parallel."""
script_info['script_usage'] = []
script_info['script_usage'].append(
("""OTU tables of different depths""",
"""Build rarefied otu tables containing 10 (-m) to 140 (-x) sequences in steps of 10 (-s) with 2 (-n) repetions per number of sequences, from otu_table.biom (-i). Write the output files to the rarefied_otu_tables directory (-o, will be created if it doesn't exist). The name of the output files will be of the form rarefaction_<num_seqs>_<reptition_number>.biom. ALWAYS SPECIFY ABSOLUTE FILE PATHS (absolute path represented here as $PWD, but will generally look something like /home/ubuntu/my_analysis/).""",
"""%prog -o $PWD/rarefied_otu_tables/ -m 10 -x 140 -s 10 -n 2 -i $PWD/otu_table.biom"""))
script_info['script_usage'].append(
("""OTU tables of the same depth""",
"""Build 8 rarefied otu tables each containing exactly 100 sequences per sample (even depth rarefaction). ALWAYS SPECIFY ABSOLUTE FILE PATHS (absolute path represented here as $PWD, but will generally look something like /home/ubuntu/my_analysis/).""",
"""%prog -o $PWD/even_otu_tables/ -m 100 -x 100 -n 8 -i $PWD/otu_table.biom"""))
script_info[
'output_description'] = """The result of parallel_multiple_rarefactions.py consists of a number of files, which depend on the minimum/maximum number of sequences per samples, steps and iterations. The files have the same otu table format as the input otu_table.biom, and are named in the following way: rarefaction_100_0.txt, where "100" corresponds to the sequences per sample and "0" for the iteration."""
script_info['required_options'] = [
make_option('-i', '--input_path', type='existing_filepath',
help='input filepath, (the otu table) [REQUIRED]'),
make_option('-o', '--output_path', type='new_dirpath',
help="write output rarefied otu tables here makes dir if it doesn't exist [REQUIRED]"),
make_option('-m', '--min', type=int, help='min seqs/sample [REQUIRED]'),
make_option('-x', '--max', type=int,
help='max seqs/sample (inclusive) [REQUIRED]'),
]
script_info['optional_options'] = [
make_option('-n', '--num_reps', dest='num_reps', default=10, type=int,
help='num iterations at each seqs/sample level [default: %default]'),
make_option(
'--suppress_lineages_included', default=False, action="store_true",
help='Exclude taxonomic (lineage) information for each OTU.'),
make_option('-s', '--step', type=int, default=1,
help='levels: min, min+step... for level <= max [default: %default]'),
make_option('--subsample_multinomial', default=False, action='store_true',
help='subsample using subsampling with replacement [default: %default]'),
options_lookup['retain_temp_files'],
options_lookup['suppress_submit_jobs'],
options_lookup['poll_directly'],
options_lookup['cluster_jobs_fp'],
options_lookup['suppress_polling'],
options_lookup['job_prefix'],
options_lookup['seconds_to_sleep'],
options_lookup['jobs_to_start']
]
script_info['version'] = __version__
def main():
option_parser, opts, args = parse_command_line_parameters(**script_info)
# create dict of command-line options
params = eval(str(opts))
if not opts.step > 0:
option_parser.error(("Error: step size must be greater than 0.\n"
"If min = max, just leave step size at 1."))
parallel_runner = ParallelMultipleRarefactions(
cluster_jobs_fp=opts.cluster_jobs_fp,
jobs_to_start=opts.jobs_to_start,
retain_temp_files=opts.retain_temp_files,
suppress_polling=opts.suppress_polling,
seconds_to_sleep=opts.seconds_to_sleep)
parallel_runner(opts.input_path,
opts.output_path,
params,
job_prefix=opts.job_prefix,
poll_directly=opts.poll_directly,
suppress_submit_jobs=opts.suppress_submit_jobs)
if __name__ == "__main__":
main()
| gpl-2.0 |
8u1a/plaso | plaso/engine/worker.py | 2 | 27179 | # -*- coding: utf-8 -*-
"""The event extraction worker."""
import logging
import os
from dfvfs.analyzer import analyzer
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.lib import errors as dfvfs_errors
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver as path_spec_resolver
import pysigscan
from plaso.engine import collector
from plaso.engine import profiler
from plaso.engine import queue
from plaso.lib import definitions
from plaso.lib import errors
from plaso.hashers import manager as hashers_manager
from plaso.parsers import manager as parsers_manager
class BaseEventExtractionWorker(queue.ItemQueueConsumer):
"""Class that defines the event extraction worker base.
This class is designed to watch a queue for path specifications of files
and directories (file entries) and data streams for which events need to
be extracted.
The event extraction worker needs to determine if a parser suitable
for parsing a particular file entry or data stream is available. All
extracted event objects are pushed on a storage queue for further processing.
"""
_DEFAULT_HASH_READ_SIZE = 4096
# TSK metadata files that need special handling.
_METADATA_FILE_LOCATIONS_TSK = frozenset([
u'/$AttrDef',
u'/$BadClus',
u'/$Bitmap',
u'/$Boot',
u'/$Extend/$ObjId',
u'/$Extend/$Quota',
u'/$Extend/$Reparse',
u'/$Extend/$RmMetadata/$Repair',
u'/$Extend/$RmMetadata/$TxfLog/$Tops',
u'/$LogFile',
u'/$MFT',
u'/$MFTMirr',
u'/$Secure',
u'/$UpCase',
u'/$Volume',
])
def __init__(
self, identifier, path_spec_queue, event_queue_producer,
parse_error_queue_producer, parser_mediator, resolver_context=None):
"""Initializes the event extraction worker object.
Args:
identifier: The identifier, usually an incrementing integer.
path_spec_queue: The path specification queue (instance of Queue).
This queue contains the path specifications (instances
of dfvfs.PathSpec) of the file entries that need
to be processed.
event_queue_producer: The event object queue producer (instance of
ItemQueueProducer).
parse_error_queue_producer: The parse error queue producer (instance of
ItemQueueProducer).
parser_mediator: A parser mediator object (instance of ParserMediator).
resolver_context: Optional resolver context (instance of dfvfs.Context).
The default is None.
"""
super(BaseEventExtractionWorker, self).__init__(path_spec_queue)
self._compressed_stream_path_spec = None
self._current_display_name = u''
self._current_file_entry = None
self._enable_debug_output = False
self._identifier = identifier
self._identifier_string = u'Worker_{0:d}'.format(identifier)
self._file_scanner = None
self._filestat_parser_object = None
self._hasher_names = None
self._non_sigscan_parser_names = None
self._open_files = False
self._parser_mediator = parser_mediator
self._parser_objects = None
self._process_archive_files = False
self._produced_number_of_path_specs = 0
self._resolver_context = resolver_context
self._specification_store = None
self._event_queue_producer = event_queue_producer
self._parse_error_queue_producer = parse_error_queue_producer
# Attributes that contain the current status of the worker.
self._status = definitions.PROCESSING_STATUS_INITIALIZED
# Attributes for profiling.
self._enable_profiling = False
self._memory_profiler = None
self._parsers_profiler = None
self._profiling_sample = 0
self._profiling_sample_rate = 1000
def _ConsumeItem(self, path_spec, **unused_kwargs):
"""Consumes an item callback for ConsumeItems.
Args:
path_spec: a path specification (instance of dfvfs.PathSpec).
Raises:
QueueFull: If a queue is full.
"""
self._ProcessPathSpec(path_spec)
# TODO: work-around for now the compressed stream path spec
# needs to be processed after the current path spec.
if self._compressed_stream_path_spec:
self._ProcessPathSpec(self._compressed_stream_path_spec)
self._compressed_stream_path_spec = None
def _GetSignatureMatchParserNames(self, file_object):
"""Determines if a file-like object matches one of the known signatures.
Args:
file_object: the file-like object whose contents will be checked
for known signatures.
Returns:
A list of parser names for which the file entry matches their
known signatures.
"""
parser_name_list = []
scan_state = pysigscan.scan_state()
self._file_scanner.scan_file_object(scan_state, file_object)
for scan_result in scan_state.scan_results:
format_specification = (
self._specification_store.GetSpecificationBySignature(
scan_result.identifier))
if format_specification.identifier not in parser_name_list:
parser_name_list.append(format_specification.identifier)
return parser_name_list
def _HashDataStream(self, file_entry, data_stream_name=u''):
"""Hashes the contents of a specific data stream of a file entry.
The resulting digest hashes are set in the parser mediator as attributes
that are added to produced event objects. Note that some file systems
allow directories to have data streams, e.g. NTFS.
Args:
file_entry: the file entry relating to the data to be hashed (instance of
dfvfs.FileEntry)
data_stream_name: optional data stream name. The default is
an empty string which represents the default
data stream.
"""
if not self._hasher_names:
return
logging.debug(u'[HashDataStream] hashing file: {0:s}'.format(
self._current_display_name))
file_object = file_entry.GetFileObject(data_stream_name=data_stream_name)
if not file_object:
return
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
try:
digest_hashes = hashers_manager.HashersManager.HashFileObject(
self._hasher_names, file_object,
buffer_size=self._DEFAULT_HASH_READ_SIZE)
finally:
file_object.close()
if self._enable_profiling:
self._ProfilingSampleMemory()
for hash_name, digest_hash_string in iter(digest_hashes.items()):
attribute_name = u'{0:s}_hash'.format(hash_name)
self._parser_mediator.AddEventAttribute(
attribute_name, digest_hash_string)
logging.debug(
u'[HashDataStream] digest {0:s} calculated for file: {1:s}.'.format(
digest_hash_string, self._current_display_name))
logging.debug(
u'[HashDataStream] completed hashing file: {0:s}'.format(
self._current_display_name))
def _IsMetadataFile(self, file_entry):
"""Determines if the file entry is a metadata file.
Args:
file_entry: a file entry object (instance of dfvfs.FileEntry).
Returns:
A boolean value indicating if the file entry is a metadata file.
"""
if (file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_TSK and
file_entry.path_spec.location in self._METADATA_FILE_LOCATIONS_TSK):
return True
return False
def _ParseFileEntryWithParser(
self, parser_object, file_entry, file_object=None):
"""Parses a file entry with a specific parser.
Args:
parser_object: a parser object (instance of BaseParser).
file_entry: a file entry object (instance of dfvfs.FileEntry).
file_object: optional file-like object to parse. If not set the parser
will use the parser mediator to open the file entry's
default data stream as a file-like object
"""
self._parser_mediator.ClearParserChain()
reference_count = self._resolver_context.GetFileObjectReferenceCount(
file_entry.path_spec)
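    # Remember the current file-object reference count so we can warn below
    # when a parser leaves file objects for this path spec open.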
if self._parsers_profiler:
self._parsers_profiler.StartTiming(parser_object.NAME)
try:
parser_object.UpdateChainAndParse(
self._parser_mediator, file_object=file_object)
# We catch the IOError so we can determine the parser that generated
# the error.
except (dfvfs_errors.BackEndError, IOError) as exception:
logging.warning(
u'{0:s} unable to parse file: {1:s} with error: {2:s}'.format(
parser_object.NAME, self._current_display_name, exception))
except errors.UnableToParseFile as exception:
logging.debug(
u'{0:s} unable to parse file: {1:s} with error: {2:s}'.format(
parser_object.NAME, self._current_display_name, exception))
finally:
if self._parsers_profiler:
self._parsers_profiler.StopTiming(parser_object.NAME)
if reference_count != self._resolver_context.GetFileObjectReferenceCount(
file_entry.path_spec):
logging.warning((
u'[{0:s}] did not explicitly close file-object for file: '
u'{1:s}.').format(parser_object.NAME, self._current_display_name))
def _ProcessArchiveFile(self, file_entry):
"""Processes an archive file (file that contains file entries).
Args:
file_entry: A file entry object (instance of dfvfs.FileEntry).
Returns:
A boolean indicating if the file is an archive file.
"""
try:
type_indicators = analyzer.Analyzer.GetArchiveTypeIndicators(
file_entry.path_spec, resolver_context=self._resolver_context)
except IOError as exception:
logging.warning((
u'Analyzer failed to determine archive type indicators '
u'for file: {0:s} with error: {1:s}').format(
self._current_display_name, exception))
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
return False
number_of_type_indicators = len(type_indicators)
if number_of_type_indicators == 0:
return False
if number_of_type_indicators > 1:
logging.debug((
u'Found multiple format type indicators: {0:s} for '
u'archive file: {1:s}').format(
type_indicators, self._current_display_name))
for type_indicator in type_indicators:
if type_indicator == dfvfs_definitions.TYPE_INDICATOR_TAR:
archive_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_TAR, location=u'/',
parent=file_entry.path_spec)
elif type_indicator == dfvfs_definitions.TYPE_INDICATOR_ZIP:
archive_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_ZIP, location=u'/',
parent=file_entry.path_spec)
else:
logging.debug((
u'Unsupported archive format type indicator: {0:s} for '
u'archive file: {1:s}').format(
type_indicator, self._current_display_name))
archive_path_spec = None
if archive_path_spec and self._process_archive_files:
try:
file_system = path_spec_resolver.Resolver.OpenFileSystem(
archive_path_spec, resolver_context=self._resolver_context)
try:
# TODO: make sure to handle the abort here.
# TODO: change this to pass the archive file path spec to
# the collector process and have the collector implement a maximum
# path spec "depth" to prevent ZIP bombs and equiv.
file_system_collector = collector.FileSystemCollector(self._queue)
file_system_collector.Collect(file_system, archive_path_spec)
self._produced_number_of_path_specs += (
file_system_collector.number_of_produced_items)
finally:
file_system.Close()
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
except IOError:
logging.warning(u'Unable to process archive file:\n{0:s}'.format(
self._current_display_name))
return True
def _ProcessCompressedStreamFile(self, file_entry):
"""Processes an compressed stream file (file that contains file entries).
Args:
file_entry: A file entry object (instance of dfvfs.FileEntry).
Returns:
A boolean indicating if the file is a compressed stream file.
"""
try:
type_indicators = analyzer.Analyzer.GetCompressedStreamTypeIndicators(
file_entry.path_spec, resolver_context=self._resolver_context)
except IOError as exception:
logging.warning((
u'Analyzer failed to determine compressed stream type indicators '
u'for file: {0:s} with error: {1:s}').format(
self._current_display_name, exception))
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
return False
number_of_type_indicators = len(type_indicators)
if number_of_type_indicators == 0:
return False
if number_of_type_indicators > 1:
logging.debug((
u'Found multiple format type indicators: {0:s} for '
u'compressed stream file: {1:s}').format(
type_indicators, self._current_display_name))
for type_indicator in type_indicators:
if type_indicator == dfvfs_definitions.TYPE_INDICATOR_BZIP2:
compressed_stream_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_COMPRESSED_STREAM,
compression_method=dfvfs_definitions.COMPRESSION_METHOD_BZIP2,
parent=file_entry.path_spec)
elif type_indicator == dfvfs_definitions.TYPE_INDICATOR_GZIP:
compressed_stream_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=file_entry.path_spec)
else:
logging.debug((
          u'Unsupported compressed stream format type indicator: {0:s} for '
u'compressed stream file: {1:s}').format(
type_indicator, self._current_display_name))
compressed_stream_path_spec = None
if compressed_stream_path_spec:
# TODO: disabled for now since it can cause a deadlock.
# self._queue.PushItem(compressed_stream_path_spec)
# self._produced_number_of_path_specs += 1
# TODO: work-around for now the compressed stream path spec
# needs to be processed after the current path spec.
self._compressed_stream_path_spec = compressed_stream_path_spec
return True
def _ProcessDataStream(self, file_entry, data_stream_name=u''):
"""Processes a specific data stream of a file entry.
Args:
file_entry: A file entry object (instance of dfvfs.FileEntry).
data_stream_name: optional data stream name. The default is
an empty string which represents the default
data stream.
"""
file_object = file_entry.GetFileObject(data_stream_name=data_stream_name)
if not file_object:
return
try:
parser_name_list = self._GetSignatureMatchParserNames(file_object)
if not parser_name_list:
parser_name_list = self._non_sigscan_parser_names
for parser_name in parser_name_list:
parser_object = self._parser_objects.get(parser_name, None)
if not parser_object:
logging.warning(u'No such parser: {0:s}'.format(parser_name))
continue
logging.debug((
u'[ProcessDataStream] parsing file: {0:s} with parser: '
u'{1:s}').format(self._current_display_name, parser_name))
self._ParseFileEntryWithParser(
parser_object, file_entry, file_object=file_object)
finally:
file_object.close()
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
def _ProcessFileEntry(self, file_entry, data_stream_name=u''):
"""Processes a specific data stream of a file entry.
Args:
file_entry: A file entry object (instance of dfvfs.FileEntry).
data_stream_name: optional data stream name. The default is
an empty string which represents the default
data stream.
Raises:
RuntimeError: if the parser object is missing.
"""
self._current_file_entry = file_entry
self._current_display_name = self._parser_mediator.GetDisplayName(
file_entry)
reference_count = self._resolver_context.GetFileObjectReferenceCount(
file_entry.path_spec)
self._parser_mediator.SetFileEntry(file_entry)
logging.debug(u'[ProcessFileEntry] parsing file: {0:s}'.format(
self._current_display_name))
is_metadata_file = self._IsMetadataFile(file_entry)
# Not every file entry has a data stream. In such cases we want to
# extract the metadata only.
has_data_stream = file_entry.HasDataStream(data_stream_name)
try:
if has_data_stream:
self._HashDataStream(file_entry, data_stream_name=data_stream_name)
# We always want to use the filestat parser if set but we only want
# to invoke it once per file entry, so we only use it if we are
# processing the default (nameless) data stream.
if not data_stream_name and self._filestat_parser_object:
self._ParseFileEntryWithParser(self._filestat_parser_object, file_entry)
is_archive = False
is_compressed_stream = False
if not is_metadata_file and file_entry.IsFile():
is_compressed_stream = self._ProcessCompressedStreamFile(file_entry)
if not is_compressed_stream:
is_archive = self._ProcessArchiveFile(file_entry)
if (has_data_stream and not is_archive and not is_compressed_stream and
not is_metadata_file):
self._ProcessDataStream(file_entry, data_stream_name=data_stream_name)
finally:
if reference_count != self._resolver_context.GetFileObjectReferenceCount(
file_entry.path_spec):
# Clean up after parsers that do not call close explicitly.
if self._resolver_context.ForceRemoveFileObject(file_entry.path_spec):
logging.warning(
u'File-object not explicitly closed for file: {0:s}'.format(
self._current_display_name))
# We do not clear self._current_file_entry or self._current_display_name
# here to allow the foreman to see which file was previously processed.
self._parser_mediator.ResetFileEntry()
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
if self._enable_profiling:
self._ProfilingSampleMemory()
logging.debug(u'[ParseFileEntry] done processing: {0:s}'.format(
self._current_display_name))
def _ProcessPathSpec(self, path_spec):
"""Processes a path specification.
Args:
path_spec: A path specification object (instance of dfvfs.PathSpec).
"""
try:
file_entry = path_spec_resolver.Resolver.OpenFileEntry(
path_spec, resolver_context=self._resolver_context)
if file_entry is None:
logging.warning(
u'Unable to open file entry with path spec: {0:s}'.format(
path_spec.comparable))
return
# Note that data stream can be set but contain None, we'll set it
# to an empty string here.
data_stream_name = getattr(path_spec, u'data_stream', None)
if not data_stream_name:
data_stream_name = u''
self._ProcessFileEntry(file_entry, data_stream_name=data_stream_name)
except IOError as exception:
logging.warning(
u'Unable to process path spec: {0:s} with error: {1:s}'.format(
path_spec.comparable, exception))
except dfvfs_errors.CacheFullError:
# TODO: signal engine of failure.
self._abort = True
logging.error((
u'ABORT: detected cache full error while processing '
u'path spec: {0:s}').format(path_spec.comparable))
    # All exceptions need to be caught here to prevent the worker
    # from being killed by an uncaught exception.
except Exception as exception:
logging.warning(
u'Unhandled exception while processing path spec: {0:s}.'.format(
path_spec.comparable))
logging.exception(exception)
# TODO: Issue #314 - add a signal to the worker to indicate that
# the tool is run in single process mode with debug turned on and
# in that case start a pdb debugger here instead of just logging
# the exception.
# Make sure frame.f_locals does not keep a reference to file_entry.
file_entry = None
def _ProfilingSampleMemory(self):
"""Create a memory profiling sample."""
if not self._memory_profiler:
return
self._profiling_sample += 1
if self._profiling_sample >= self._profiling_sample_rate:
self._memory_profiler.Sample()
self._profiling_sample = 0
def _ProfilingStart(self):
"""Starts the profiling."""
self._profiling_sample = 0
if self._memory_profiler:
self._memory_profiler.Start()
def _ProfilingStop(self):
"""Stops the profiling."""
if self._memory_profiler:
self._memory_profiler.Sample()
if self._parsers_profiler:
self._parsers_profiler.Write()
@property
def current_path_spec(self):
"""The current path specification."""
if not self._current_file_entry:
return
return self._current_file_entry.path_spec
def GetStatus(self):
"""Returns a dictionary containing the status."""
return {
u'consumed_number_of_path_specs': self.number_of_consumed_items,
u'display_name': self._current_display_name,
u'identifier': self._identifier_string,
u'number_of_events': self._parser_mediator.number_of_events,
u'processing_status': self._status,
u'produced_number_of_path_specs': self._produced_number_of_path_specs,
u'type': definitions.PROCESS_TYPE_WORKER}
def InitializeParserObjects(self, parser_filter_string=None):
"""Initializes the parser objects.
The parser_filter_string is a simple comma separated value string that
denotes a list of parser names to include and/or exclude. Each entry
can have the value of:
* Exact match of a list of parsers, or a preset (see
plaso/frontend/presets.py for a full list of available presets).
* A name of a single parser (case insensitive), eg. msiecfparser.
* A glob name for a single parser, eg: '*msie*' (case insensitive).
Args:
parser_filter_string: Optional parser filter string. The default is None.
"""
self._specification_store, non_sigscan_parser_names = (
parsers_manager.ParsersManager.GetSpecificationStore(
parser_filter_string=parser_filter_string))
self._non_sigscan_parser_names = []
for parser_name in non_sigscan_parser_names:
if parser_name == u'filestat':
continue
self._non_sigscan_parser_names.append(parser_name)
self._file_scanner = parsers_manager.ParsersManager.GetScanner(
self._specification_store)
self._parser_objects = parsers_manager.ParsersManager.GetParserObjects(
parser_filter_string=parser_filter_string)
self._filestat_parser_object = self._parser_objects.get(u'filestat', None)
if u'filestat' in self._parser_objects:
del self._parser_objects[u'filestat']
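  # Illustrative only: a hypothetical parser filter string using the syntax
  # described in InitializeParserObjects above; the parser and preset names
  # are assumptions that depend on the registered plugins and presets.
  #
  #   worker.InitializeParserObjects(parser_filter_string=u'winreg,*msie*')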
def Run(self):
"""Extracts event objects from file entries."""
self._parser_mediator.ResetCounters()
if self._enable_profiling:
self._ProfilingStart()
self._status = definitions.PROCESSING_STATUS_RUNNING
logging.debug(
u'Worker {0:d} (PID: {1:d}) started monitoring process queue.'.format(
self._identifier, os.getpid()))
self.ConsumeItems()
logging.debug(
u'Worker {0:d} (PID: {1:d}) stopped monitoring process queue.'.format(
self._identifier, os.getpid()))
self._status = definitions.PROCESSING_STATUS_COMPLETED
self._current_file_entry = None
if self._enable_profiling:
self._ProfilingStop()
def SetEnableDebugOutput(self, enable_debug_output):
"""Enables or disables debug output.
Args:
enable_debug_output: boolean value to indicate if the debug output
should be enabled.
"""
self._enable_debug_output = enable_debug_output
def SetEnableProfiling(
self, enable_profiling, profiling_sample_rate=1000,
profiling_type=u'all'):
"""Enables or disables profiling.
Args:
enable_profiling: boolean value to indicate if profiling should
be enabled.
profiling_sample_rate: optional integer indicating the profiling sample
rate. The value contains the number of files
processed. The default value is 1000.
profiling_type: optional profiling type. The default is 'all'.
"""
self._enable_profiling = enable_profiling
self._profiling_sample_rate = profiling_sample_rate
if self._enable_profiling:
if profiling_type in [u'all', u'memory'] and not self._memory_profiler:
self._memory_profiler = profiler.GuppyMemoryProfiler(self._identifier)
if profiling_type in [u'all', u'parsers'] and not self._parsers_profiler:
self._parsers_profiler = profiler.ParsersProfiler(self._identifier)
def SetFilterObject(self, filter_object):
"""Sets the filter object.
Args:
filter_object: the filter object (instance of objectfilter.Filter).
"""
self._parser_mediator.SetFilterObject(filter_object)
def SetHashers(self, hasher_names_string):
"""Initializes the hasher objects.
Args:
hasher_names_string: Comma separated string of names of
hashers to enable.
"""
names = hashers_manager.HashersManager.GetHasherNamesFromString(
hasher_names_string)
logging.debug(u'[SetHashers] Enabling hashers: {0:s}.'.format(names))
self._hasher_names = names
def SetMountPath(self, mount_path):
"""Sets the mount path.
Args:
mount_path: string containing the mount path.
"""
self._parser_mediator.SetMountPath(mount_path)
def SetProcessArchiveFiles(self, process_archive_files):
"""Sets the process archive files mode.
Args:
process_archive_files: boolean value to indicate if the worker should
scan for file entries inside files.
"""
self._process_archive_files = process_archive_files
def SetTextPrepend(self, text_prepend):
"""Sets the text prepend.
Args:
text_prepend: string that contains the text to prepend to every
event object.
"""
self._parser_mediator.SetTextPrepend(text_prepend)
def SignalAbort(self):
"""Signals the worker to abort."""
self._parser_mediator.SignalAbort()
super(BaseEventExtractionWorker, self).SignalAbort()
| apache-2.0 |
adelina-t/neutron | neutron/api/versions.py | 23 | 1958 | # Copyright 2011 Citrix Systems.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
from oslo_log import log as logging
import webob.dec
from neutron.api.views import versions as versions_view
from neutron import wsgi
LOG = logging.getLogger(__name__)
class Versions(object):
@classmethod
def factory(cls, global_config, **local_config):
return cls()
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Respond to a request for all Neutron API versions."""
version_objs = [
{
"id": "v2.0",
"status": "CURRENT",
},
]
if req.path != '/':
language = req.best_match_language()
msg = _('Unknown API version specified')
msg = oslo_i18n.translate(msg, language)
return webob.exc.HTTPNotFound(explanation=msg)
builder = versions_view.get_view_builder(req)
versions = [builder.build(version) for version in version_objs]
response = dict(versions=versions)
metadata = {}
content_type = req.best_match_content_type()
body = (wsgi.Serializer(metadata=metadata).
serialize(response, content_type))
response = webob.Response()
response.content_type = content_type
response.body = wsgi.encode_body(body)
return response
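# Illustrative only: the JSON document returned for a "GET /" request has the
# following general shape (the "links" entries are built per request by the
# view builder and are omitted here):
#
#   {"versions": [{"id": "v2.0", "status": "CURRENT", "links": [...]}]}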
| apache-2.0 |
simobasso/ansible | contrib/inventory/zone.py | 138 | 1466 | #!/usr/bin/env python
# (c) 2015, Dagobert Michelsen <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen,PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['zoneadm', 'list', '-ip'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = []
for l in pipe.stdout.readlines():
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
s = l.split(':')
if s[1] != 'global':
result['all']['hosts'].append(s[1])
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'zone'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({'ansible_connection': 'zone'}))
else:
print("Need an argument, either --list or --host <host>")
| gpl-3.0 |
mzhaom/dpdk | lib/librte_vhost/libvirt/qemu-wrap.py | 20 | 12379 | #!/usr/bin/python
#/*
# * BSD LICENSE
# *
# * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
# * All rights reserved.
# *
# * Redistribution and use in source and binary forms, with or without
# * modification, are permitted provided that the following conditions
# * are met:
# *
# * * Redistributions of source code must retain the above copyright
# * notice, this list of conditions and the following disclaimer.
# * * Redistributions in binary form must reproduce the above copyright
# * notice, this list of conditions and the following disclaimer in
# * the documentation and/or other materials provided with the
# * distribution.
# * * Neither the name of Intel Corporation nor the names of its
# * contributors may be used to endorse or promote products derived
# * from this software without specific prior written permission.
# *
# * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# */
#####################################################################
# This script is designed to modify the call to the QEMU emulator
# to support userspace vhost when starting a guest machine through
# libvirt with vhost enabled. The steps to enable this are as follows
# and should be run as root:
#
# 1. Place this script in a libvirtd's binary search PATH ($PATH)
# A good location would be in the same directory that the QEMU
# binary is located
#
# 2. Ensure that the script has the same owner/group and file
# permissions as the QEMU binary
#
# 3. Update the VM xml file using "virsh edit VM.xml"
#
# 3.a) Set the VM to use the launch script
#
# Set the emulator path contained in the
# <emulator><emulator/> tags
#
# e.g replace <emulator>/usr/bin/qemu-kvm<emulator/>
# with <emulator>/usr/bin/qemu-wrap.py<emulator/>
#
# 3.b) Set the VM's device's to use vhost-net offload
#
# <interface type="network">
# <model type="virtio"/>
# <driver name="vhost"/>
# <interface/>
#
# 4. Enable libvirt to access our userpace device file by adding it to
# controllers cgroup for libvirtd using the following steps
#
# 4.a) In /etc/libvirt/qemu.conf add/edit the following lines:
# 1) cgroup_controllers = [ ... "devices", ... ]
# 2) clear_emulator_capabilities = 0
# 3) user = "root"
# 4) group = "root"
# 5) cgroup_device_acl = [
# "/dev/null", "/dev/full", "/dev/zero",
# "/dev/random", "/dev/urandom",
# "/dev/ptmx", "/dev/kvm", "/dev/kqemu",
# "/dev/rtc", "/dev/hpet", "/dev/net/tun",
# "/dev/<devbase-name>",
# "/dev/hugepages",
# ]
#
# 4.b) Disable SELinux or set to permissive mode
#
# 4.c) Mount cgroup device controller
# "mkdir /dev/cgroup"
# "mount -t cgroup none /dev/cgroup -o devices"
#
# 4.d) Set hugetlbfs_mount variable - ( Optional )
# VMs using userspace vhost must use hugepage backed
# memory. This can be enabled in the libvirt XML
# config by adding a memory backing section to the
# XML config e.g.
# <memoryBacking>
# <hugepages/>
# </memoryBacking>
# This memory backing section should be added after the
# <memory> and <currentMemory> sections. This will add
# flags "-mem-prealloc -mem-path <path>" to the QEMU
# command line. The hugetlbfs_mount variable can be used
# to override the default <path> passed through by libvirt.
#
# if "-mem-prealloc" or "-mem-path <path>" are not passed
# through and a vhost device is detected then these options will
# be automatically added by this script. This script will detect
# the system hugetlbfs mount point to be used for <path>. The
# default <path> for this script can be overridden by the
# hugetlbfs_dir variable in the configuration section of this script.
#
#
# 4.e) Restart the libvirtd system process
# e.g. on Fedora "systemctl restart libvirtd.service"
#
#
# 4.f) Edit the Configuration Parameters section of this script
# to point to the correct emulator location and set any
# addition options
#
# The script modifies the libvirtd Qemu call by modifying/adding
# options based on the configuration parameters below.
# NOTE:
# emul_path and us_vhost_path must be set
# All other parameters are optional
#####################################################################
#############################################
# Configuration Parameters
#############################################
#Path to QEMU binary
emul_path = "/usr/local/bin/qemu-system-x86_64"
#Path to userspace vhost device file
# This filename should match the --dev-basename parameter of
# the command used to launch the userspace vhost sample application e.g.
# if the sample app launch command is:
# ./build/vhost-switch ..... --dev-basename usvhost
# then this variable should be set to:
# us_vhost_path = "/dev/usvhost"
us_vhost_path = "/dev/usvhost"
#List of additional user defined emulation options. These options will
#be added to all Qemu calls
emul_opts_user = []
#List of additional user defined emulation options for vhost only.
#These options will only be added to vhost enabled guests
emul_opts_user_vhost = []
#For all VHOST enabled VMs, the VM memory is preallocated from hugetlbfs
# Set this variable to one to enable this option for all VMs
use_huge_all = 0
#Instead of autodetecting, override the hugetlbfs directory by setting
#this variable
hugetlbfs_dir = ""
#############################################
#############################################
# ****** Do Not Modify Below this Line ******
#############################################
import sys, os, subprocess
import time
import signal
#List of open userspace vhost file descriptors
fd_list = []
#additional virtio device flags when using userspace vhost
vhost_flags = [ "csum=off",
"gso=off",
"guest_tso4=off",
"guest_tso6=off",
"guest_ecn=off"
]
#String of the path to the Qemu process pid
qemu_pid = "/tmp/%d-qemu.pid" % os.getpid()
#############################################
# Signal handler to kill Qemu subprocess
#############################################
def kill_qemu_process(signum, stack):
pidfile = open(qemu_pid, 'r')
pid = int(pidfile.read())
os.killpg(pid, signal.SIGTERM)
pidfile.close()
#############################################
# Find the system hugefile mount point.
# Note:
# if multiple hugetlbfs mount points exist
# then the first one found will be used
#############################################
def find_huge_mount():
if (len(hugetlbfs_dir)):
return hugetlbfs_dir
huge_mount = ""
if (os.access("/proc/mounts", os.F_OK)):
f = open("/proc/mounts", "r")
line = f.readline()
while line:
line_split = line.split(" ")
if line_split[2] == 'hugetlbfs':
huge_mount = line_split[1]
break
line = f.readline()
else:
print "/proc/mounts not found"
exit (1)
    f.close()
if len(huge_mount) == 0:
print "Failed to find hugetlbfs mount point"
exit (1)
return huge_mount
#############################################
# Get a userspace Vhost file descriptor
#############################################
def get_vhost_fd():
if (os.access(us_vhost_path, os.F_OK)):
fd = os.open( us_vhost_path, os.O_RDWR)
else:
print ("US-Vhost file %s not found" %us_vhost_path)
exit (1)
return fd
#############################################
# Check for vhostfd. if found then replace
# with our own vhost fd and append any vhost
# flags onto the end
#############################################
def modify_netdev_arg(arg):
global fd_list
vhost_in_use = 0
s = ''
new_opts = []
netdev_opts = arg.split(",")
for opt in netdev_opts:
#check if vhost is used
if "vhost" == opt[:5]:
vhost_in_use = 1
else:
new_opts.append(opt)
#if using vhost append vhost options
if vhost_in_use == 1:
#append vhost on option
new_opts.append('vhost=on')
        #append vhostfd option
new_fd = get_vhost_fd()
new_opts.append('vhostfd=' + str(new_fd))
fd_list.append(new_fd)
#concatenate all options
for opt in new_opts:
if len(s) > 0:
s+=','
s+=opt
return s
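# Illustrative only: a sketch of the rewrite performed by modify_netdev_arg,
# assuming get_vhost_fd() returns file descriptor 31:
#
#   modify_netdev_arg("tap,id=hostnet0,vhost=on")
#   # -> "tap,id=hostnet0,vhost=on,vhostfd=31"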
#############################################
# Main
#############################################
def main():
global fd_list
global vhost_in_use
new_args = []
num_cmd_args = len(sys.argv)
emul_call = ''
mem_prealloc_set = 0
mem_path_set = 0
num = 0;
#parse the parameters
while (num < num_cmd_args):
arg = sys.argv[num]
#Check netdev +1 parameter for vhostfd
if arg == '-netdev':
num_vhost_devs = len(fd_list)
new_args.append(arg)
num+=1
arg = sys.argv[num]
mod_arg = modify_netdev_arg(arg)
new_args.append(mod_arg)
#append vhost flags if this is a vhost device
# and -device is the next arg
# i.e -device -opt1,-opt2,...,-opt3,%vhost
if (num_vhost_devs < len(fd_list)):
num+=1
arg = sys.argv[num]
if arg == '-device':
new_args.append(arg)
num+=1
new_arg = sys.argv[num]
for flag in vhost_flags:
new_arg = ''.join([new_arg,',',flag])
new_args.append(new_arg)
else:
new_args.append(arg)
elif arg == '-mem-prealloc':
mem_prealloc_set = 1
new_args.append(arg)
elif arg == '-mem-path':
mem_path_set = 1
new_args.append(arg)
else:
new_args.append(arg)
num+=1
#Set Qemu binary location
emul_call+=emul_path
emul_call+=" "
#Add prealloc mem options if using vhost and not already added
if ((len(fd_list) > 0) and (mem_prealloc_set == 0)):
emul_call += "-mem-prealloc "
#Add mempath mem options if using vhost and not already added
if ((len(fd_list) > 0) and (mem_path_set == 0)):
#Detect and add hugetlbfs mount point
mp = find_huge_mount()
mp = "".join(["-mem-path ", mp])
emul_call += mp
emul_call += " "
#add user options
for opt in emul_opts_user:
emul_call += opt
emul_call += " "
    #Add user vhost-only options
if len(fd_list) > 0:
for opt in emul_opts_user_vhost:
emul_call += opt
emul_call += " "
#Add updated libvirt options
iter_args = iter(new_args)
#skip 1st arg i.e. call to this script
next(iter_args)
for arg in iter_args:
emul_call+=str(arg)
emul_call+= " "
emul_call += "-pidfile %s " % qemu_pid
#Call QEMU
process = subprocess.Popen(emul_call, shell=True, preexec_fn=os.setsid)
for sig in [signal.SIGTERM, signal.SIGINT, signal.SIGHUP, signal.SIGQUIT]:
signal.signal(sig, kill_qemu_process)
process.wait()
#Close usvhost files
for fd in fd_list:
os.close(fd)
#Cleanup temporary files
if os.access(qemu_pid, os.F_OK):
os.remove(qemu_pid)
if __name__ == "__main__":
main()
| gpl-2.0 |
Micronaet/micronaet-addons-private | intervention_report_trip/wizard/wizard_create_intervent.py | 1 | 8852 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# Copyright (C) 2004-2012 Micronaet srl. All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from osv import fields, osv
from openerp.osv.fields import datetime as datetime_field
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class intervent_report_create_wizard(osv.osv_memory):
    ''' Wizard to create a new intervention from an invoice and link it to the document
'''
_name = "intervent.report.create.wizard"
_description = "Wizard intervent creation"
def get_account_id(self, cr, uid, context=None):
        ''' Retrieve the analytic account id, looking for an analytic
            account with code="EXPLAN"
        '''
        # 1. search account.analytic.account for the code "EXPLAN"
        # 2. check the returned list
        #    return item_id[0]
        # else:
        #    return object.create
item_id = self.pool.get("account.analytic.account").search(cr, uid, [
('code', '=', 'EXPLAN'),
], context=context)
if item_id:
return item_id[0]
else:
item_id = self.pool.get("account.analytic.account").create(
cr, uid, {
'name':'Conto Explan',
'type':'normal',
'use_timesheets':True,
'code':'EXPLAN',
}, context=context)
return item_id
def button_create(self, cr, uid, ids, context=None):
''' Create intervent
'''
if context is None:
context = {}
wiz_proxy = self.browse(cr, uid, ids)[0]
data = {
'intervention_request':'Richiesta telefonica intervento',
#'code':'Codice intervento',
#'date_end',
'google_from': 'company',
'manual_total': False,
'user_id': context.get('trip_user_id', False),
'google_to': 'company',
'trip_require': wiz_proxy.mode == "customer",
'intervent_partner_id': wiz_proxy.partner_id.id,
'partner_id': wiz_proxy.partner_id.id,
'break_require': False,
'not_in_report': True,
'name': 'Richiesta intervento generico',
'mode': wiz_proxy.mode,
'invoice_id': False,
'intervent_duration': wiz_proxy.intervent_duration,
'manual_total_internal': False,
'extra_planned': True,
'trip_id': context.get('active_id', False),
#'message_ids': '',
'trip_hour': wiz_proxy.partner_id.trip_duration,
'date_start': wiz_proxy.datetime,
'date': wiz_proxy.datetime[:10],
'state': 'close',
'intervention': 'Intervento generico',
'ref': '', #TODO vedere se inserirlo
'break_hour': 0.0,
#'move_id': '',
'internal_note': '',
#'amount': '', #TODO
#'unit_amount':, #TODO
'intervent_total':
wiz_proxy.intervent_duration + \
wiz_proxy.partner_id.trip_duration \
if wiz_proxy.mode == "customer" else 0.0,
#'line_id': '',
#'to_invoice': , #TODO
            'account_id': self.get_account_id(cr, uid, context=context),  # wiz_proxy.account_id.id
}
res = self.pool.get ("hr.analytic.timesheet").on_change_user_id(
cr, uid, [], data['user_id'],)
data.update(res.get('value', {}))
self.pool.get ("hr.analytic.timesheet").create(
cr, uid, data, context=context)
self.pool.get("hr.analytic.timesheet.trip").calculate_step_list(
cr, uid, [data['trip_id']], context=context)
return False
def onchange_datetime(self, cr, uid, ids, dt, context=None):
        ''' Read the user's appointment list for the selected date
'''
if context is None:
context = {}
res = {"value": {}, 'warning': {}}
user_id = context.get('trip_user_id', False) # context passed from button in hr.analytic.timesheet.trip and then from on_change function
trip_date = context.get('trip_date', False) # context passed from button in hr.analytic.timesheet.trip and then from on_change function
if dt[:10] != trip_date:
res['warning']['title'] = 'Attenzione:'
res['warning']['message'] = \
'Utilizzare come data la data del viaggio: %s' % trip_date
#dt="%s %s" %(trip_date, dt[11:])
return res
if user_id and dt:
intervent_pool = self.pool.get("hr.analytic.timesheet")
domain = [
('user_id','=', user_id),
('date_start', '>=', "%s 00:00:00" % dt[:10]),
('date_start', '<=', "%s 23:59:59" % dt[:10]),
]
intervent_ids = intervent_pool.search(cr, uid, domain)
situation = ""
for rapportino in intervent_pool.browse(
cr, uid, intervent_ids, context=None):
date_start = datetime_field.context_timestamp(cr, uid,
timestamp=datetime.strptime(
rapportino.date_start, DEFAULT_SERVER_DATETIME_FORMAT),
context=context)
date_start = date_start.strftime(
DEFAULT_SERVER_DATETIME_FORMAT)
situation += "%s [%.2f] %s %s\n" % (
date_start[-8:],
rapportino.intervent_duration,
rapportino.intervent_partner_id.name,
"DAL CLIENTE" if \
rapportino.mode == "customer" else "IN AZIENDA")
res["value"]["situation"]=situation
#res["value"]["datetime"]=datetime
return res
def default_datetime(self, cr, uid, context=None):
        ''' Compute the default datetime value from the trip date
        '''
#import pdb; pdb.set_trace()
return "%s 08:00:00" %(context.get('trip_date',False))
_columns = {
#'user_id': fields.many2one('res.users', 'User', required=True),
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
'datetime': fields.datetime('Data e ora', required=True),
#'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=False, required=True),
'name': fields.char('Descrizione', size=64, required=True, select=True),
'mode': fields.selection([
('phone','Phone'),
('customer','Customer address'),
('connection','Tele assistence'),
('company','Company address'),
],'Mode', select=True, required=True),
'situation': fields.text('User situation',),
'intervent_duration': fields.float(
'Intervent duration', digits=(8, 2), required=True),
}
_defaults = {
'intervent_duration': lambda *a: 1.0,
'name': lambda *a: 'Intervento generico',
'mode': lambda *a: 'customer',
'datetime': lambda s, cr, uid, c: s.default_datetime(cr, uid, context=c),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Tranzystorek/servo | tests/wpt/css-tests/tools/py/py/_io/terminalwriter.py | 175 | 12542 | """
Helper functions for writing to terminals and files.
"""
import sys, os
import py
py3k = sys.version_info[0] >= 3
from py.builtin import text, bytes
win32_and_ctypes = False
colorama = None
if sys.platform == "win32":
try:
import colorama
except ImportError:
try:
import ctypes
win32_and_ctypes = True
except ImportError:
pass
def _getdimensions():
import termios,fcntl,struct
call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8)
height,width = struct.unpack( "hhhh", call ) [:2]
return height, width
def get_terminal_width():
height = width = 0
try:
height, width = _getdimensions()
except py.builtin._sysex:
raise
except:
# pass to fallback below
pass
if width == 0:
# FALLBACK:
# * some exception happened
# * or this is emacs terminal which reports (0,0)
width = int(os.environ.get('COLUMNS', 80))
# XXX the windows getdimensions may be bogus, let's sanify a bit
if width < 40:
width = 80
return width
terminal_width = get_terminal_width()
# XXX unify with _escaped func below
def ansi_print(text, esc, file=None, newline=True, flush=False):
if file is None:
file = sys.stderr
text = text.rstrip()
if esc and not isinstance(esc, tuple):
esc = (esc,)
if esc and sys.platform != "win32" and file.isatty():
text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
text +
'\x1b[0m') # ANSI color code "reset"
if newline:
text += '\n'
if esc and win32_and_ctypes and file.isatty():
if 1 in esc:
bold = True
esc = tuple([x for x in esc if x != 1])
else:
bold = False
esctable = {() : FOREGROUND_WHITE, # normal
(31,): FOREGROUND_RED, # red
(32,): FOREGROUND_GREEN, # green
(33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
(34,): FOREGROUND_BLUE, # blue
(35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
(36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
(37,): FOREGROUND_WHITE, # white
(39,): FOREGROUND_WHITE, # reset
}
attr = esctable.get(esc, FOREGROUND_WHITE)
if bold:
attr |= FOREGROUND_INTENSITY
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
if file is sys.stderr:
handle = GetStdHandle(STD_ERROR_HANDLE)
else:
handle = GetStdHandle(STD_OUTPUT_HANDLE)
oldcolors = GetConsoleInfo(handle).wAttributes
attr |= (oldcolors & 0x0f0)
SetConsoleTextAttribute(handle, attr)
while len(text) > 32768:
file.write(text[:32768])
text = text[32768:]
if text:
file.write(text)
SetConsoleTextAttribute(handle, oldcolors)
else:
file.write(text)
if flush:
file.flush()
def should_do_markup(file):
if os.environ.get('PY_COLORS') == '1':
return True
if os.environ.get('PY_COLORS') == '0':
return False
return hasattr(file, 'isatty') and file.isatty() \
and os.environ.get('TERM') != 'dumb' \
and not (sys.platform.startswith('java') and os._name == 'nt')
class TerminalWriter(object):
_esctable = dict(black=30, red=31, green=32, yellow=33,
blue=34, purple=35, cyan=36, white=37,
Black=40, Red=41, Green=42, Yellow=43,
Blue=44, Purple=45, Cyan=46, White=47,
bold=1, light=2, blink=5, invert=7)
# XXX deprecate stringio argument
def __init__(self, file=None, stringio=False, encoding=None):
if file is None:
if stringio:
self.stringio = file = py.io.TextIO()
else:
file = py.std.sys.stdout
elif py.builtin.callable(file) and not (
hasattr(file, "write") and hasattr(file, "flush")):
file = WriteFile(file, encoding=encoding)
if hasattr(file, "isatty") and file.isatty() and colorama:
file = colorama.AnsiToWin32(file).stream
self.encoding = encoding or getattr(file, 'encoding', "utf-8")
self._file = file
self.fullwidth = get_terminal_width()
self.hasmarkup = should_do_markup(file)
self._lastlen = 0
def _escaped(self, text, esc):
if esc and self.hasmarkup:
text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
text +'\x1b[0m')
return text
def markup(self, text, **kw):
esc = []
for name in kw:
if name not in self._esctable:
raise ValueError("unknown markup: %r" %(name,))
if kw[name]:
esc.append(self._esctable[name])
return self._escaped(text, tuple(esc))
def sep(self, sepchar, title=None, fullwidth=None, **kw):
if fullwidth is None:
fullwidth = self.fullwidth
# the goal is to have the line be as long as possible
# under the condition that len(line) <= fullwidth
if sys.platform == "win32":
# if we print in the last column on windows we are on a
# new line but there is no way to verify/neutralize this
# (we may not know the exact line width)
# so let's be defensive to avoid empty lines in the output
fullwidth -= 1
if title is not None:
# we want 2 + 2*len(fill) + len(title) <= fullwidth
# i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
# 2*len(sepchar)*N <= fullwidth - len(title) - 2
# N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
N = (fullwidth - len(title) - 2) // (2*len(sepchar))
fill = sepchar * N
line = "%s %s %s" % (fill, title, fill)
else:
# we want len(sepchar)*N <= fullwidth
# i.e. N <= fullwidth // len(sepchar)
line = sepchar * (fullwidth // len(sepchar))
# in some situations there is room for an extra sepchar at the right,
# in particular if we consider that with a sepchar like "_ " the
# trailing space is not important at the end of the line
if len(line) + len(sepchar.rstrip()) <= fullwidth:
line += sepchar.rstrip()
self.line(line, **kw)
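    # Worked example of the width computation above: with fullwidth=80,
    # sepchar="=" and title="session starts" (14 characters),
    # N = (80 - 14 - 2) // 2 = 32, so the separator line is 32 "=" characters,
    # a space, the title, a space and 32 more "=" characters (80 in total).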
def write(self, msg, **kw):
if msg:
if not isinstance(msg, (bytes, text)):
msg = text(msg)
if self.hasmarkup and kw:
markupmsg = self.markup(msg, **kw)
else:
markupmsg = msg
write_out(self._file, markupmsg)
def line(self, s='', **kw):
self.write(s, **kw)
self._checkfill(s)
self.write('\n')
def reline(self, line, **kw):
if not self.hasmarkup:
raise ValueError("cannot use rewrite-line without terminal")
self.write(line, **kw)
self._checkfill(line)
self.write('\r')
self._lastlen = len(line)
def _checkfill(self, line):
diff2last = self._lastlen - len(line)
if diff2last > 0:
self.write(" " * diff2last)
class Win32ConsoleWriter(TerminalWriter):
def write(self, msg, **kw):
if msg:
if not isinstance(msg, (bytes, text)):
msg = text(msg)
oldcolors = None
if self.hasmarkup and kw:
handle = GetStdHandle(STD_OUTPUT_HANDLE)
oldcolors = GetConsoleInfo(handle).wAttributes
default_bg = oldcolors & 0x00F0
attr = default_bg
if kw.pop('bold', False):
attr |= FOREGROUND_INTENSITY
if kw.pop('red', False):
attr |= FOREGROUND_RED
elif kw.pop('blue', False):
attr |= FOREGROUND_BLUE
elif kw.pop('green', False):
attr |= FOREGROUND_GREEN
elif kw.pop('yellow', False):
attr |= FOREGROUND_GREEN|FOREGROUND_RED
else:
attr |= oldcolors & 0x0007
SetConsoleTextAttribute(handle, attr)
write_out(self._file, msg)
if oldcolors:
SetConsoleTextAttribute(handle, oldcolors)
class WriteFile(object):
def __init__(self, writemethod, encoding=None):
self.encoding = encoding
self._writemethod = writemethod
def write(self, data):
if self.encoding:
data = data.encode(self.encoding, "replace")
self._writemethod(data)
def flush(self):
return
if win32_and_ctypes:
TerminalWriter = Win32ConsoleWriter
import ctypes
from ctypes import wintypes
# ctypes access to the Windows console
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0000 # black text
FOREGROUND_BLUE = 0x0001 # text color contains blue.
FOREGROUND_GREEN = 0x0002 # text color contains green.
FOREGROUND_RED = 0x0004 # text color contains red.
FOREGROUND_WHITE = 0x0007
FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
BACKGROUND_BLACK = 0x0000 # background color black
BACKGROUND_BLUE = 0x0010 # background color contains blue.
BACKGROUND_GREEN = 0x0020 # background color contains green.
BACKGROUND_RED = 0x0040 # background color contains red.
BACKGROUND_WHITE = 0x0070
BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
SHORT = ctypes.c_short
class COORD(ctypes.Structure):
_fields_ = [('X', SHORT),
('Y', SHORT)]
class SMALL_RECT(ctypes.Structure):
_fields_ = [('Left', SHORT),
('Top', SHORT),
('Right', SHORT),
('Bottom', SHORT)]
class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
_fields_ = [('dwSize', COORD),
('dwCursorPosition', COORD),
('wAttributes', wintypes.WORD),
('srWindow', SMALL_RECT),
('dwMaximumWindowSize', COORD)]
_GetStdHandle = ctypes.windll.kernel32.GetStdHandle
_GetStdHandle.argtypes = [wintypes.DWORD]
_GetStdHandle.restype = wintypes.HANDLE
def GetStdHandle(kind):
return _GetStdHandle(kind)
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
SetConsoleTextAttribute.restype = wintypes.BOOL
_GetConsoleScreenBufferInfo = \
ctypes.windll.kernel32.GetConsoleScreenBufferInfo
_GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
def GetConsoleInfo(handle):
info = CONSOLE_SCREEN_BUFFER_INFO()
_GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
return info
def _getdimensions():
handle = GetStdHandle(STD_OUTPUT_HANDLE)
info = GetConsoleInfo(handle)
        # Subtract one from the width, otherwise the cursor wraps
# and the ending \n causes an empty line to display.
return info.dwSize.Y, info.dwSize.X - 1
def write_out(fil, msg):
# XXX sometimes "msg" is of type bytes, sometimes text which
# complicates the situation. Should we try to enforce unicode?
try:
# on py27 and above writing out to sys.stdout with an encoding
# should usually work for unicode messages (if the encoding is
# capable of it)
fil.write(msg)
except UnicodeEncodeError:
# on py26 it might not work because stdout expects bytes
if fil.encoding:
try:
fil.write(msg.encode(fil.encoding))
except UnicodeEncodeError:
# it might still fail if the encoding is not capable
pass
else:
fil.flush()
return
# fallback: escape all unicode characters
msg = msg.encode("unicode-escape").decode("ascii")
fil.write(msg)
fil.flush()
| mpl-2.0 |
aexeagmbh/swampdragon | swampdragon/pubsub_providers/mock_publisher.py | 14 | 1454 | import json
subscribers = {}
class MockPublisher(object):
def __init__(self):
self.subscribers = subscribers
def publish(self, channel, message):
subs = subscribers.get(channel)
if not subs:
return
for subscriber in subs:
if isinstance(message, str):
message = json.dumps(message)
subscriber.published_data.append(message)
def _get_channels_from_subscriptions(self, base_channel):
channels = [key for key in self.subscribers.keys() if key.startswith(base_channel)]
return channels
def get_channels(self, base_channel):
return self._get_channels_from_subscriptions(base_channel)
def subscribe(self, channels, subscriber):
for c in channels:
if c not in subscribers.keys():
subscribers[c] = []
subscribers[c].append(subscriber)
def unsubscribe(self, channels, subscriber):
if not isinstance(channels, list):
return self.unsubscribe([channels], subscriber)
for channel in channels:
subscribers[channel].remove(subscriber)
empty_channels = [k for (k, v) in subscribers.items() if not v]
for k in empty_channels:
del subscribers[k]
def remove_subscriber(self, subscriber):
channels = [c for c in subscribers if subscriber in subscribers[c]]
self.unsubscribe(channels, subscriber)
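# Illustrative usage sketch; the _FakeSubscriber stub below is an assumption
# (any object exposing a "published_data" list works as a subscriber here):
#
#   class _FakeSubscriber(object):
#       def __init__(self):
#           self.published_data = []
#
#   publisher = MockPublisher()
#   subscriber = _FakeSubscriber()
#   publisher.subscribe(['chat-room-1'], subscriber)
#   publisher.publish('chat-room-1', {'message': 'hello'})
#   # subscriber.published_data now contains the published message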
| bsd-3-clause |
richese/i3pystatus | i3pystatus/core/__init__.py | 3 | 4792 | import logging
import os
import sys
from threading import Thread
from i3pystatus.core import io, util
from i3pystatus.core.exceptions import ConfigError
from i3pystatus.core.imputil import ClassFinder
from i3pystatus.core.modules import Module
DEFAULT_LOG_FORMAT = '%(asctime)s [%(levelname)-8s][%(name)s %(lineno)d] %(message)s'
log = logging.getLogger(__name__)
class CommandEndpoint:
"""
Endpoint for i3bar click events: http://i3wm.org/docs/i3bar-protocol.html#_click_events
:param modules: dict-like object with item access semantics via .get()
:param io_handler_factory: function creating a file-like object returning a JSON generator on .read()
"""
def __init__(self, modules, io_handler_factory, io):
self.modules = modules
self.io_handler_factory = io_handler_factory
self.io = io
self.thread = Thread(target=self._command_endpoint)
self.thread.daemon = True
def start(self):
"""Starts the background thread"""
self.thread.start()
def _command_endpoint(self):
for cmd in self.io_handler_factory().read():
target_module = self.modules.get(cmd["instance"])
button = cmd["button"]
kwargs = {"button_id": button}
try:
kwargs.update({"pos_x": cmd["x"],
"pos_y": cmd["y"]})
except Exception:
continue
if target_module:
target_module.on_click(button, **kwargs)
target_module.run()
self.io.async_refresh()
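    # A sketch of a single click event as delivered by i3bar; the field values
    # below are purely illustrative:
    #   {"name": "clock", "instance": "...", "button": 1, "x": 1320, "y": 1400}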
class Status:
"""
The main class used for registering modules and managing I/O
:param bool standalone: Whether i3pystatus should read i3status-compatible input from `input_stream`.
:param int interval: Update interval in seconds.
:param input_stream: A file-like object that provides the input stream, if `standalone` is False.
:param bool click_events: Enable click events, if `standalone` is True.
:param str logfile: Path to log file that will be used by i3pystatus.
:param tuple internet_check: Address of server that will be used to check for internet connection by :py:class:`.internet`.
"""
def __init__(self, standalone=True, click_events=True, interval=1,
input_stream=None, logfile=None, internet_check=None,
logformat=DEFAULT_LOG_FORMAT):
self.standalone = standalone
self.click_events = standalone and click_events
input_stream = input_stream or sys.stdin
logger = logging.getLogger("i3pystatus")
if logfile:
for handler in logger.handlers:
logger.removeHandler(handler)
logfile = os.path.expandvars(logfile)
handler = logging.FileHandler(logfile, delay=True)
logger.addHandler(handler)
logger.setLevel(logging.CRITICAL)
if logformat:
for index in range(len(logger.handlers)):
logger.handlers[index].setFormatter(logging.Formatter(logformat))
if internet_check:
util.internet.address = internet_check
self.modules = util.ModuleList(self, ClassFinder(Module))
if self.standalone:
self.io = io.StandaloneIO(self.click_events, self.modules, interval)
if self.click_events:
self.command_endpoint = CommandEndpoint(
self.modules,
lambda: io.JSONIO(io=io.IOHandler(sys.stdin, open(os.devnull, "w")), skiplines=1),
self.io)
else:
self.io = io.IOHandler(input_stream)
def register(self, module, *args, **kwargs):
"""
Register a new module.
:param module: Either a string module name, or a module class,
or a module instance (in which case args and kwargs are
invalid).
:param kwargs: Settings for the module.
:returns: module instance
"""
from i3pystatus.text import Text
if not module:
return
try:
return self.modules.append(module, *args, **kwargs)
except Exception as e:
log.exception(e)
return self.modules.append(Text(
color="#FF0000",
text="{i3py_mod}: Fatal Error - {ex}({msg})".format(
i3py_mod=module,
ex=e.__class__.__name__,
msg=e
)
))
def run(self):
"""
Run main loop.
"""
if self.click_events:
self.command_endpoint.start()
for j in io.JSONIO(self.io).read():
for module in self.modules:
module.inject(j)
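# Illustrative only: a minimal configuration sketch; the "clock" module name
# and its format string are assumptions that depend on the installed modules.
#
#   from i3pystatus import Status
#   status = Status(standalone=True)
#   status.register("clock", format="%a %-d %b %X")
#   status.run()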
| mit |
whitehorse-io/encarnia | evennia/evennia/players/migrations/0004_auto_20150403_2339.py | 12 | 1937 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import evennia.players.manager
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('players', '0003_auto_20150209_2234'),
]
operations = [
migrations.DeleteModel(
name='DefaultGuest',
),
migrations.DeleteModel(
name='DefaultPlayer',
),
migrations.AlterModelManagers(
name='playerdb',
managers=[
(b'objects', evennia.players.manager.PlayerDBManager()),
],
),
migrations.AlterField(
model_name='playerdb',
name='email',
field=models.EmailField(max_length=254, verbose_name='email address', blank=True),
),
migrations.AlterField(
model_name='playerdb',
name='groups',
field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', verbose_name='groups'),
),
migrations.AlterField(
model_name='playerdb',
name='last_login',
field=models.DateTimeField(null=True, verbose_name='last login', blank=True),
),
migrations.AlterField(
model_name='playerdb',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, max_length=30, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, verbose_name='username'),
),
]
| mit |
patrickm/chromium.src | tools/telemetry/telemetry/value/value_backcompat.py | 36 | 1984 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Backward compatibility for old results API.
This module helps convert the old PageMeasurementResults API into the new
style one. This exists as a bridging solution so we can change the underlying
implementation and update the PageMeasurementResults API once we know the
underlying implementation is solid.
"""
from telemetry import value as value_module
from telemetry.value import histogram
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
def ConvertOldCallingConventionToValue(page, trace_name, units,
value, chart_name, data_type):
value_name = value_module.ValueNameFromTraceAndChartName(
trace_name, chart_name)
if data_type == 'default':
if isinstance(value, list):
return list_of_scalar_values.ListOfScalarValues(
page, value_name, units, value, important=True)
else:
return scalar.ScalarValue(page, value_name, units,
value, important=True)
elif data_type == 'unimportant':
if isinstance(value, list):
return list_of_scalar_values.ListOfScalarValues(
page, value_name, units, value, important=False)
else:
return scalar.ScalarValue(page, value_name, units,
value, important=False)
elif data_type == 'histogram':
assert isinstance(value, basestring)
return histogram.HistogramValue(
page, value_name, units, raw_value_json=value, important=True)
elif data_type == 'unimportant-histogram':
assert isinstance(value, basestring)
return histogram.HistogramValue(
page, value_name, units, raw_value_json=value, important=False)
elif data_type == 'informational':
raise NotImplementedError()
else:
raise ValueError('Unrecognized data type %s', data_type)
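# Illustrative only: converting an old-style scalar measurement; the page
# object and the trace/chart names below are assumptions.
#
#   value = ConvertOldCallingConventionToValue(
#       page, 'warm', 'ms', 42, 'startup_time', 'default')
#   # -> a scalar.ScalarValue (important=True) named from the trace and chart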
| bsd-3-clause |
zjffdu/zeppelin | python/src/main/resources/python/bootstrap_sql.py | 60 | 1189 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Setup SQL over Pandas DataFrames
# It requires next dependencies to be installed:
# - pandas
# - pandasql
from __future__ import print_function
try:
from pandasql import sqldf
pysqldf = lambda q: sqldf(q, globals())
except ImportError:
pysqldf = lambda q: print("Can not run SQL over Pandas DataFrame" +
"Make sure 'pandas' and 'pandasql' libraries are installed")
| apache-2.0 |
laiyuncong8404/RFAUtils | utils/ScreenShot.py | 2 | 1648 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os,re,time,platform,subprocess
# Determine the OS type: use findstr on Windows and grep on Linux
system = platform.system()
if system == "Windows":
find_util = "findstr"
else:
find_util = "grep"
# Check whether the ANDROID_HOME environment variable is set
if "ANDROID_HOME" in os.environ:
if system == "Windows":
command = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", "adb.exe")
else:
command = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", "adb")
else:
raise EnvironmentError(
"Adb not found in $ANDROID_HOME path: %s." %os.environ["ANDROID_HOME"])
# adb command
def adb(args):
cmd = "%s %s" %(command, str(args))
return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# adb shell command
def shell(args):
cmd = "%s shell %s" %(command, str(args))
return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# timestamp
def timestamp():
return time.strftime('%Y-%m-%d-%H-%M-%S',time.localtime(time.time()))
# Capture the current screen and save the screenshot file to the ScreenShot folder
PATH = lambda p: os.path.abspath(p)
def screenshot():
# path = PATH("%sScreenShot" %os.getcwd())
path = PATH("../AutoTest/ScreenShot")
shell("screencap -p /data/local/tmp/tmp.png").wait()
if not os.path.isdir(path):
os.makedirs(path)
adb("pull /data/local/tmp/tmp.png %s" %PATH("%s/%s.png" %(path, timestamp()))).wait()
shell("rm /data/local/tmp/tmp.png")
if __name__ == "__main__":
screenshot()
print "success"
| mit |
pawaranand/phrerp | erpnext/setup/doctype/item_group/test_item_group.py | 40 | 6965 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils.nestedset import NestedSetRecursionError, NestedSetMultipleRootsError, \
NestedSetChildExistsError, NestedSetInvalidMergeError, rebuild_tree, get_ancestors_of
test_records = frappe.get_test_records('Item Group')
class TestItem(unittest.TestCase):
def test_basic_tree(self, records=None):
min_lft = 1
max_rgt = frappe.db.sql("select max(rgt) from `tabItem Group`")[0][0]
if not records:
records = test_records[2:]
for item_group in records:
lft, rgt, parent_item_group = frappe.db.get_value("Item Group", item_group["item_group_name"],
["lft", "rgt", "parent_item_group"])
if parent_item_group:
parent_lft, parent_rgt = frappe.db.get_value("Item Group", parent_item_group,
["lft", "rgt"])
else:
# root
parent_lft = min_lft - 1
parent_rgt = max_rgt + 1
self.assertTrue(lft)
self.assertTrue(rgt)
self.assertTrue(lft < rgt)
self.assertTrue(parent_lft < parent_rgt)
self.assertTrue(lft > parent_lft)
self.assertTrue(rgt < parent_rgt)
self.assertTrue(lft >= min_lft)
self.assertTrue(rgt <= max_rgt)
no_of_children = self.get_no_of_children(item_group["item_group_name"])
self.assertTrue(rgt == (lft + 1 + (2 * no_of_children)))
no_of_children = self.get_no_of_children(parent_item_group)
self.assertTrue(parent_rgt == (parent_lft + 1 + (2 * no_of_children)))
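# The assertions above check the standard nested-set (modified preorder tree
# traversal) invariants: every node's (lft, rgt) interval nests strictly inside
# its parent's interval, and rgt == lft + 1 + 2 * (number of descendants).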
def get_no_of_children(self, item_group):
def get_no_of_children(item_groups, no_of_children):
children = []
for ig in item_groups:
children += frappe.db.sql_list("""select name from `tabItem Group`
where ifnull(parent_item_group, '')=%s""", ig or '')
if len(children):
return get_no_of_children(children, no_of_children + len(children))
else:
return no_of_children
return get_no_of_children([item_group], 0)
def test_recursion(self):
group_b = frappe.get_doc("Item Group", "_Test Item Group B")
group_b.parent_item_group = "_Test Item Group B - 3"
self.assertRaises(NestedSetRecursionError, group_b.save)
# cleanup
group_b.parent_item_group = "All Item Groups"
group_b.save()
def test_rebuild_tree(self):
rebuild_tree("Item Group", "parent_item_group")
self.test_basic_tree()
def move_it_back(self):
group_b = frappe.get_doc("Item Group", "_Test Item Group B")
group_b.parent_item_group = "All Item Groups"
group_b.save()
self.test_basic_tree()
def test_move_group_into_another(self):
# before move
old_lft, old_rgt = frappe.db.get_value("Item Group", "_Test Item Group C", ["lft", "rgt"])
# put B under C
group_b = frappe.get_doc("Item Group", "_Test Item Group B")
lft, rgt = group_b.lft, group_b.rgt
group_b.parent_item_group = "_Test Item Group C"
group_b.save()
self.test_basic_tree()
# after move
new_lft, new_rgt = frappe.db.get_value("Item Group", "_Test Item Group C", ["lft", "rgt"])
# lft should reduce
self.assertEquals(old_lft - new_lft, rgt - lft + 1)
# adjacent siblings, hence rgt diff will be 0
self.assertEquals(new_rgt - old_rgt, 0)
self.move_it_back()
def test_move_group_into_root(self):
group_b = frappe.get_doc("Item Group", "_Test Item Group B")
group_b.parent_item_group = ""
self.assertRaises(NestedSetMultipleRootsError, group_b.save)
# trick! works because it hasn't been rolled back :D
self.test_basic_tree()
self.move_it_back()
def print_tree(self):
import json
print json.dumps(frappe.db.sql("select name, lft, rgt from `tabItem Group` order by lft"), indent=1)
def test_move_leaf_into_another_group(self):
# before move
old_lft, old_rgt = frappe.db.get_value("Item Group", "_Test Item Group C", ["lft", "rgt"])
group_b_3 = frappe.get_doc("Item Group", "_Test Item Group B - 3")
lft, rgt = group_b_3.lft, group_b_3.rgt
# child of right sibling is moved into it
group_b_3.parent_item_group = "_Test Item Group C"
group_b_3.save()
self.test_basic_tree()
new_lft, new_rgt = frappe.db.get_value("Item Group", "_Test Item Group C", ["lft", "rgt"])
# lft should remain the same
self.assertEquals(old_lft - new_lft, 0)
# rgt should increase
self.assertEquals(new_rgt - old_rgt, rgt - lft + 1)
# move it back
group_b_3 = frappe.get_doc("Item Group", "_Test Item Group B - 3")
group_b_3.parent_item_group = "_Test Item Group B"
group_b_3.save()
self.test_basic_tree()
def test_delete_leaf(self):
# for checking later
parent_item_group = frappe.db.get_value("Item Group", "_Test Item Group B - 3", "parent_item_group")
rgt = frappe.db.get_value("Item Group", parent_item_group, "rgt")
ancestors = get_ancestors_of("Item Group", "_Test Item Group B - 3")
ancestors = frappe.db.sql("""select name, rgt from `tabItem Group`
where name in ({})""".format(", ".join(["%s"]*len(ancestors))), tuple(ancestors), as_dict=True)
frappe.delete_doc("Item Group", "_Test Item Group B - 3")
records_to_test = test_records[2:]
del records_to_test[4]
self.test_basic_tree(records=records_to_test)
# rgt of each ancestor would reduce by 2
for item_group in ancestors:
new_lft, new_rgt = frappe.db.get_value("Item Group", item_group.name, ["lft", "rgt"])
self.assertEquals(new_rgt, item_group.rgt - 2)
# insert it back
frappe.copy_doc(test_records[6]).insert()
self.test_basic_tree()
def test_delete_group(self):
# cannot delete group with child, but can delete leaf
self.assertRaises(NestedSetChildExistsError, frappe.delete_doc, "Item Group", "_Test Item Group B")
def test_merge_groups(self):
frappe.rename_doc("Item Group", "_Test Item Group B", "_Test Item Group C", merge=True)
records_to_test = test_records[2:]
del records_to_test[1]
self.test_basic_tree(records=records_to_test)
# insert Group B back
frappe.copy_doc(test_records[3]).insert()
self.test_basic_tree()
# move its children back
for name in frappe.db.sql_list("""select name from `tabItem Group`
where parent_item_group='_Test Item Group C'"""):
doc = frappe.get_doc("Item Group", name)
doc.parent_item_group = "_Test Item Group B"
doc.save()
self.test_basic_tree()
def test_merge_leaves(self):
frappe.rename_doc("Item Group", "_Test Item Group B - 2", "_Test Item Group B - 1", merge=True)
records_to_test = test_records[2:]
del records_to_test[3]
self.test_basic_tree(records=records_to_test)
# insert Group B - 2 back
frappe.copy_doc(test_records[5]).insert()
self.test_basic_tree()
def test_merge_leaf_into_group(self):
self.assertRaises(NestedSetInvalidMergeError, frappe.rename_doc, "Item Group", "_Test Item Group B - 3",
"_Test Item Group B", merge=True)
def test_merge_group_into_leaf(self):
self.assertRaises(NestedSetInvalidMergeError, frappe.rename_doc, "Item Group", "_Test Item Group B",
"_Test Item Group B - 3", merge=True)
| agpl-3.0 |