repo (stringclasses, 856 values) | pull_number (int64, 3 to 127k) | instance_id (stringlengths, 12 to 58) | issue_numbers (sequencelengths, 1 to 5) | base_commit (stringlengths, 40) | patch (stringlengths, 67 to 1.54M) | test_patch (stringlengths, 0 to 107M) | problem_statement (stringlengths, 3 to 307k) | hints_text (stringlengths, 0 to 908k) | created_at (timestamp[s]) |
---|---|---|---|---|---|---|---|---|---|
feast-dev/feast | 4,004 | feast-dev__feast-4004 | ["4000"] | a0f7472f200300f3a45aa404922dd67bb4ad237f | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,6 @@
"pandas>=1.4.3,<3",
# Higher than 4.23.4 seems to cause a seg fault
"protobuf>=4.24.0,<5.0.0",
- "proto-plus>=1.20.0,<2",
"pyarrow>=4",
"pydantic>=2.0.0",
"pygments>=2.12.0,<3",
@@ -70,8 +69,6 @@
# https://github.com/dask/dask/issues/10996
"dask>=2021.1.0,<2024.3.0",
"bowler", # Needed for automatic repo upgrades
- # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656).
- "httpx>=0.23.3",
"importlib-resources>=6.0.0,<7",
"importlib_metadata>=6.8.0,<7",
]
@@ -163,11 +160,12 @@
"black>=22.6.0,<23",
"isort>=5,<6",
"grpcio-testing>=1.56.2,<2",
+ # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656).
+ "httpx>=0.23.3",
"minio==7.1.0",
"mock==2.0.0",
"moto<5",
"mypy>=1.4.1",
- "avro==1.10.0",
"urllib3>=1.25.4,<3",
"psutil==5.9.0",
"py>=1.11.0", # https://github.com/pytest-dev/pytest/issues/10420
@@ -215,14 +213,8 @@
+ GRPCIO_REQUIRED
)
-
-# rtd builds fail because of mysql not being installed in their environment.
-# We can add mysql there, but it's not strictly needed. This will be faster for builds.
-DOCS_REQUIRED = CI_REQUIRED.copy()
-for _r in MYSQL_REQUIRED:
- DOCS_REQUIRED.remove(_r)
-
-DEV_REQUIRED = ["grpcio-testing~=1.0"] + CI_REQUIRED
+DOCS_REQUIRED = CI_REQUIRED
+DEV_REQUIRED = CI_REQUIRED
# Get git repo root directory
repo_root = str(pathlib.Path(__file__).resolve().parent)
| Update the behavior of install-python-ci-dependencies
## Expected Behavior
There's a very confusing and probably unnecessary python setup.py develop call in install-python-ci-dependencies make command that's causing this error. It should just be a call to only build protos instead
## Current Behavior
install-python-ci-dependencies:
python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt
COMPILE_GO=true python setup.py develop
## Steps to reproduce
### Specifications
- Version:
- Platform:
- Subsystem:
## Possible Solution
install-python-ci-dependencies:
python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt
pip install --no-deps -e .
python setup.py build_python_protos --inplace
| 2024-03-09T08:04:22 |
||
feast-dev/feast | 4,024 | feast-dev__feast-4024 | ["3770"] | 6d9156b3d6372d654048ea2bfb7eec3f3908d038 | diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py
--- a/sdk/python/feast/constants.py
+++ b/sdk/python/feast/constants.py
@@ -49,3 +49,6 @@
# Environment variable for feature server docker image tag
DOCKER_IMAGE_TAG_ENV_NAME: str = "FEAST_SERVER_DOCKER_IMAGE_TAG"
+
+# Default feature server registry ttl (seconds)
+DEFAULT_FEATURE_SERVER_REGISTRY_TTL = 5
diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py
--- a/sdk/python/feast/feature_server.py
+++ b/sdk/python/feast/feature_server.py
@@ -1,10 +1,10 @@
import json
+import sys
import threading
import traceback
import warnings
from typing import List, Optional
-import gunicorn.app.base
import pandas as pd
from dateutil import parser
from fastapi import FastAPI, HTTPException, Request, Response, status
@@ -15,6 +15,7 @@
import feast
from feast import proto_json, utils
+from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL
from feast.data_source import PushMode
from feast.errors import PushSourceNotFoundException
from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest
@@ -45,7 +46,10 @@ class MaterializeIncrementalRequest(BaseModel):
feature_views: Optional[List[str]] = None
-def get_app(store: "feast.FeatureStore", registry_ttl_sec: int = 5):
+def get_app(
+ store: "feast.FeatureStore",
+ registry_ttl_sec: int = DEFAULT_FEATURE_SERVER_REGISTRY_TTL,
+):
proto_json.patch()
app = FastAPI()
@@ -202,24 +206,27 @@ def materialize_incremental(body=Depends(get_body)):
return app
-class FeastServeApplication(gunicorn.app.base.BaseApplication):
- def __init__(self, store: "feast.FeatureStore", **options):
- self._app = get_app(
- store=store,
- registry_ttl_sec=options.get("registry_ttl_sec", 5),
- )
- self._options = options
- super().__init__()
+if sys.platform != "win32":
+ import gunicorn.app.base
- def load_config(self):
- for key, value in self._options.items():
- if key.lower() in self.cfg.settings and value is not None:
- self.cfg.set(key.lower(), value)
+ class FeastServeApplication(gunicorn.app.base.BaseApplication):
+ def __init__(self, store: "feast.FeatureStore", **options):
+ self._app = get_app(
+ store=store,
+ registry_ttl_sec=options["registry_ttl_sec"],
+ )
+ self._options = options
+ super().__init__()
+
+ def load_config(self):
+ for key, value in self._options.items():
+ if key.lower() in self.cfg.settings and value is not None:
+ self.cfg.set(key.lower(), value)
- self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker")
+ self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker")
- def load(self):
- return self._app
+ def load(self):
+ return self._app
def start_server(
@@ -229,13 +236,19 @@ def start_server(
no_access_log: bool,
workers: int,
keep_alive_timeout: int,
- registry_ttl_sec: int = 5,
+ registry_ttl_sec: int,
):
- FeastServeApplication(
- store=store,
- bind=f"{host}:{port}",
- accesslog=None if no_access_log else "-",
- workers=workers,
- keepalive=keep_alive_timeout,
- registry_ttl_sec=registry_ttl_sec,
- ).run()
+ if sys.platform != "win32":
+ FeastServeApplication(
+ store=store,
+ bind=f"{host}:{port}",
+ accesslog=None if no_access_log else "-",
+ workers=workers,
+ keepalive=keep_alive_timeout,
+ registry_ttl_sec=registry_ttl_sec,
+ ).run()
+ else:
+ import uvicorn
+
+ app = get_app(store, registry_ttl_sec)
+ uvicorn.run(app, host=host, port=port, access_log=(not no_access_log))
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -65,7 +65,7 @@
"typeguard>=4.0.0",
"fastapi>=0.68.0",
"uvicorn[standard]>=0.14.0,<1",
- "gunicorn",
+ "gunicorn; platform_system != 'Windows'",
# https://github.com/dask/dask/issues/10996
"dask>=2021.1.0,<2024.3.0",
"bowler", # Needed for automatic repo upgrades
| ModuleNotFoundError: No module named 'fcntl'
I pip installed feast and tried feast init my_project
got the following error
anaconda3\envs\feast_env\lib\site-packages\gunicorn\util.py", line 8, in <module>
import fcntl
ModuleNotFoundError: No module named 'fcntl'
- Version: 0.34.1
- Platform: windows 10
- Subsystem:
I tried researching on that error it shows
The fcntl module is not available on Windows. The functionality it exposes does not exist on that platform.
Please help me solve the above issue
| Hello, I have encountered the same issue as you. I have noticed that the reason for this error is due to the use of the Gunicorn library in the new version of Feast, which does not support Windows. Therefore, I resolved this issue by downgrade the version of Feast.
`pip install feast==0.31.0`
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
| 2024-03-21T05:12:55 |
|
feast-dev/feast | 4,025 | feast-dev__feast-4025 | ["4020"] | d8d75676cbaf565b6a6a097f33c49f56b852dcd7 | diff --git a/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py b/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py
--- a/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py
+++ b/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py
@@ -1,3 +1,4 @@
+import warnings
from typing import Optional
import psycopg2
@@ -37,6 +38,11 @@ def __init__(self, config: PostgresRegistryConfig, registry_path: str):
sslcert_path=getattr(config, "sslcert_path", None),
sslrootcert_path=getattr(config, "sslrootcert_path", None),
)
+ warnings.warn(
+ "PostgreSQLRegistryStore is deprecated and will be removed in the future releases. Please use SqlRegistry instead.",
+ DeprecationWarning,
+ )
+
self.table_name = config.path
self.cache_ttl_seconds = config.cache_ttl_seconds
| Deprecate PostgreSQLRegistryStore
Right now we have 2 ways to use postgres as a registry backend. The first is with scalable `SqlRegistry` that uses `sqlalchemy`, another is an older option of using `PostgreSQLRegistryStore` which keeps the whole proto in a single table. Since we are [recommending](https://docs.feast.dev/tutorials/using-scalable-registry) the scalable registry anyway, we should deprecate `PostgreSQLRegistryStore` and remove it soon after. Or maybe remove it directly? It's under contribs as of now.
| I agree it make sense to remove the `PostgreSQLRegistryStore`. but it might cause incompatibility if someone still specify it in `registry_store_type`. probably need to point it to the `sqlregistry` as well.
Do you mean rewiring one registry_store_type to point to other? I think that'll be too hard to figure out as the way they store protos in the database is completely different, plus the way they are configured from `RepoConfig` is also not exactly the same. Better to deprecate it and throw a warning instructing users to move to sql registry themselves.
i mean to update https://github.com/feast-dev/feast/blob/a0f7472f200300f3a45aa404922dd67bb4ad237f/sdk/python/feast/infra/registry/registry.py#L59 to point to sqlregistry directly. not sure if that will work.
That will fail, `SqlRegistry` expects the protos to be stored in a particular way. It won't be able to read the existing registry data written previously by `PostgreSQLRegistryStore`.
> That will fail, `SqlRegistry` expects the protos to be stored in a particular way. It won't be able to read the existing registry data written previously by `PostgreSQLRegistryStore`.
I see. let's deprecate it then. | 2024-03-21T06:22:52 |
|
feast-dev/feast | 4,026 | feast-dev__feast-4026 | ["3804"] | d8d75676cbaf565b6a6a097f33c49f56b852dcd7 | diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
--- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
@@ -94,7 +94,7 @@ def pull_latest_from_table_or_query(
FROM (
SELECT {a_field_string},
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row
- FROM ({from_expression}) a
+ FROM {from_expression} a
WHERE a."{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz
) b
WHERE _feast_row = 1
| bug:postgres source invalid `pull_latest_from_table_or_query` query output
I would submit a PR but I'm on an M1 and had some issues getting the env running. Submitting a proposed solution for the sake of available time.
## Expected Behavior
When using `PostgreSQLSource`, you should be able to use a table successfully. When running `feast masterialize [START_DATE] [END_DATE]`, as a user I would expect the table to be selected on successfully with no errors.
```
source=PostgreSQLSource(
name="source_user_events_v0",
timestamp_field="created_at",
table="offline_store.user_events"
)
```
## Current Behavior
[pull_latest_from_table_or_query](https://github.com/feast-dev/feast/blob/9df2224283e04760116b61bed3c8bfa7f17cbf7e/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py#L60) attempts to pull the latest data using the outputted string from [get_table_query_string](https://github.com/feast-dev/feast/blob/9df2224283e04760116b61bed3c8bfa7f17cbf7e/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py#L119). However on line [97](https://github.com/feast-dev/feast/blob/9df2224283e04760116b61bed3c8bfa7f17cbf7e/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py#L97) of `postgres.py` the query includes open and close parens assuming an inner query. These params should be removed from the query and handled by `get_table_query_string` (which it is).
## Steps to reproduce
Create a source as part of a feature view. Reference an existing table.
```
source=PostgreSQLSource(
name="source_user_events_v0",
timestamp_field="created_at",
table="offline_store.user_events"
)
```
execute `feast materialize [START_DATE] [END_DATE]`
expected output.
```
root@3b9fc17aa598:/usr/app# feast materialize 2023-10-16T00:00:00 2023-10-18T00:00:00
/usr/local/lib/python3.9/site-packages/feast/repo_config.py:233: RuntimeWarning: `entity_key_serialization_version` is either not specified in the feature_store.yaml, or is specified to a value <= 1.This serialization version may cause errors when trying to write fields with the `Long` data type into the online store. Specifying `entity_key_serialization_version` to 2 is recommended for new projects.
warnings.warn(
Materializing 1 feature views from 2023-10-16 00:00:00+00:00 to 2023-10-18 00:00:00+00:00 into the postgres online store.
fv_user_events_v0:
Traceback (most recent call last):
File "/usr/local/bin/feast", line 8, in <module>
sys.exit(cli())
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/click/decorators.py", line 33, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/feast/cli.py", line 546, in materialize_command
store.materialize(
File "/usr/local/lib/python3.9/site-packages/feast/usage.py", line 299, in wrapper
raise exc.with_traceback(traceback)
File "/usr/local/lib/python3.9/site-packages/feast/usage.py", line 288, in wrapper
return func(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/feast/feature_store.py", line 1395, in materialize
provider.materialize_single_feature_view(
File "/usr/local/lib/python3.9/site-packages/feast/infra/passthrough_provider.py", line 254, in materialize_single_feature_view
raise e
File "/usr/local/lib/python3.9/site-packages/feast/infra/materialization/local_engine.py", line 156, in _materialize_one
table = offline_job.to_arrow()
File "/usr/local/lib/python3.9/site-packages/feast/infra/offline_stores/offline_store.py", line 122, in to_arrow
return self._to_arrow_internal(timeout=timeout)
File "/usr/local/lib/python3.9/site-packages/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py", line 282, in _to_arrow_internal
cur.execute(query)
psycopg2.errors.SyntaxError: syntax error at or near ")"
LINE 8: FROM (offline_store.user_events) a
^
root@3b9fc17aa598:/usr/app#
```
### Specifications
- Version: 0.34.1
- Platform: Linux
- Subsystem: Ubuntu 20.04
## Possible Solution
Update this [query](https://github.com/feast-dev/feast/blob/9df2224283e04760116b61bed3c8bfa7f17cbf7e/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py#L90C1-L101C16) from:
```
query = f"""
SELECT
{b_field_string}
{f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""}
FROM (
SELECT {a_field_string},
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row
FROM ({from_expression}) a
WHERE a."{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz
) b
WHERE _feast_row = 1
"""
```
to:
```
query = f"""
SELECT
{b_field_string}
{f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""}
FROM (
SELECT {a_field_string},
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row
FROM {from_expression} a
WHERE a."{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz
) b
WHERE _feast_row = 1
"""
```
| hello! I ran exactly into the same issue and fixed it by applying the fix suggested by @david-dest01.
thanks for testing @danielsalvador
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
Am I mistaken or was the PR to fix this never merged?
I'm experiencing the same issue at the moment.
@job-almekinders the ticket was closed by maintainers - it was never resolved. https://github.com/feast-dev/feast/pull/3807
I'll re-open one tomorrow, after rebasing on master! | 2024-03-21T08:59:38 |
|
feast-dev/feast | 4,065 | feast-dev__feast-4065 | ["4062"] | d82d1ecb534ab35b901c36e920666196eae0ac79 | diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py
--- a/sdk/python/feast/infra/registry/sql.py
+++ b/sdk/python/feast/infra/registry/sql.py
@@ -205,7 +205,7 @@ def teardown(self):
saved_datasets,
validation_references,
}:
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = delete(t)
conn.execute(stmt)
@@ -399,7 +399,7 @@ def apply_feature_service(
)
def delete_data_source(self, name: str, project: str, commit: bool = True):
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = delete(data_sources).where(
data_sources.c.data_source_name == name,
data_sources.c.project_id == project,
@@ -441,7 +441,7 @@ def _list_on_demand_feature_views(self, project: str) -> List[OnDemandFeatureVie
)
def _list_project_metadata(self, project: str) -> List[ProjectMetadata]:
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(feast_metadata).where(
feast_metadata.c.project_id == project,
)
@@ -449,8 +449,11 @@ def _list_project_metadata(self, project: str) -> List[ProjectMetadata]:
if rows:
project_metadata = ProjectMetadata(project_name=project)
for row in rows:
- if row["metadata_key"] == FeastMetadataKeys.PROJECT_UUID.value:
- project_metadata.project_uuid = row["metadata_value"]
+ if (
+ row._mapping["metadata_key"]
+ == FeastMetadataKeys.PROJECT_UUID.value
+ ):
+ project_metadata.project_uuid = row._mapping["metadata_value"]
break
# TODO(adchia): Add other project metadata in a structured way
return [project_metadata]
@@ -557,7 +560,7 @@ def apply_user_metadata(
table = self._infer_fv_table(feature_view)
name = feature_view.name
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(table).where(
getattr(table.c, "feature_view_name") == name,
table.c.project_id == project,
@@ -612,11 +615,11 @@ def get_user_metadata(
table = self._infer_fv_table(feature_view)
name = feature_view.name
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(table).where(getattr(table.c, "feature_view_name") == name)
row = conn.execute(stmt).first()
if row:
- return row["user_metadata"]
+ return row._mapping["user_metadata"]
else:
raise FeatureViewNotFoundException(feature_view.name, project=project)
@@ -674,7 +677,7 @@ def _apply_object(
name = name or (obj.name if hasattr(obj, "name") else None)
assert name, f"name needs to be provided for {obj}"
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
update_datetime = datetime.utcnow()
update_time = int(update_datetime.timestamp())
stmt = select(table).where(
@@ -723,7 +726,7 @@ def _apply_object(
def _maybe_init_project_metadata(self, project):
# Initialize project metadata if needed
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
update_datetime = datetime.utcnow()
update_time = int(update_datetime.timestamp())
stmt = select(feast_metadata).where(
@@ -732,7 +735,7 @@ def _maybe_init_project_metadata(self, project):
)
row = conn.execute(stmt).first()
if row:
- usage.set_current_project_uuid(row["metadata_value"])
+ usage.set_current_project_uuid(row._mapping["metadata_value"])
else:
new_project_uuid = f"{uuid.uuid4()}"
values = {
@@ -753,7 +756,7 @@ def _delete_object(
id_field_name: str,
not_found_exception: Optional[Callable],
):
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = delete(table).where(
getattr(table.c, id_field_name) == name, table.c.project_id == project
)
@@ -777,13 +780,13 @@ def _get_object(
):
self._maybe_init_project_metadata(project)
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(table).where(
getattr(table.c, id_field_name) == name, table.c.project_id == project
)
row = conn.execute(stmt).first()
if row:
- _proto = proto_class.FromString(row[proto_field_name])
+ _proto = proto_class.FromString(row._mapping[proto_field_name])
return python_class.from_proto(_proto)
if not_found_exception:
raise not_found_exception(name, project)
@@ -799,20 +802,20 @@ def _list_objects(
proto_field_name: str,
):
self._maybe_init_project_metadata(project)
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(table).where(table.c.project_id == project)
rows = conn.execute(stmt).all()
if rows:
return [
python_class.from_proto(
- proto_class.FromString(row[proto_field_name])
+ proto_class.FromString(row._mapping[proto_field_name])
)
for row in rows
]
return []
def _set_last_updated_metadata(self, last_updated: datetime, project: str):
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(feast_metadata).where(
feast_metadata.c.metadata_key
== FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value,
@@ -846,7 +849,7 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str):
conn.execute(insert_stmt)
def _get_last_updated_metadata(self, project: str):
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
stmt = select(feast_metadata).where(
feast_metadata.c.metadata_key
== FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value,
@@ -855,13 +858,13 @@ def _get_last_updated_metadata(self, project: str):
row = conn.execute(stmt).first()
if not row:
return None
- update_time = int(row["last_updated_timestamp"])
+ update_time = int(row._mapping["last_updated_timestamp"])
return datetime.utcfromtimestamp(update_time)
def _get_all_projects(self) -> Set[str]:
projects = set()
- with self.engine.connect() as conn:
+ with self.engine.begin() as conn:
for table in {
entities,
data_sources,
@@ -872,6 +875,6 @@ def _get_all_projects(self) -> Set[str]:
stmt = select(table)
rows = conn.execute(stmt).all()
for row in rows:
- projects.add(row["project_id"])
+ projects.add(row._mapping["project_id"])
return projects
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@
"pygments>=2.12.0,<3",
"PyYAML>=5.4.0,<7",
"requests",
- "SQLAlchemy[mypy]>1,<2",
+ "SQLAlchemy[mypy]>1",
"tabulate>=0.8.0,<1",
"tenacity>=7,<9",
"toml>=0.10.0,<1",
| SQLAlchemy 2 compatibility
**Is your feature request related to a problem? Please describe.**
There is a security vulnerability PVE-2022-51668 in SQLAlchemy < 2.0.0.b1.
https://data.safetycli.com/v/51668/f17/
However feast pinned the SQLAlchemy version to be < 2. We are struggling to have solve this vulnerability in our feast dependent projects.
https://github.com/feast-dev/feast/blob/master/setup.py#L60
**Describe the solution you'd like**
Change the version specification to be >1 only
**Describe alternatives you've considered**
NA
**Additional context**
NA
| โ feast git:(master) โ grep --include=\*.py -rnw ./sdk/python -e "sqlalchemy"
./sdk/python/feast/infra/registry/sql.py:9:from sqlalchemy import ( # type: ignore
./sdk/python/feast/infra/registry/sql.py:22:from sqlalchemy.engine import Engine
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py:5:from sqlalchemy import create_engine
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py:12:import sqlalchemy
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py:14:from sqlalchemy import create_engine
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py:15:from sqlalchemy.engine import Engine
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py:16:from sqlalchemy.orm import sessionmaker
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py:393: engine: sqlalchemy.engine.Engine,
./sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssqlserver_source.py:8:from sqlalchemy import create_engine
https://docs.sqlalchemy.org/en/20/changelog/migration_20.html
Major two Warnings:
1, RemovedIn20Warning: The current statement is being autocommitted using implicit autocommit, which will be removed in SQLAlchemy 2.0. Use the .begin() method of Engine or Connection in order to use an explicit transaction for DML and DDL statements. (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)
2, RemovedIn20Warning: Using non-integer/slice indices on Row is deprecated and will be removed in version 2.0; please use row._mapping[<key>], or the mappings() accessor on the Result object. (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) | 2024-04-02T05:04:13 |
|
feast-dev/feast | 4,085 | feast-dev__feast-4085 | ["4084"] | 318a2b8bfc94f10c81206071fcb1d41f19683288 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@
"Jinja2>=2,<4",
"jsonschema",
"mmh3",
- "numpy>=1.22,<1.25",
+ "numpy>=1.22,<2",
"pandas>=1.4.3,<3",
# Higher than 4.23.4 seems to cause a seg fault
"protobuf>=4.24.0,<5.0.0",
| Remove numpy <1.25 dependency in setup.py
In setup.py, I can see that the dependency for pandas has already been updated from
"pandas>=1.4.3,<2" (which is still in the current PyPI version) to "pandas>=1.4.3,<3", but numpy hasn't, which will break the installation if I am using e.g. pandas 2.2.1, that requires numpy (>=1.26.0,<2)
## Problem
"numpy>=1.22,<1.25"
## Solution
"numpy>=1.22,<2"
## Steps to reproduce
poetry add git+https://github.com/feast-dev/feast.git
| @softwareentrepreneer thanks, looks like the pin was [added](https://github.com/feast-dev/feast/commit/774ed33a067bf9bf087520325b72f4f4d194106a) simply to get rid of some deprecation warnings, so this shouldn't be an issue. Would you like to open a PR? | 2024-04-09T09:43:27 |
|
feast-dev/feast | 4,090 | feast-dev__feast-4090 | ["3886"] | 318a2b8bfc94f10c81206071fcb1d41f19683288 | diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py
--- a/sdk/python/feast/constants.py
+++ b/sdk/python/feast/constants.py
@@ -29,11 +29,11 @@
# Environment variable for registry
REGISTRY_ENV_NAME: str = "REGISTRY_BASE64"
-# Environment variable for toggling usage
+# Environment variable for toggling the Usage feature
FEAST_USAGE = "FEAST_USAGE"
-# Default value for FEAST_USAGE when environment variable is not set
-DEFAULT_FEAST_USAGE_VALUE = "True"
+# Environment variable for FEAST_USAGE_ENDPOINT
+FEAST_USAGE_ENDPOINT = "FEAST_USAGE_ENDPOINT"
# Environment variable for the path for overwriting universal test configs
FULL_REPO_CONFIGS_MODULE_ENV_NAME: str = "FULL_REPO_CONFIGS_MODULE"
diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py
--- a/sdk/python/feast/usage.py
+++ b/sdk/python/feast/usage.py
@@ -30,15 +30,17 @@
import requests
from feast import flags_helper
-from feast.constants import DEFAULT_FEAST_USAGE_VALUE, FEAST_USAGE
+from feast.constants import FEAST_USAGE, FEAST_USAGE_ENDPOINT
from feast.version import get_version
-USAGE_ENDPOINT = "https://usage.feast.dev"
-
_logger = logging.getLogger(__name__)
_executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
-_is_enabled = os.getenv(FEAST_USAGE, default=DEFAULT_FEAST_USAGE_VALUE) == "True"
+_is_enabled = os.getenv(FEAST_USAGE, default="False") == "True"
+
+# Default usage endpoint value.
+# Will raise an exception if the configured value is not working.
+_usage_endpoint = os.getenv(FEAST_USAGE_ENDPOINT, default="")
_constant_attributes = {
"project_id": "",
@@ -177,7 +179,7 @@ def _set_installation_id():
def _export(event: typing.Dict[str, typing.Any]):
- _executor.submit(requests.post, USAGE_ENDPOINT, json=event, timeout=2)
+ _executor.submit(requests.post, _usage_endpoint, json=event, timeout=2)
def _produce_event(ctx: UsageContext):
| usage.feast.dev ssl cert wrong
Looks like ssl cert got updated yesterday, and it's set for *.netlify.app and causing issues in logs:

```
WARNING 2024-01-16 16:46:30,278 urllib3.connection.connection:547 | Certificate did not match expected hostname: usage.feast.dev. Certificate: {'subject': ((('countryName', 'US'),), (('stateOrProvinceName', 'California'),), (('localityName', 'San Francisco'),), (('organizationName', 'Netlify, Inc'),), (('commonName', '*.netlify.app'),)), 'issuer': ((('countryName', 'US'),), (('organizationName', 'DigiCert Inc'),), (('commonName', 'DigiCert Global G2 TLS RSA SHA256 2020 CA1'),)), 'version': 3, 'serialNumber': '03ED167A2659DA278E7B21CDBEB33E32', 'notBefore': 'Jan 15 00:00:00 2024 GMT', 'notAfter': 'Feb 14 23:59:59 2025 GMT', 'subjectAltName': (('DNS', '*.netlify.app'), ('DNS', 'netlify.app')), 'OCSP': ('http://ocsp.digicert.com/',), 'caIssuers': ('http://cacerts.digicert.com/DigiCertGlobalG2TLSRSASHA2562020CA1-1.crt',), 'crlDistributionPoints': ('http://crl3.digicert.com/DigiCertGlobalG2TLSRSASHA2562020CA1-1.crl', 'http://crl4.digicert.com/DigiCertGlobalG2TLSRSASHA2562020CA1-1.crl')}
```
| ran into the same issue. as a stopgap, i set `FEAST_USAGE` to `false` and it went away. I know I shouldn't really be advocating for people to work around feast telemetry, but it should keep things quiet until a real fix goes in.
This has already been fixed, right?
found similar warnings when using Feast in OpenShift/Kubernetes but setting environments ```FEAST_USAGE=False``` and it get disappeared.
When running "feast serve" the certificate warning still appears as of a pip download on Feb 25, 2024.
let's disable the feature of "usage.feast.dev". It will be more useful for the users who use Feast at Enterprise level. | 2024-04-10T03:01:26 |
|
feast-dev/feast | 4,116 | feast-dev__feast-4116 | ["4030"] | f2b4eb94add8f86afa4e168236e8fcd11968510e | diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py
--- a/sdk/python/feast/feature_view_projection.py
+++ b/sdk/python/feast/feature_view_projection.py
@@ -53,7 +53,7 @@ def to_proto(self) -> FeatureViewProjectionProto:
def from_proto(proto: FeatureViewProjectionProto):
feature_view_projection = FeatureViewProjection(
name=proto.feature_view_name,
- name_alias=proto.feature_view_name_alias,
+ name_alias=proto.feature_view_name_alias or None,
features=[],
join_key_map=dict(proto.join_key_map),
desired_features=[],
| Bug: UserWarning when passing `FeatureService` object to both `apply()` and `get_online_features()` - Caused by difference in `FeatureViewProjection.from_proto()` and `FeatureViewProjection.from_definition()`
## Context
If a `FeatureService` object is created and is being passed to both the `apply()` and the `get_online_features()` method, the following user warning is thrown:
```
UserWarning: The FeatureService object that has been passed in as an argument is inconsistent with the version from the registry. Potentially a newer version of the FeatureService has been applied to the registry.
```
This is caused by a bug in the creation/retrieval of `feature_view_projections`, which is an attribute of `FeatureService`. An empty string is set to the `name_alias` value of `FeatureViewProjection` when calling [`from_proto`](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/feature_view_projection.py#L52-L64). However, when creating the `FeatureViewProjection` by creating a `FeatureService` object (using the default value) `name_alias` is set to `None`, because it is created with [`FeatureViewProjection.from_definition()`](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/feature_view_projection.py#L66-L73).
The warning is raised [here](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/feature_store.py#L532-L541), because the `feature_service_from_registry` has the empty string value for the underlying `FeatureViewProjection.name_alias` object, while the for `_features` the underlying `FeatureViewProjection.name_alias` is valued `None`.
## Expected Behavior
I would expect that if a `FeatureViewProjection` encapsulated within a `FeatureService` has a `None` value for the `name_alias` attribute when being stored in the feature store, that it would also load with the same `None` value when calling `FeatureViewProjection.from_proto()`.
Then the comparison should also not fail, and the warning should not be thrown.
## Current Behavior
The `FeatureViewProjection` is loaded with an empty string for the value `name_alias` when `FeatureViewProjection.from_proto()` is called.
This causes the. comparison to fail, and the warning to be thrown.
## Steps to reproduce
docker-compose.yml
```
---
version: "3"
services:
db:
restart: always
image: postgres:16-alpine
container_name: feast_db
ports:
- "5432:5432"
environment:
- POSTGRES_DB=feature_store
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=test
```
feature_store.yaml
```
project: project_name
provider: local
registry:
registry_type: sql
path: postgresql://postgres:[email protected]:5432/feature_store
```
Python script
```
from datetime import timedelta
import pandas as pd
from feast import (
Entity,
FeatureService,
FeatureStore,
FeatureView,
Field,
SnowflakeSource,
ValueType,
)
from feast.types import Float32
feature_store = FeatureStore()
# Apply
entity = Entity(
name="entity",
join_keys=["ID"],
value_type=ValueType.STRING,
)
source = SnowflakeSource(
name="source_snowflake",
timestamp_field="EVENT_TIMESTAMP",
schema="TEMP",
table="FEAST_FEATURES",
)
feature_view = FeatureView(
name="feature_view__v1",
entities=[entity],
ttl=timedelta(days=0),
schema=[
Field(name="FEATURE", dtype=Float32),
],
online=True,
source=source,
)
feature_service = FeatureService(
name="feature_service__v1",
features=[feature_view],
)
feature_store.apply(
[
entity,
source,
feature_view,
feature_service,
]
)
# Inference
entity_rows = [{"ID": "ID1"}, {"ID": "ID2"}]
entity_df = pd.DataFrame(entity_rows)
entity_df["event_timestamp"] = pd.to_datetime("now", utc=True)
online_features = feature_store.get_online_features(
entity_rows=entity_rows,
features=feature_service,
).to_dict()
print(online_features)
```
### Specifications
Using postgres registry.
- Version: 0.36.0
- Platform: MacOS - M1
- Subsystem: Sonoma 14.1.1
## Possible Solution
I have little to no experience with proto, so I'm not sure whether it would be possible to load the None value directly from the proto definition.
One potential solution would be to check whether an empty string is loaded for this field when loading from proto, and then set it to None in the `from_proto` function.
| Tracked down the PR from where the mysterious `or ""` originates #2321 Still no idea why that was added, though. Normally, round-trip to and from proto should not change the object (at least as far as `==` operator is concerned), so it does look like a bug.
Thanks for your reply.
I'll try to open a PR for this :)
I see there might be some changes required for the `proto` definition as well. I'm however not very familiar with this, and I'm unfortunately not able to look into this right now.. If someone else has the knowledge and time to pick this up soon that would be highly appreciated ๐
@job-almekinders my initial though was that simply removing `or ""` from [to_proto](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/feature_view_projection.py#L44) method would be sufficient, but I may be wrong... I'll take a stab at it later if no one picks it up | 2024-04-18T22:42:50 |
|
feast-dev/feast | 4,117 | feast-dev__feast-4117 | ["4110"] | f2b4eb94add8f86afa4e168236e8fcd11968510e | diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py
--- a/sdk/python/feast/feature_server.py
+++ b/sdk/python/feast/feature_server.py
@@ -10,7 +10,7 @@
from fastapi import FastAPI, HTTPException, Request, Response, status
from fastapi.logger import logger
from fastapi.params import Depends
-from google.protobuf.json_format import MessageToDict, Parse
+from google.protobuf.json_format import MessageToDict
from pydantic import BaseModel
import feast
@@ -18,7 +18,6 @@
from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL
from feast.data_source import PushMode
from feast.errors import PushSourceNotFoundException
-from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest
# TODO: deprecate this in favor of push features
@@ -83,34 +82,25 @@ def shutdown_event():
@app.post("/get-online-features")
def get_online_features(body=Depends(get_body)):
try:
- # Validate and parse the request data into GetOnlineFeaturesRequest Protobuf object
- request_proto = GetOnlineFeaturesRequest()
- Parse(body, request_proto)
-
+ body = json.loads(body)
# Initialize parameters for FeatureStore.get_online_features(...) call
- if request_proto.HasField("feature_service"):
+ if "feature_service" in body:
features = store.get_feature_service(
- request_proto.feature_service, allow_cache=True
+ body["feature_service"], allow_cache=True
)
else:
- features = list(request_proto.features.val)
-
- full_feature_names = request_proto.full_feature_names
+ features = body["features"]
- batch_sizes = [len(v.val) for v in request_proto.entities.values()]
- num_entities = batch_sizes[0]
- if any(batch_size != num_entities for batch_size in batch_sizes):
- raise HTTPException(status_code=500, detail="Uneven number of columns")
+ full_feature_names = body.get("full_feature_names", False)
response_proto = store._get_online_features(
features=features,
- entity_values=request_proto.entities,
+ entity_values=body["entities"],
full_feature_names=full_feature_names,
- native_entity_values=False,
).proto
# Convert the Protobuf object to JSON and return it
- return MessageToDict( # type: ignore
+ return MessageToDict(
response_proto, preserving_proto_field_name=True, float_precision=18
)
except Exception as e:
| feast serve convert number to INT64
## Expected Behavior
when we use the get-online-features endpoint the request body numbers converted to Int64 type and while from python file calling get_online_feature function convert number to Int32 which leads to inconsistency between both use
if the entity value type is Int32 then feature servering with online feast serve command will not work expected behavior is to convert the value to Int32 while it is being converted to Int64
## Current Behavior
when using feast serve number are converted to Int64 type
## Steps to reproduce
create a entity with a column with type Int32 create a feature view with the entity apply the changes using feast apply materialize the data to online store start feature server using feast serve and call endpoint /get-online-feature to retrive feature ... expected response is the feature retrived while it show none due to type issue
### Specifications
- Version: 0.36
- Platform:
- Subsystem:
## Possible Solution
before converting the type to Int64 get the repo and convert the type to the entity data type defined
other solution is just write in doc to use Int64 and Float64 type while defining entity
| @Akshay-deqode thanks for filing the issue, I was able to reproduce. | 2024-04-19T10:36:09 |
|
alibaba/FederatedScope | 103 | alibaba__FederatedScope-103 | ["102"] | e9b4b38a9e07dbaf9248b147fd7bfb469cb7bb7a | diff --git a/federatedscope/core/worker/server.py b/federatedscope/core/worker/server.py
--- a/federatedscope/core/worker/server.py
+++ b/federatedscope/core/worker/server.py
@@ -245,6 +245,9 @@ def check_and_move_on(self,
min_received_num = self._cfg.federate.sample_client_num
assert min_received_num <= self.sample_client_num
+ if check_eval_result:
+ min_received_num = len(list(self.comm_manager.neighbors.keys()))
+
move_on_flag = True # To record whether moving to a new training round or finishing the evaluation
if self.check_buffer(self.state, min_received_num, check_eval_result):
| Incorrect Evaluation
In each round, multiple evaluation results are reported in the logs, each of which seems to be the results on a fraction of clients.
| 2022-05-24T06:15:29 |
||
alibaba/FederatedScope | 398 | alibaba__FederatedScope-398 | ["397"] | 000c7ad02abe134eeee907a65be76052eea21468 | diff --git a/federatedscope/core/communication.py b/federatedscope/core/communication.py
--- a/federatedscope/core/communication.py
+++ b/federatedscope/core/communication.py
@@ -186,7 +186,8 @@ def _create_stub(receiver_address):
request = message.transform(to_list=True)
try:
stub.sendMessage(request)
- except grpc._channel._InactiveRpcError:
+ except grpc._channel._InactiveRpcError as error:
+ logger.warning(error)
pass
channel.close()
diff --git a/federatedscope/core/configs/cfg_fl_setting.py b/federatedscope/core/configs/cfg_fl_setting.py
--- a/federatedscope/core/configs/cfg_fl_setting.py
+++ b/federatedscope/core/configs/cfg_fl_setting.py
@@ -72,8 +72,8 @@ def extend_fl_setting_cfg(cfg):
# data_idx = -1 means that the whole dataset is owned by the participant.
# when data_idx is other invalid values excepted for -1, we randomly
# sample the data_idx for simulation
- cfg.distribute.grpc_max_send_message_length = 100 * 1024 * 1024
- cfg.distribute.grpc_max_receive_message_length = 100 * 1024 * 1024
+ cfg.distribute.grpc_max_send_message_length = 300 * 1024 * 1024 # 300M
+ cfg.distribute.grpc_max_receive_message_length = 300 * 1024 * 1024 # 300M
cfg.distribute.grpc_enable_http_proxy = False
cfg.distribute.grpc_compression = 'nocompression' # [deflate, gzip]
| The message transformation in distributed mode is inefficient
When the shared model is large (e.g., >200M), the transformation defined in https://github.com/alibaba/FederatedScope/blob/master/federatedscope/core/message.py#L205 needs lots of time.
We need to make it more efficient
| 2022-10-17T03:54:54 |
||
alibaba/FederatedScope | 470 | alibaba__FederatedScope-470 | ["462"] | cd9237c4217747405a8e326948602a2b83bf3e8c | diff --git a/federatedscope/core/workers/server.py b/federatedscope/core/workers/server.py
--- a/federatedscope/core/workers/server.py
+++ b/federatedscope/core/workers/server.py
@@ -655,11 +655,14 @@ def broadcast_model_para(self,
else:
model_para = {} if skip_broadcast else self.model.state_dict()
+ # We define the evaluation happens at the end of an epoch
+ rnd = self.state - 1 if msg_type == 'evaluate' else self.state
+
self.comm_manager.send(
Message(msg_type=msg_type,
sender=self.ID,
receiver=receiver,
- state=min(self.state, self.total_round_num),
+ state=min(rnd, self.total_round_num),
timestamp=self.cur_timestamp,
content=model_para))
if self._cfg.federate.online_aggr:
| Inappropriate output order
> 2022-12-06 21:22:48,250 (server:329) INFO: Server: Starting evaluation at the end of round 1.
2022-12-06 21:22:48,253 (server:335) INFO: ----------- Starting a new training round (Round #2) -------------
2022-12-06 21:22:49,032 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:49,386 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:50,482 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:50,753 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:51,852 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:52,207 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:53,362 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:53,716 (context:224) WARNING: cur_mode `test` mismatch mode `val`, will use `val` to calculate `ctx.var`.
2022-12-06 21:22:56,703 (client:306) INFO: {'Role': 'Client #1', 'Round': 2, 'Results_raw': {'train_acc': 0.364414, 'train_total': 1169, 'train_loss': 2097.855597, 'train_avg_loss': 1.794573}}
2022-12-06 21:23:00,110 (client:306) INFO: {'Role': 'Client #3', 'Round': 2, 'Results_raw': {'train_acc': 0.412195, 'train_total': 1640, 'train_loss': 2594.247381, 'train_avg_loss': 1.581858}}
2022-12-06 21:23:03,130 (client:306) INFO: {'Role': 'Client #2', 'Round': 2, 'Results_raw': {'train_acc': 0.267969, 'train_total': 1433, 'train_loss': 3005.513627, 'train_avg_loss': 2.097358}}
2022-12-06 21:23:03,243 (server:335) INFO: ----------- Starting a new training round (Round #3) -------------
2022-12-06 21:23:06,647 (client:306) INFO: {'Role': 'Client #3', 'Round': 3, 'Results_raw': {'train_acc': 0.473171, 'train_total': 1640, 'train_loss': 2338.653491, 'train_avg_loss': 1.426008}}
2022-12-06 21:23:09,206 (client:306) INFO: {'Role': 'Client #1', 'Round': 3, 'Results_raw': {'train_acc': 0.443969, 'train_total': 1169, 'train_loss': 1788.425374, 'train_avg_loss': 1.529876}}
2022-12-06 21:23:12,214 (client:306) INFO: {'Role': 'Client #2', 'Round': 3, 'Results_raw': {'train_acc': 0.300768, 'train_total': 1433, 'train_loss': 2753.047348, 'train_avg_loss': 1.921177}}
/home/jones.wz/anaconda3/envs/fs/lib/python3.9/site-packages/numpy/core/fromnumeric.py:3440: RuntimeWarning: Mean of empty slice.
return _methods._mean(a, axis=axis, dtype=dtype,
/home/jones.wz/anaconda3/envs/fs/lib/python3.9/site-packages/numpy/core/_methods.py:189: RuntimeWarning: invalid value encountered in double_scalars
ret = ret.dtype.type(ret / rcount)
2022-12-06 21:23:12,220 (server:593) INFO: {'Role': 'Server #', 'Round': 2, 'Results_weighted_avg': {'test_acc': 0.22716, 'test_total': 405.0, 'test_loss': 821.534889, 'test_avg_loss': 1.996196, 'val_acc': 0.216529, 'val_total': 201.666667, 'val_loss': 411.385751, 'val_
avg_loss': 2.002012}, 'Results_avg': {'test_acc': 0.231187, 'test_total': 405.0, 'test_loss': 808.45956, 'test_avg_loss': 2.000993, 'val_acc': 0.224764, 'val_total': 201.666667, 'val_loss': 403.73907, 'val_avg_loss': 1.999528}, 'Results_fairness': {'test_total': 405.0,
'val_total': 201.666667, 'test_acc_std': 0.029981, 'test_acc_bottom_decile': 0.202128, 'test_acc_top_decile': 0.272455, 'test_acc_min': 0.202128, 'test_acc_max': 0.272455, 'test_acc_bottom10%': nan, 'test_acc_top10%': 0.272455, 'test_acc_cos1': 0.991696, 'test_acc_entro
py': 1.090368, 'test_loss_std': 99.240095, 'test_loss_bottom_decile': 669.849205, 'test_loss_top_decile':
It seems that the evaluation is conducted at the end of round1. However, its results are printed after the display of round3's training results. This is likely to make users confused.
| 2022-12-09T04:23:07 |
||
alibaba/FederatedScope | 496 | alibaba__FederatedScope-496 | ["488"] | bdcd3a51fd8d5db1a172d62888e473eb45e4781a | diff --git a/federatedscope/core/message.py b/federatedscope/core/message.py
--- a/federatedscope/core/message.py
+++ b/federatedscope/core/message.py
@@ -21,7 +21,7 @@ def __init__(self,
sender=0,
receiver=0,
state=0,
- content=None,
+ content='None',
timestamp=0,
strategy=None):
self._msg_type = msg_type
| Message asked for local pretraining is missing the "content" para when train a graph model in distributed mode?
If no "content" para, there will raise ValueError('The data type {} has not been supported.'.format(type(value))) in Message.create_by_type() function.
| Thanks a lot for your interest! This issue is caused by that NoneType is not supported by grpc proto, you can change None to a string (such as 'None') as a workaround. | 2023-01-16T03:05:24 |
|
alibaba/FederatedScope | 546 | alibaba__FederatedScope-546 | ["543"] | 883fc7b5da84792d648bed238e231cc655261ada | diff --git a/federatedscope/core/fed_runner.py b/federatedscope/core/fed_runner.py
--- a/federatedscope/core/fed_runner.py
+++ b/federatedscope/core/fed_runner.py
@@ -59,6 +59,7 @@ def __init__(self,
config.ready_for_run()
self.cfg = config
self.client_cfgs = client_configs
+ self.serial_num_for_msg = 0
self.mode = self.cfg.federate.mode.lower()
self.gpu_manager = GPUManager(gpu_available=self.cfg.use_gpu,
@@ -471,6 +472,8 @@ def _run_simulation(self):
# For the server, move the received message to a
# cache for reordering the messages according to
# the timestamps
+ msg.serial_num = self.serial_num_for_msg
+ self.serial_num_for_msg += 1
heapq.heappush(server_msg_cache, msg)
else:
self._handle_msg(msg)
diff --git a/federatedscope/core/message.py b/federatedscope/core/message.py
--- a/federatedscope/core/message.py
+++ b/federatedscope/core/message.py
@@ -23,7 +23,8 @@ def __init__(self,
state=0,
content='None',
timestamp=0,
- strategy=None):
+ strategy=None,
+ serial_num=0):
self._msg_type = msg_type
self._sender = sender
self._receiver = receiver
@@ -31,6 +32,7 @@ def __init__(self,
self._content = content
self._timestamp = timestamp
self._strategy = strategy
+ self.serial_num = serial_num
@property
def msg_type(self):
@@ -93,8 +95,10 @@ def strategy(self, value):
def __lt__(self, other):
if self.timestamp != other.timestamp:
return self.timestamp < other.timestamp
- else:
+ elif self.state != other.state:
return self.state < other.state
+ else:
+ return self.serial_num < other.serial_num
def transform_to_list(self, x):
if isinstance(x, list) or isinstance(x, tuple):
| A bug when performing asynchronous FL
**Describe the bug**
When performing asynchronous FL, if we set client_num = 50 and sample_client_num > 47 (e.g., 48), the training procedure will remain stagnant in round #0 and never get into round #1.
**To Reproduce**
Steps to reproduce the behavior:
1. Specify the FL settings as in FederatedScope/scripts/example_configs/asyn_cifar10.yaml.
2. Set "cfg.federate.client_num = 50" and "cfg.federate.sample_client_num = 48"
3. Run with command
```python federatedscope/main.py --cfg scripts/example_configs/asyn_cifar10.yaml```
5. The training logs are as follows.
**Screenshots**
<img width="918" alt="image" src="https://user-images.githubusercontent.com/45845531/225267160-8b125511-e381-4c4e-a3c0-5792db773cbd.png">
| 2023-03-16T11:57:28 |
||
alibaba/FederatedScope | 547 | alibaba__FederatedScope-547 | ["502"] | c6a7de482047a4c84224fbc2744aed30811f4213 | diff --git a/federatedscope/core/parallel/parallel_runner.py b/federatedscope/core/parallel/parallel_runner.py
--- a/federatedscope/core/parallel/parallel_runner.py
+++ b/federatedscope/core/parallel/parallel_runner.py
@@ -195,6 +195,7 @@ def __init__(self, rank, config, server_class, receive_channel,
self.server_id = 0
self.resource_info = resource_info
self.client_resource_info = client_resource_info
+ self.serial_num_for_msg = 0
def setup(self):
self.config.defrost()
@@ -248,6 +249,8 @@ def run(self):
# For the server, move the received message to a
# cache for reordering the messages according to
# the timestamps
+ msg.serial_num = self.serial_num_for_msg
+ self.serial_num_for_msg += 1
heapq.heappush(server_msg_cache, msg)
elif len(server_msg_cache) > 0:
msg = heapq.heappop(server_msg_cache)
diff --git a/federatedscope/core/workers/server.py b/federatedscope/core/workers/server.py
--- a/federatedscope/core/workers/server.py
+++ b/federatedscope/core/workers/server.py
@@ -132,10 +132,10 @@ def __init__(self,
if self._cfg.federate.make_global_eval:
# set up a trainer for conducting evaluation in server
- assert self.model is not None
+ assert self.models is not None
assert self.data is not None
self.trainer = get_trainer(
- model=self.model,
+ model=self.models[0],
data=self.data,
device=self.device,
config=self._cfg,
@@ -456,7 +456,7 @@ def _perform_federated_aggregation(self):
staleness.append((client_id, self.state - state))
# Trigger the monitor here (for training)
- self._monitor.calc_model_metric(self.model.state_dict(),
+ self._monitor.calc_model_metric(self.models[0].state_dict(),
msg_list,
rnd=self.state)
@@ -664,7 +664,7 @@ def broadcast_model_para(self,
model_para = [{} if skip_broadcast else model.state_dict()
for model in self.models]
else:
- model_para = {} if skip_broadcast else self.model.state_dict()
+ model_para = {} if skip_broadcast else self.models[0].state_dict()
# We define the evaluation happens at the end of an epoch
rnd = self.state - 1 if msg_type == 'evaluate' else self.state
@@ -781,7 +781,7 @@ def trigger_for_start(self):
else:
if self._cfg.backend == 'torch':
model_size = sys.getsizeof(pickle.dumps(
- self.model)) / 1024.0 * 8.
+ self.models[0])) / 1024.0 * 8.
else:
# TODO: calculate model size for TF Model
model_size = 1.0
@@ -851,7 +851,7 @@ def terminate(self, msg_type='finish'):
if self.model_num > 1:
model_para = [model.state_dict() for model in self.models]
else:
- model_para = self.model.state_dict()
+ model_para = self.models[0].state_dict()
self._monitor.finish_fl()
diff --git a/federatedscope/tabular/dataloader/toy.py b/federatedscope/tabular/dataloader/toy.py
--- a/federatedscope/tabular/dataloader/toy.py
+++ b/federatedscope/tabular/dataloader/toy.py
@@ -1,3 +1,4 @@
+import copy
import pickle
import numpy as np
@@ -58,7 +59,7 @@ def _generate_data(client_num=5,
test_y = np.expand_dims(test_y, -1)
test_data = {'x': test_x, 'y': test_y}
for each_client in range(1, client_num + 1):
- data[each_client]['test'] = test_data
+ data[each_client]['test'] = copy.deepcopy(test_data)
# val data
val_x = np.random.normal(loc=0.0,
@@ -68,7 +69,7 @@ def _generate_data(client_num=5,
val_y = np.expand_dims(val_y, -1)
val_data = {'x': val_x, 'y': val_y}
for each_client in range(1, client_num + 1):
- data[each_client]['val'] = val_data
+ data[each_client]['val'] = copy.deepcopy(val_data)
# server_data
data[0] = dict()
| diff --git a/.github/workflows/test_atc.yml b/.github/workflows/test_atc.yml
--- a/.github/workflows/test_atc.yml
+++ b/.github/workflows/test_atc.yml
@@ -7,7 +7,7 @@ on:
jobs:
run:
- if: false == contains(github.event.pull_request.title, 'WIP')
+ if: (false == contains(github.event.pull_request.title, 'WIP') && github.repository == 'alibaba/FederatedScope')
runs-on: ${{ matrix.os }}
timeout-minutes: 30
strategy:
| Inappropriate way of assigning values to Client data in toy and vfl synthetic dataset
When applying feature transformation in `ClientData`, we must use `deepcopy`. Otherwise, the data in server and client will be inconsistent (in server, x = f(x), but in client, x=f(f(x))).
And I've fixed this in https://github.com/alibaba/FederatedScope/pull/486/commits/ae896cd5740dc0b34f03bc637a724edf3f3be77e, please help me to double-check if there are the same issues on other datasets, thanks! @xieyxclack
| There is a minor bug in `vfl_synthetic_data`, where the `config.vertical.dims` is inconsistent with those in `xgb` module.
https://github.com/alibaba/FederatedScope/blob/f4872600f01ea9b4dba9fb4c95f7145f4dc419eb/federatedscope/vertical_fl/dataloader/dataloader.py#L50
I've fix it in [`74bceeb`](https://github.com/alibaba/FederatedScope/pull/486/commits/74bceeb62433cd8591022ea0945a4a7434ad9f44):
```python
total_dims = config.vertical.dims[-1]
```
Thank you very much for pointing out and fixing this issue! | 2023-03-17T09:08:47 |
alibaba/FederatedScope | 637 | alibaba__FederatedScope-637 | ["636"] | 05a2c4a72489d56d51aa056b9b2a67626e09de3e | diff --git a/federatedscope/core/monitors/monitor.py b/federatedscope/core/monitors/monitor.py
--- a/federatedscope/core/monitors/monitor.py
+++ b/federatedscope/core/monitors/monitor.py
@@ -737,6 +737,7 @@ def update_best_result(self, best_results, new_results, results_type):
logger.error(
"cfg.wandb.use=True but not install the wandb package")
exit()
+ return update_best_this_round
def add_items_to_best_result(self, best_results, new_results,
results_type):
diff --git a/federatedscope/core/workers/server.py b/federatedscope/core/workers/server.py
--- a/federatedscope/core/workers/server.py
+++ b/federatedscope/core/workers/server.py
@@ -522,7 +522,8 @@ def save_best_results(self):
"""
if self._cfg.federate.save_to != '':
- self.aggregator.save_model(self._cfg.federate.save_to, self.state)
+ self.aggregator.save_model(f'final_{self._cfg.federate.save_to}',
+ self.state)
formatted_best_res = self._monitor.format_eval_res(
results=self.best_results,
rnd="Final",
@@ -600,11 +601,18 @@ def merge_eval_results_from_all_clients(self):
del formatted_logs[key]
logger.info(formatted_logs)
formatted_logs_all_set.update(formatted_logs)
- self._monitor.update_best_result(
+ update_best_this_round = self._monitor.update_best_result(
self.best_results,
metrics_all_clients,
results_type="unseen_client_best_individual"
if merge_type == "unseen" else "client_best_individual")
+ if update_best_this_round:
+ # When the frequency of evaluations is high,
+ # the frequency of writing to disk in the early stages
+ # may also be high
+ if self._cfg.federate.save_to != '':
+ self.aggregator.save_model(self._cfg.federate.save_to,
+ self.state)
self._monitor.save_formatted_results(formatted_logs)
for form in self._cfg.eval.report:
if form != "raw":
| The saved model is not the best model on the validation set
The model is only saved in the final round, with the latest state dict rather than the best-performing one.
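A generic sketch of the idea behind the fix (persist a checkpoint whenever the validation metric improves, and keep the final-round weights separate), using plain `torch.save` rather than the actual FederatedScope aggregator API:

```python
import torch

class BestCheckpointSaver:
    """Minimal save-on-improvement sketch; not the FederatedScope API."""
    def __init__(self, path='best_model.pt'):
        self.path = path
        self.best = float('inf')

    def update(self, model, val_loss):
        if val_loss < self.best:            # strictly better on the validation set
            self.best = val_loss
            torch.save(model.state_dict(), self.path)
            return True
        return False
```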
| 2023-06-15T11:38:07 |
||
ciudadanointeligente/votainteligente-portal-electoral | 106 | ciudadanointeligente__votainteligente-portal-electoral-106 | [
"105"
] | cb7d3d4b5bbc84b0d6cc0aa87da3711617e4c28f | diff --git a/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py b/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py
new file mode 100644
--- /dev/null
+++ b/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+import datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Election.uses_preguntales'
+ db.add_column(u'elections_election', 'uses_preguntales',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_ranking'
+ db.add_column(u'elections_election', 'uses_ranking',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_face_to_face'
+ db.add_column(u'elections_election', 'uses_face_to_face',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_soul_mate'
+ db.add_column(u'elections_election', 'uses_soul_mate',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_questionary'
+ db.add_column(u'elections_election', 'uses_questionary',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Election.uses_preguntales'
+ db.delete_column(u'elections_election', 'uses_preguntales')
+
+ # Deleting field 'Election.uses_ranking'
+ db.delete_column(u'elections_election', 'uses_ranking')
+
+ # Deleting field 'Election.uses_face_to_face'
+ db.delete_column(u'elections_election', 'uses_face_to_face')
+
+ # Deleting field 'Election.uses_soul_mate'
+ db.delete_column(u'elections_election', 'uses_soul_mate')
+
+ # Deleting field 'Election.uses_questionary'
+ db.delete_column(u'elections_election', 'uses_questionary')
+
+
+ models = {
+ u'candideitorg.answer': {
+ 'Meta': {'object_name': 'Answer'},
+ 'caption': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Question']"}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'candideitorg.candidate': {
+ 'Meta': {'object_name': 'Candidate'},
+ 'answers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['candideitorg.Answer']", 'null': 'True', 'blank': 'True'}),
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ 'has_answered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'photo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.category': {
+ 'Meta': {'object_name': 'Category'},
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'information_source': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'logo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
+ 'use_default_media_naranja_option': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'candideitorg.question': {
+ 'Meta': {'object_name': 'Question'},
+ 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Category']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'elections.candidateperson': {
+ 'Meta': {'object_name': 'CandidatePerson'},
+ 'candidate': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['candideitorg.Candidate']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'person': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['popit.Person']"}),
+ 'reachable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'elections.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'can_election': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['candideitorg.Election']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'extra_info_content': ('django.db.models.fields.TextField', [], {'max_length': '3000', 'null': 'True', 'blank': 'True'}),
+ 'extra_info_title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'popit_api_instance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['popit.ApiInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'searchable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'}),
+ 'uses_face_to_face': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_preguntales': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_questionary': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_ranking': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_soul_mate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
+ },
+ u'elections.votainteligenteanswer': {
+ 'Meta': {'object_name': 'VotaInteligenteAnswer'},
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'message': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['elections.VotaInteligenteMessage']"}),
+ 'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['popit.Person']"})
+ },
+ u'elections.votainteligentemessage': {
+ 'Meta': {'object_name': 'VotaInteligenteMessage', '_ormbases': [u'writeit.Message']},
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'message_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.Message']", 'unique': 'True', 'primary_key': 'True'}),
+ 'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'popit.apiinstance': {
+ 'Meta': {'object_name': 'ApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('popit.fields.ApiInstanceURLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'popit.person': {
+ 'Meta': {'object_name': 'Person'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'popit_url': ('popit.fields.PopItURLField', [], {'default': "''", 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'taggit.tag': {
+ 'Meta': {'object_name': 'Tag'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
+ },
+ u'taggit.taggeditem': {
+ 'Meta': {'object_name': 'TaggedItem'},
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
+ 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
+ },
+ u'writeit.message': {
+ 'Meta': {'object_name': 'Message', '_ormbases': [u'writeit.WriteItDocument']},
+ 'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'author_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'people': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'messages'", 'symmetrical': 'False', 'to': u"orm['popit.Person']"}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItInstance']"})
+ },
+ u'writeit.writeitapiinstance': {
+ 'Meta': {'object_name': 'WriteItApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'writeit.writeitdocument': {
+ 'Meta': {'object_name': 'WriteItDocument'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '256'})
+ },
+ u'writeit.writeitinstance': {
+ 'Meta': {'object_name': 'WriteItInstance', '_ormbases': [u'writeit.WriteItDocument']},
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'})
+ }
+ }
+
+ complete_apps = ['elections']
\ No newline at end of file
diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -30,6 +30,12 @@ class Election(models.Model):
extra_info_title = models.CharField(max_length = 50, blank = True, null = True)
extra_info_content = models.TextField(max_length = 3000, blank = True, null = True, help_text=_("Puedes usar Markdown. <br/> ")
+ markdown_allowed())
+    uses_preguntales = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar preguntales?"))
+    uses_ranking = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar ranking"))
+    uses_face_to_face = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar frente a frente"))
+    uses_soul_mate = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar 1/2 naranja"))
+    uses_questionary = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar cuestionario"))
+
def __unicode__(self):
| diff --git a/elections/tests/candideitorg_popit_tests.py b/elections/tests/candideitorg_popit_tests.py
--- a/elections/tests/candideitorg_popit_tests.py
+++ b/elections/tests/candideitorg_popit_tests.py
@@ -108,9 +108,6 @@ def test_no_tweet_if_candidate_has_no_twitter(self):
expected_twitter_button = ""
actual_twitter_button_template = Template("{% load votainteligente_extras %}{% no_ha_respondido_twitter_button %}")
actual_twitter_button = actual_twitter_button_template.render(Context({"candidate":self.candidato1}))
- print "actual_twitter_button"
- print actual_twitter_button
- print "/actual_twitter_button"
self.assertEquals(actual_twitter_button, expected_twitter_button)
def test_unicode(self):
diff --git a/elections/tests/election_tests.py b/elections/tests/election_tests.py
--- a/elections/tests/election_tests.py
+++ b/elections/tests/election_tests.py
@@ -34,6 +34,12 @@ def test_election_create(self):
self.assertEqual(election.can_election, self.can_election)
self.assertTrue(election.searchable)
self.assertFalse(election.highlighted)
+ self.assertTrue(election.uses_preguntales)
+ self.assertTrue(election.uses_ranking)
+ self.assertTrue(election.uses_face_to_face)
+ self.assertTrue(election.uses_soul_mate)
+ self.assertTrue(election.uses_questionary)
+
def test_it_can_have_a_popit_instance(self):
election = Election.objects.create(
| allow disabling features per election
In some cases the admin of the site might need to disable the ask or soulmate feature for a particular election; it should be possible to do this through the admin page.
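A hedged sketch of how per-election flags like the BooleanFields added above are typically consumed in a Django view once they can be toggled from the admin; the view name and URL handling here are made up for illustration and are not part of this PR.

```python
from django.http import Http404
from elections.models import Election   # the model extended in this PR

def soul_mate_view(request, election_slug):
    election = Election.objects.get(slug=election_slug)
    if not election.uses_soul_mate:      # flag toggled per election in the admin
        raise Http404("This election does not use the soul-mate feature")
    # ... render the soul-mate questionary as usual
    ...
```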
| 2013-10-29T18:30:48 |
|
ciudadanointeligente/votainteligente-portal-electoral | 107 | ciudadanointeligente__votainteligente-portal-electoral-107 | [
"105"
] | 92ffdc4c2b613a253d5f694be41411a9ee03b3d0 | diff --git a/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py b/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py
new file mode 100644
--- /dev/null
+++ b/elections/migrations/0007_auto__add_field_election_uses_preguntales__add_field_election_uses_ran.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+import datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Election.uses_preguntales'
+ db.add_column(u'elections_election', 'uses_preguntales',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_ranking'
+ db.add_column(u'elections_election', 'uses_ranking',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_face_to_face'
+ db.add_column(u'elections_election', 'uses_face_to_face',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_soul_mate'
+ db.add_column(u'elections_election', 'uses_soul_mate',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+ # Adding field 'Election.uses_questionary'
+ db.add_column(u'elections_election', 'uses_questionary',
+ self.gf('django.db.models.fields.BooleanField')(default=True),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Election.uses_preguntales'
+ db.delete_column(u'elections_election', 'uses_preguntales')
+
+ # Deleting field 'Election.uses_ranking'
+ db.delete_column(u'elections_election', 'uses_ranking')
+
+ # Deleting field 'Election.uses_face_to_face'
+ db.delete_column(u'elections_election', 'uses_face_to_face')
+
+ # Deleting field 'Election.uses_soul_mate'
+ db.delete_column(u'elections_election', 'uses_soul_mate')
+
+ # Deleting field 'Election.uses_questionary'
+ db.delete_column(u'elections_election', 'uses_questionary')
+
+
+ models = {
+ u'candideitorg.answer': {
+ 'Meta': {'object_name': 'Answer'},
+ 'caption': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Question']"}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'candideitorg.candidate': {
+ 'Meta': {'object_name': 'Candidate'},
+ 'answers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['candideitorg.Answer']", 'null': 'True', 'blank': 'True'}),
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ 'has_answered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'photo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.category': {
+ 'Meta': {'object_name': 'Category'},
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'information_source': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'logo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
+ 'use_default_media_naranja_option': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'candideitorg.question': {
+ 'Meta': {'object_name': 'Question'},
+ 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Category']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'elections.candidateperson': {
+ 'Meta': {'object_name': 'CandidatePerson'},
+ 'candidate': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['candideitorg.Candidate']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'person': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['popit.Person']"}),
+ 'reachable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'elections.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'can_election': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['candideitorg.Election']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'extra_info_content': ('django.db.models.fields.TextField', [], {'max_length': '3000', 'null': 'True', 'blank': 'True'}),
+ 'extra_info_title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'popit_api_instance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['popit.ApiInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'searchable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'}),
+ 'uses_face_to_face': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_preguntales': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_questionary': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_ranking': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_soul_mate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
+ },
+ u'elections.votainteligenteanswer': {
+ 'Meta': {'object_name': 'VotaInteligenteAnswer'},
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'message': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['elections.VotaInteligenteMessage']"}),
+ 'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['popit.Person']"})
+ },
+ u'elections.votainteligentemessage': {
+ 'Meta': {'object_name': 'VotaInteligenteMessage', '_ormbases': [u'writeit.Message']},
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'message_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.Message']", 'unique': 'True', 'primary_key': 'True'}),
+ 'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'popit.apiinstance': {
+ 'Meta': {'object_name': 'ApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('popit.fields.ApiInstanceURLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'popit.person': {
+ 'Meta': {'object_name': 'Person'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'popit_url': ('popit.fields.PopItURLField', [], {'default': "''", 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'taggit.tag': {
+ 'Meta': {'object_name': 'Tag'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
+ },
+ u'taggit.taggeditem': {
+ 'Meta': {'object_name': 'TaggedItem'},
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
+ 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
+ },
+ u'writeit.message': {
+ 'Meta': {'object_name': 'Message', '_ormbases': [u'writeit.WriteItDocument']},
+ 'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'author_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'people': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'messages'", 'symmetrical': 'False', 'to': u"orm['popit.Person']"}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItInstance']"})
+ },
+ u'writeit.writeitapiinstance': {
+ 'Meta': {'object_name': 'WriteItApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'writeit.writeitdocument': {
+ 'Meta': {'object_name': 'WriteItDocument'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '256'})
+ },
+ u'writeit.writeitinstance': {
+ 'Meta': {'object_name': 'WriteItInstance', '_ormbases': [u'writeit.WriteItDocument']},
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'})
+ }
+ }
+
+ complete_apps = ['elections']
\ No newline at end of file
diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -30,6 +30,12 @@ class Election(models.Model):
extra_info_title = models.CharField(max_length = 50, blank = True, null = True)
extra_info_content = models.TextField(max_length = 3000, blank = True, null = True, help_text=_("Puedes usar Markdown. <br/> ")
+ markdown_allowed())
+    uses_preguntales = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar preguntales?"))
+    uses_ranking = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar ranking"))
+    uses_face_to_face = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar frente a frente"))
+    uses_soul_mate = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar 1/2 naranja"))
+    uses_questionary = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar cuestionario"))
+
def __unicode__(self):
| diff --git a/elections/tests/candideitorg_popit_tests.py b/elections/tests/candideitorg_popit_tests.py
--- a/elections/tests/candideitorg_popit_tests.py
+++ b/elections/tests/candideitorg_popit_tests.py
@@ -108,9 +108,6 @@ def test_no_tweet_if_candidate_has_no_twitter(self):
expected_twitter_button = ""
actual_twitter_button_template = Template("{% load votainteligente_extras %}{% no_ha_respondido_twitter_button %}")
actual_twitter_button = actual_twitter_button_template.render(Context({"candidate":self.candidato1}))
- print "actual_twitter_button"
- print actual_twitter_button
- print "/actual_twitter_button"
self.assertEquals(actual_twitter_button, expected_twitter_button)
def test_unicode(self):
diff --git a/elections/tests/election_tests.py b/elections/tests/election_tests.py
--- a/elections/tests/election_tests.py
+++ b/elections/tests/election_tests.py
@@ -34,6 +34,12 @@ def test_election_create(self):
self.assertEqual(election.can_election, self.can_election)
self.assertTrue(election.searchable)
self.assertFalse(election.highlighted)
+ self.assertTrue(election.uses_preguntales)
+ self.assertTrue(election.uses_ranking)
+ self.assertTrue(election.uses_face_to_face)
+ self.assertTrue(election.uses_soul_mate)
+ self.assertTrue(election.uses_questionary)
+
def test_it_can_have_a_popit_instance(self):
election = Election.objects.create(
| allow disabling features per election
In some cases the admin of the site might need to disable the ask or soulmate feature for a particular election; it should be possible to do this through the admin page.
| 2013-10-29T18:31:16 |
|
ciudadanointeligente/votainteligente-portal-electoral | 170 | ciudadanointeligente__votainteligente-portal-electoral-170 | [
"161"
] | e6e875bfd6f8a1e9e5429dd7f3005613c65b1dc8 | diff --git a/elections/admin.py b/elections/admin.py
--- a/elections/admin.py
+++ b/elections/admin.py
@@ -39,7 +39,7 @@ class AnswerInline(admin.TabularInline):
class CandidatePersonExtraInfoAdmin(admin.ModelAdmin):
readonly_fields = ('person',)
- fields = ('reachable','description', 'portrait_photo')
+ fields = ('reachable','description', 'portrait_photo', 'custom_ribbon')
search_fields = ['person__name', 'person__api_instance__election__name']
admin.site.register(CandidatePerson, CandidatePersonExtraInfoAdmin)
diff --git a/elections/migrations/0008_auto__add_field_candidateperson_custom_ribbon__chg_field_candidatepers.py b/elections/migrations/0008_auto__add_field_candidateperson_custom_ribbon__chg_field_candidatepers.py
new file mode 100644
--- /dev/null
+++ b/elections/migrations/0008_auto__add_field_candidateperson_custom_ribbon__chg_field_candidatepers.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+import datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'CandidatePerson.custom_ribbon'
+ db.add_column(u'elections_candidateperson', 'custom_ribbon',
+ self.gf('django.db.models.fields.CharField')(max_length=15, null=True, blank=True),
+ keep_default=False)
+
+
+ # Changing field 'CandidatePerson.portrait_photo'
+ db.alter_column(u'elections_candidateperson', 'portrait_photo', self.gf('django.db.models.fields.CharField')(max_length=256, null=True))
+
+ def backwards(self, orm):
+ # Deleting field 'CandidatePerson.custom_ribbon'
+ db.delete_column(u'elections_candidateperson', 'custom_ribbon')
+
+
+ # Changing field 'CandidatePerson.portrait_photo'
+ db.alter_column(u'elections_candidateperson', 'portrait_photo', self.gf('django.db.models.fields.CharField')(default='', max_length=256))
+
+ models = {
+ u'candideitorg.answer': {
+ 'Meta': {'object_name': 'Answer'},
+ 'caption': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Question']"}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'candideitorg.candidate': {
+ 'Meta': {'object_name': 'Candidate'},
+ 'answers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['candideitorg.Answer']", 'null': 'True', 'blank': 'True'}),
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ 'has_answered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'photo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.category': {
+ 'Meta': {'object_name': 'Category'},
+ 'election': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Election']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
+ },
+ u'candideitorg.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'information_source': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'logo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
+ 'use_default_media_naranja_option': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'candideitorg.question': {
+ 'Meta': {'object_name': 'Question'},
+ 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['candideitorg.Category']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'question': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {}),
+ 'resource_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'elections.candidateperson': {
+ 'Meta': {'object_name': 'CandidatePerson'},
+ 'candidate': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['candideitorg.Candidate']"}),
+ 'custom_ribbon': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'person': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'relation'", 'unique': 'True', 'to': u"orm['popit.Person']"}),
+ 'portrait_photo': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
+ 'reachable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'elections.election': {
+ 'Meta': {'object_name': 'Election'},
+ 'can_election': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['candideitorg.Election']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'extra_info_content': ('django.db.models.fields.TextField', [], {'max_length': '3000', 'null': 'True', 'blank': 'True'}),
+ 'extra_info_title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
+ 'highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'popit_api_instance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['popit.ApiInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'searchable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'}),
+ 'uses_face_to_face': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_preguntales': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_questionary': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_ranking': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'uses_soul_mate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItInstance']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
+ },
+ u'elections.votainteligenteanswer': {
+ 'Meta': {'object_name': 'VotaInteligenteAnswer'},
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'message': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['elections.VotaInteligenteMessage']"}),
+ 'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['popit.Person']"})
+ },
+ u'elections.votainteligentemessage': {
+ 'Meta': {'object_name': 'VotaInteligenteMessage', '_ormbases': [u'writeit.Message']},
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'message_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.Message']", 'unique': 'True', 'primary_key': 'True'}),
+ 'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
+ },
+ u'popit.apiinstance': {
+ 'Meta': {'object_name': 'ApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('popit.fields.ApiInstanceURLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'popit.person': {
+ 'Meta': {'object_name': 'Person'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'popit_url': ('popit.fields.PopItURLField', [], {'default': "''", 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'taggit.tag': {
+ 'Meta': {'object_name': 'Tag'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
+ },
+ u'taggit.taggeditem': {
+ 'Meta': {'object_name': 'TaggedItem'},
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
+ 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
+ },
+ u'writeit.message': {
+ 'Meta': {'object_name': 'Message', '_ormbases': [u'writeit.WriteItDocument']},
+ 'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'author_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'content': ('django.db.models.fields.TextField', [], {}),
+ 'people': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'messages'", 'symmetrical': 'False', 'to': u"orm['popit.Person']"}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ 'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'}),
+ 'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItInstance']"})
+ },
+ u'writeit.writeitapiinstance': {
+ 'Meta': {'object_name': 'WriteItApiInstance'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
+ },
+ u'writeit.writeitdocument': {
+ 'Meta': {'object_name': 'WriteItDocument'},
+ 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['writeit.WriteItApiInstance']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'remote_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'url': ('django.db.models.fields.CharField', [], {'max_length': '256'})
+ },
+ u'writeit.writeitinstance': {
+ 'Meta': {'object_name': 'WriteItInstance', '_ormbases': [u'writeit.WriteItDocument']},
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'writeitdocument_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['writeit.WriteItDocument']", 'unique': 'True', 'primary_key': 'True'})
+ }
+ }
+
+ complete_apps = ['elections']
\ No newline at end of file
diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -60,6 +60,7 @@ class CandidatePerson(models.Model):
reachable = models.BooleanField(default=False)
description = models.TextField(default='', blank=True)
portrait_photo = models.CharField(max_length=256, blank=True, null=True)
+ custom_ribbon = models.CharField(max_length=15, blank=True, null=True)
def __unicode__(self):
return u'Extra info de %(candidate)s'%{
| diff --git a/elections/tests/candideitorg_popit_tests.py b/elections/tests/candideitorg_popit_tests.py
--- a/elections/tests/candideitorg_popit_tests.py
+++ b/elections/tests/candideitorg_popit_tests.py
@@ -32,10 +32,25 @@ def test_create_a_model_that_relates_them_both(self):
candidate=self.candidato1
)
- candidate_person.portrait_photo = 'http://imgur.com/0tJAgHo'
- candidate_person.save()
+ self.assertEquals(candidate_person.person, self.pedro)
+ self.assertEquals(candidate_person.candidate, self.candidato1)
- candidate_photo = CandidatePerson.objects.get(portrait_photo='http://imgur.com/0tJAgHo')
+ self.assertEquals(self.pedro.relation, candidate_person)
+ self.assertEquals(self.candidato1.relation, candidate_person)
+
+ def test_realtion_stores_extra_atributes(self):
+ candidate_person = CandidatePerson.objects.get(
+ person=self.pedro,
+ candidate=self.candidato1
+ )
+ #Deletes created relation
+ candidate_person.delete()
+ candidate_person = CandidatePerson.objects.create(
+ person=self.pedro,
+ candidate=self.candidato1,
+ portrait_photo ='http://imgur.com/0tJAgHo',
+ custom_ribbon = 'ribbon text'
+ )
self.assertEquals(candidate_person.person, self.pedro)
self.assertEquals(candidate_person.candidate, self.candidato1)
@@ -44,9 +59,10 @@ def test_create_a_model_that_relates_them_both(self):
self.assertEquals(self.candidato1.relation, candidate_person)
self.assertFalse(candidate_person.reachable)
self.assertFalse(candidate_person.description)
- self.assertEquals(candidate_photo.portrait_photo, 'http://imgur.com/0tJAgHo')
+ self.assertEquals(candidate_person.portrait_photo, 'http://imgur.com/0tJAgHo')
# self.assertTrue(False)
+
def test_it_creates_a_link_to_the_candidate_twitter(self):
link = Link.objects.create(url = 'http://twitter.com/candidato1',\
name = 'twitter',\
| Winning candidate ribbon
A ribbon or badge that allows highlighting the candidate who won the election.
| 2013-11-14T16:16:10 |
|
ciudadanointeligente/votainteligente-portal-electoral | 260 | ciudadanointeligente__votainteligente-portal-electoral-260 | [
"258"
] | 36c42f3353901397f6f3656e271619393a684d6f | diff --git a/elections/admin.py b/elections/admin.py
--- a/elections/admin.py
+++ b/elections/admin.py
@@ -45,6 +45,7 @@ class TakenPositionCandidateInline(admin.TabularInline):
model = TakenPosition
form = TakenPositionInlineModelForm
extra = 0
+ can_delete = False
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == 'position':
diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -9,6 +9,8 @@
from candidator.models import Category, Topic as CanTopic
from picklefield.fields import PickledObjectField
from django.conf import settings
+from django.utils.encoding import python_2_unicode_compatible
+import copy
class ExtraInfoMixin(models.Model):
@@ -19,7 +21,7 @@ class Meta:
def __init__(self, *args, **kwargs):
super(ExtraInfoMixin, self).__init__(*args, **kwargs)
- default_extra_info = self.default_extra_info
+ default_extra_info = copy.copy(self.default_extra_info)
default_extra_info.update(self.extra_info)
self.extra_info = default_extra_info
@@ -52,9 +54,13 @@ def election(self):
return category.election
+@python_2_unicode_compatible
class QuestionCategory(Category):
election = models.ForeignKey('Election', related_name='categories', null=True)
+ def __str__(self):
+ return u'<%s> in <%s>' % (self.name, self.election.name)
+
class Election(ExtraInfoMixin, models.Model):
name = models.CharField(max_length=255)
| diff --git a/elections/tests/version2/models_tests.py b/elections/tests/version2/models_tests.py
--- a/elections/tests/version2/models_tests.py
+++ b/elections/tests/version2/models_tests.py
@@ -119,10 +119,10 @@ def test_can_have_extra_info(self):
candidate.save()
self.assertEquals(candidate.extra_info['ribbon'], "perrito")
- @override_settings(DEFAULT_CANDIDATE_EXTRA_INFO={'ribbon': 'perrito'})
+ @override_settings(DEFAULT_CANDIDATE_EXTRA_INFO={'custom_ribbon': 'ribbon text'})
def test_default_candidate_extra_info(self):
candidate = Candidate.objects.get(id=1)
- self.assertEquals(candidate.extra_info['ribbon'], 'perrito')
+ self.assertEquals(candidate.extra_info['custom_ribbon'], 'ribbon text')
@override_settings(DEFAULT_CANDIDATE_EXTRA_INFO={'ribbon': 'perrito'})
def test_do_not_override_settings(self):
@@ -140,6 +140,15 @@ def test_instanciate_a_personal_data(self):
         self.assertEquals(personal_data.value, u'31 años')
self.assertIn(personal_data, candidate.personal_datas.all())
+ def test_bug_258(self):
+ candidate = Candidate.objects.get(id=1)
+ candidate.extra_info['custom_ribbon'] = 'Perro grande'
+ candidate.extra_info['other_thing'] = 'This is something else'
+ candidate.save()
+ candidate2 = Candidate.objects.get(id=2)
+ self.assertEquals(candidate2.extra_info['custom_ribbon'], 'ribbon text')
+ self.assertNotIn('other_thing', candidate2.extra_info.keys())
+
class QuestionCategoryTestCase(Version2TestCase):
def setUp(self):
@@ -147,7 +156,9 @@ def setUp(self):
def test_instanciate_one(self):
category = QuestionCategory.objects.create(name="Perros", election=self.election)
+
self.assertIsInstance(category, Category)
+ self.assertEquals(category.__str__(), u"<Perros> in <the name>")
class TopicTestCase(Version2TestCase):
| Extra info is the same for all candidates
When using extra_info values such as ribbon or colors, the same values are returned for all candidates in places such as the election page or the soul-mate JSON.
The correct value is returned only when information for a single candidate is requested, such as on the candidate detail page.
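The underlying pitfall, shown outside Django as a sketch rather than the project's code: `default_extra_info` is a dict shared through the class, so an in-place update made for one candidate leaks into every other candidate; the patch's `copy.copy(self.default_extra_info)` breaks that sharing.

```python
import copy

class CandidateSketch:
    default_extra_info = {'custom_ribbon': 'ribbon text'}

    def __init__(self, extra_info=None, fixed=False):
        # buggy path reuses the class-level dict; fixed path copies it first
        base = copy.copy(self.default_extra_info) if fixed else self.default_extra_info
        base.update(extra_info or {})
        self.extra_info = base

a = CandidateSketch({'custom_ribbon': 'Perro grande'})   # buggy path mutates the shared dict
b = CandidateSketch()
print(b.extra_info['custom_ribbon'])                     # 'Perro grande' -- leaked from a

CandidateSketch.default_extra_info = {'custom_ribbon': 'ribbon text'}   # reset the class default
c = CandidateSketch({'custom_ribbon': 'Perro grande'}, fixed=True)      # copy first, as in the patch
d = CandidateSketch(fixed=True)
print(d.extra_info['custom_ribbon'])                     # 'ribbon text'
```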
| 2015-07-14T16:28:21 |
|
ciudadanointeligente/votainteligente-portal-electoral | 283 | ciudadanointeligente__votainteligente-portal-electoral-283 | [
"272"
] | 43caffd50bc9c0eb44e96fc5d772fc995cdaad9f | diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -6,7 +6,7 @@
from popolo.models import Person, Area
from django.utils.translation import ugettext_lazy as _
from markdown_deux.templatetags.markdown_deux_tags import markdown_allowed
-from candidator.models import Category, Topic as CanTopic
+from candidator.models import Category, Topic as CanTopic, TakenPosition
from picklefield.fields import PickledObjectField
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
@@ -38,6 +38,11 @@ def twitter(self):
if links:
return links.first()
+ @property
+ def has_answered(self):
+ are_there_answers = TakenPosition.objects.filter(person=self, position__isnull=False).exists()
+ return are_there_answers
+
class Meta:
verbose_name = _("Candidato")
verbose_name_plural = _("Candidatos")
| diff --git a/elections/tests/version2/models_tests.py b/elections/tests/version2/models_tests.py
--- a/elections/tests/version2/models_tests.py
+++ b/elections/tests/version2/models_tests.py
@@ -2,7 +2,7 @@
from elections.tests import VotaInteligenteTestCase as TestCase
from popolo.models import Person, ContactDetail
from elections.models import Candidate, Election, QuestionCategory, PersonalData
-from candidator.models import Category
+from candidator.models import Category, Position, TakenPosition
from django.template.loader import get_template
from django.template import Context, Template
from django.test import override_settings
@@ -111,6 +111,26 @@ def test_ranking_twitter_button(self):
actual_twitter_button = actual_twitter_button_template.render(Context({"candidate": candidate}))
self.assertEquals(actual_twitter_button, expected_twitter_button)
+ def test_candidate_has_answered(self):
+ TakenPosition.objects.all().delete()
+ candidate = Candidate.objects.get(id=1)
+ category = QuestionCategory.objects.create(name="Perros", election=self.election)
+ topic = Topic.objects.create(
+ label=u"Should marijuana be legalized?",
+ category=category,
+ description=u"This is a description of the topic of marijuana")
+ position = Position.objects.create(
+ topic=topic,
+ label=u"Yes",
+ description=u"Yes, means that it is considered a good thing for marijuana to be legalized"
+ )
+ taken_position = TakenPosition.objects.create(topic=topic,
+ person=candidate)
+ self.assertFalse(candidate.has_answered)
+ taken_position.position = position
+ taken_position.save()
+ self.assertTrue(candidate.has_answered)
+
class CandidateExtraInfoTestCase(Version2TestCase):
def test_can_have_extra_info(self):
| Candidate has_answered always false
How can we stop showing the Twitter link for candidates who already have all their answers?
How can we change "pídele" to "pedile"?
| Also, in cases where the answers are already there, it could say "read their proposals" or something like that; today a user got confused and thought the site was for asking the candidates for their positions rather than for reading them.
"compáralo" with another candidate: put COMPARALO.
You know, I have a bit of a doubt, because we used to have a flag for each candidate. What do you think about putting this piece of data in candidate.extra_info? I'm still thinking about how a candidate should be marked as has_answered or not.
Maybe it should be something like: if the candidate has at least one question answered, then has_answered = True. What do you think?
If any answer is missing it goes to false, so that the link to ask them for their
position shows up.
On Jul 24, 2015 4:40 PM, "Luis Felipe Álvarez Burgos" <
[email protected]> wrote:
> Maybe it should be something like: if the candidate has at least one question
> answered, then has_answered = True. What do you think?
>
> —
> Reply to this email directly or view it on GitHub
> https://github.com/ciudadanointeligente/votainteligente-portal-electoral/issues/272#issuecomment-124670036
> .
That is exactly my question; we have 2 options (see the sketch below):
- If the candidate has at least one question answered, then `has_answer = True`
- Only if all questions are answered, `has_answer = True`
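Both options map to one-line Django ORM queries over the `TakenPosition` model used in the patch. This is only a sketch for comparison; the merged `has_answered` property implements the first option.

```python
from candidator.models import TakenPosition

def answered_at_least_one(candidate):
    # Option 1 (what the patch does): at least one position actually taken.
    return TakenPosition.objects.filter(person=candidate,
                                        position__isnull=False).exists()

def answered_everything(candidate):
    # Option 2: no taken position left without a chosen Position.
    return not TakenPosition.objects.filter(person=candidate,
                                            position__isnull=True).exists()
```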
| 2015-07-24T20:28:05 |
ciudadanointeligente/votainteligente-portal-electoral | 328 | ciudadanointeligente__votainteligente-portal-electoral-328 | [
"327"
] | 2195bac429e3b81497f0140c0c22cc7fb1c91523 | diff --git a/popular_proposal/forms.py b/popular_proposal/forms.py
--- a/popular_proposal/forms.py
+++ b/popular_proposal/forms.py
@@ -56,10 +56,11 @@ class ProposalFormBase(forms.Form):
quieras solucionar. lรญneas)'),
help_text=_(u'Ej: Poca participaciรณn en el Plan Regulador, falta de transparencia en el trabajo de la \
municipalidad, pocos puntos de reciclaje, etc.'))
- solution = forms.CharField(label=_(u'Quรฉ quieres que haga tu autoridad para solucionar el problema? (3 lรญneas)'),
- help_text=_(u'Ejemplo: "Que se aumenten en 30% las horas de atenciรณn de la especialidad Cardiologรญa en \
- los Cesfam y consultorios de la comuna", "Que se publiquen todos los concejos municipales en \
+ solution = forms.CharField(label=_(u'ยฟQuรฉ deberรญa hacer la municipalidad para solucionar el problema? (3 lรญneas)'),
+ help_text=_(u'Ejemplo: "Crear una ciclovia que circunvale Valdivia", "Que se publiquen todos los concejos municipales en \
el sitio web del municipio".'))
+ solution_at_the_end = forms.CharField(label=u"ยฟQuรฉ avances concretos esperas que se logren durante el periodo del alcalde (4 aรฑos)?",
+ help_text=_(u'Ejemplo: "Aumentar en un 20% la cantidad de ciclovรญas en la ciudad"'), required=False)
when = forms.ChoiceField(choices=WHEN_CHOICES, label=_(u'ยฟEn quรฉ plazo te gustarรญa que estรฉ solucionado?'))
title = forms.CharField(label=_(u'Tรญtulo corto'), help_text=_(u"Un tรญtulo que nos permita describir tu propuesta\
ciudadana. Ej: 50% mรกs de ciclovรญas para la comuna"))
| diff --git a/popular_proposal/tests/form_tests.py b/popular_proposal/tests/form_tests.py
--- a/popular_proposal/tests/form_tests.py
+++ b/popular_proposal/tests/form_tests.py
@@ -136,15 +136,6 @@ def test_update_temporary_popular_proposal(self):
data=self.data,
comments=self.comments,
status=ProposalTemporaryData.Statuses.InTheirSide)
-
- #data = {
- # 'clasification': u'genero',
- # 'title': u'Fiera de vuelta con sus amigos!',
- # 'problem': u'A mi me gusta la contaminaciรณn de Santiago y los autos y sus estresantes ruedas',
- # 'solution': u'Viajar a ver al equipo una vez al mes',
- # 'when': u'1_year',
- # 'allies': u'El Feli y el resto de los cabros de la FCI'
- #}
data = self.data
data['solution'] = u'Viajar a ver al equipo una vez al mes'
form = ProposalTemporaryDataUpdateForm(data=data,
@@ -155,7 +146,8 @@ def test_update_temporary_popular_proposal(self):
self.assertTrue(form.is_valid())
temporary_data = form.save()
temporary_data = ProposalTemporaryData.objects.get(id=temporary_data.id)
- self.assertEquals(temporary_data.data, data)
+ for key in data.keys():
+ self.assertEquals(temporary_data.data[key], data[key])
self.assertEquals(temporary_data.status, ProposalTemporaryData.Statuses.InOurSide)
def test_when_template_tag(self):
| New question in the form
Split the question about the solution to the problem into 2: one more general and one more specific.
- The first one: "¿Qué debería hacer la municipalidad para solucionar el problema?" (What should the municipality do to solve the problem?)
- The second one: "¿Qué avances concretos esperas que se logren durante el periodo del alcalde (4 años)?" (What concrete progress do you expect to be achieved during the mayor's term (4 years)?)
| 2016-04-18T18:19:00 |
|
ciudadanointeligente/votainteligente-portal-electoral | 331 | ciudadanointeligente__votainteligente-portal-electoral-331 | [
"330"
] | 2195bac429e3b81497f0140c0c22cc7fb1c91523 | diff --git a/popular_proposal/forms.py b/popular_proposal/forms.py
--- a/popular_proposal/forms.py
+++ b/popular_proposal/forms.py
@@ -128,6 +128,9 @@ def reject(self):
class ProposalTemporaryDataUpdateForm(ProposalFormBase):
+ overall_comments = forms.CharField(required=False, label=_(u'Comentarios sobre tu revisรณn'))
+
+
def __init__(self, *args, **kwargs):
self.proposer = kwargs.pop('proposer')
self.temporary_data = kwargs.pop('temporary_data')
@@ -139,11 +142,15 @@ def __init__(self, *args, **kwargs):
self.fields[comment_key].help_text += _(' <b>Commentarios: %s </b>') % (comment)
def save(self):
+ self.overall_comments = self.cleaned_data.pop('overall_comments')
self.temporary_data.data = self.cleaned_data
+ self.temporary_data.overall_comments = self.overall_comments
self.temporary_data.status = ProposalTemporaryData.Statuses.InOurSide
self.temporary_data.save()
return self.temporary_data
-
+
+ def get_overall_comments(self):
+ return self.cleaned_data.get('overall_comments', '')
class SubscriptionForm(forms.Form):
def __init__(self, *args, **kwargs):
diff --git a/popular_proposal/migrations/0008_proposaltemporarydata_overall_comments.py b/popular_proposal/migrations/0008_proposaltemporarydata_overall_comments.py
new file mode 100644
--- /dev/null
+++ b/popular_proposal/migrations/0008_proposaltemporarydata_overall_comments.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.2 on 2016-04-18 19:04
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('popular_proposal', '0007_popularproposal_likers'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='proposaltemporarydata',
+ name='overall_comments',
+ field=models.CharField(blank=True, default='', max_length=512, null=True),
+ ),
+ ]
diff --git a/popular_proposal/models.py b/popular_proposal/models.py
--- a/popular_proposal/models.py
+++ b/popular_proposal/models.py
@@ -39,6 +39,10 @@ class Statuses(DjangoChoices):
choices=Statuses.choices,
validators=[Statuses.validator],
default=Statuses.InOurSide)
+ overall_comments = models.CharField(max_length=512,
+ blank=True,
+ null=True,
+ default="")
needing_moderation = NeedingModerationManager()
objects = models.Manager()
| diff --git a/popular_proposal/tests/__init__.py b/popular_proposal/tests/__init__.py
--- a/popular_proposal/tests/__init__.py
+++ b/popular_proposal/tests/__init__.py
@@ -25,6 +25,7 @@ def setUp(self):
'solution': '',
'when': u'El plazo no estรก tan bueno',
'allies': ''
+
}
diff --git a/popular_proposal/tests/form_tests.py b/popular_proposal/tests/form_tests.py
--- a/popular_proposal/tests/form_tests.py
+++ b/popular_proposal/tests/form_tests.py
@@ -147,16 +147,20 @@ def test_update_temporary_popular_proposal(self):
#}
data = self.data
data['solution'] = u'Viajar a ver al equipo una vez al mes'
+ data['overall_comments'] = u"Quizรกs sea una buena idea que revises si conviene el plazo de un aรฑo"
form = ProposalTemporaryDataUpdateForm(data=data,
temporary_data=temporary_data,
proposer=self.fiera)
self.assertTrue(form.initial)
self.assertIn(self.comments['when'], form.fields['when'].help_text)
self.assertTrue(form.is_valid())
+ self.assertEquals(form.get_overall_comments(), data['overall_comments'])
temporary_data = form.save()
temporary_data = ProposalTemporaryData.objects.get(id=temporary_data.id)
+ overall_comments = data.pop('overall_comments')
self.assertEquals(temporary_data.data, data)
self.assertEquals(temporary_data.status, ProposalTemporaryData.Statuses.InOurSide)
+ self.assertEquals(temporary_data.overall_comments, overall_comments)
def test_when_template_tag(self):
choice = WHEN_CHOICES[0]
diff --git a/popular_proposal/tests/new_proposal_cycle_tests.py b/popular_proposal/tests/new_proposal_cycle_tests.py
--- a/popular_proposal/tests/new_proposal_cycle_tests.py
+++ b/popular_proposal/tests/new_proposal_cycle_tests.py
@@ -25,6 +25,7 @@ def test_instanciate_one(self):
self.assertIsNotNone(temporary_data.comments['solution'])
self.assertIsNotNone(temporary_data.comments['when'])
self.assertIsNotNone(temporary_data.comments['allies'])
+ self.assertIsNotNone(temporary_data.overall_comments)
self.assertEquals(temporary_data.status, ProposalTemporaryData.Statuses.InOurSide)
self.assertIn(temporary_data, self.fiera.temporary_proposals.all())
self.assertEquals(temporary_data.get_title(), self.data['title'])
| Comments about the review
A user can add comments about their review
| 2016-04-18T19:31:58 |
|
ciudadanointeligente/votainteligente-portal-electoral | 427 | ciudadanointeligente__votainteligente-portal-electoral-427 | [
"423"
] | 0b58cfddf8d4d37c54cf618c307bc8007f035834 | diff --git a/popular_proposal/forms/forms.py b/popular_proposal/forms/forms.py
--- a/popular_proposal/forms/forms.py
+++ b/popular_proposal/forms/forms.py
@@ -175,6 +175,10 @@ def __init__(self, *args, **kwargs):
class Meta:
model = PopularProposal
fields = ['background', 'image']
+ labels = {'background': _(u'Mรกs antecedentes sobre tu propuesta.'),
+ 'image': _(u'ยฟTienes alguna imagen para compartir?')
+ }
+ help_texts = {'background': _(u'Ejemplo: Durante el aรฑo 2011, existiรณ una iniciativa de otra comunidad que no llegรณ a buen puerto.')}
class CommentsForm(forms.Form):
| The "add background" page doesn't say much.
Right now adding more background can only be reached from a single place:

It should also be reachable from the congratulations email and from the list of your proposals.
Besides, it should provide examples that help you make your proposal more complete.
For example, where it says "Background" it should say something like:
"Can you add more background? For example, in what year did the problem start? Or maybe how many times has a solution been attempted?"
This information should also be usable in the proposal detail page.
| 2016-07-28T15:39:20 |
||
ciudadanointeligente/votainteligente-portal-electoral | 462 | ciudadanointeligente__votainteligente-portal-electoral-462 | [
"439"
] | 1aed46485daedf2286b398c9423d01252235c5e6 | diff --git a/popular_proposal/views.py b/popular_proposal/views.py
--- a/popular_proposal/views.py
+++ b/popular_proposal/views.py
@@ -170,7 +170,7 @@ def done(self, form_list, **kwargs):
data=data)
t_data.notify_new()
return render_to_response('popular_proposal/wizard/done.html', {
- 'proposal': t_data,
+ 'popular_proposal': t_data,
'area': self.area
})
@@ -205,10 +205,19 @@ def done(self, form_list, **kwargs):
area=area,
data=data)
return render_to_response('popular_proposal/wizard/done.html', {
- 'proposal': temporary_data,
+ 'popular_proposal': temporary_data,
'area': area
})
+ def get_context_data(self, *args, **kwargs):
+ context = super(ProposalWizardFull, self).get_context_data(*args, **kwargs)
+ data = self.get_all_cleaned_data()
+ if 'area' in data:
+ context['area'] = data['area']
+ context['preview_data'] = self.get_all_cleaned_data()
+
+ return context
+
class PopularProposalUpdateView(UpdateView):
form_class = UpdateProposalForm
| diff --git a/popular_proposal/tests/wizard_tests.py b/popular_proposal/tests/wizard_tests.py
--- a/popular_proposal/tests/wizard_tests.py
+++ b/popular_proposal/tests/wizard_tests.py
@@ -144,7 +144,7 @@ def test_post_data_to_the_wizard(self):
self.assertTemplateUsed(response, 'popular_proposal/wizard/done.html')
# Probar que se creรณ la promesa
self.assertEquals(ProposalTemporaryData.objects.count(), 1)
- temporary_data = response.context['proposal']
+ temporary_data = response.context['popular_proposal']
self.assertEquals(response.context['area'], self.arica)
self.assertEquals(temporary_data.proposer, self.feli)
self.assertEquals(temporary_data.area, self.arica)
@@ -180,13 +180,16 @@ def test_full_wizard(self):
data = test_response[i]
data.update({'proposal_wizard_full-current_step': unicode(i)})
response = self.client.post(url, data=data)
+ self.assertEquals(response.context['area'], self.arica)
+
if 'form' in response.context:
+ self.assertTrue(response.context['preview_data'])
self.assertFalse(response.context['form'].errors)
steps = response.context['wizard']['steps']
self.assertTemplateUsed(response, 'popular_proposal/wizard/done.html')
# Probar que se creรณ la promesa
self.assertEquals(ProposalTemporaryData.objects.count(), 1)
- temporary_data = response.context['proposal']
+ temporary_data = response.context['popular_proposal']
self.assertEquals(response.context['area'], self.arica)
self.assertEquals(temporary_data.proposer, self.feli)
self.assertEquals(temporary_data.area, self.arica)
| Step 3: show the result of the question from step 2
| 2016-08-10T21:38:30 |
|
ciudadanointeligente/votainteligente-portal-electoral | 482 | ciudadanointeligente__votainteligente-portal-electoral-482 | [
"481"
] | 5f5a27ec349ce1387d0be7178d359a3177d3cb85 | diff --git a/popular_proposal/views.py b/popular_proposal/views.py
--- a/popular_proposal/views.py
+++ b/popular_proposal/views.py
@@ -75,6 +75,10 @@ class SubscriptionView(FormView):
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
self.proposal = get_object_or_404(PopularProposal, id=self.kwargs['pk'])
+ if self.request.method == 'GET':
+ self.next_url = self.request.GET.get('next', None)
+ elif self.request.method == 'POST':
+ self.next_url = self.request.POST.get('next', None)
return super(SubscriptionView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
@@ -86,9 +90,13 @@ def get_form_kwargs(self):
def get_context_data(self, **kwargs):
kwargs = super(SubscriptionView, self).get_context_data(**kwargs)
kwargs['proposal'] = self.proposal
+ if self.next_url:
+ kwargs['next'] = self.next_url
return kwargs
def get_success_url(self):
+ if self.next_url:
+ return self.next_url
return reverse('area', kwargs={'slug': self.proposal.area.id})
def form_valid(self, form):
@@ -204,6 +212,7 @@ def done(self, form_list, **kwargs):
temporary_data = ProposalTemporaryData.objects.create(proposer=self.request.user,
area=area,
data=data)
+ temporary_data.notify_new()
context = self.get_context_data(form=None)
context.update({'popular_proposal': temporary_data,
'area': area
| diff --git a/popular_proposal/tests/subscribing_tests.py b/popular_proposal/tests/subscribing_tests.py
--- a/popular_proposal/tests/subscribing_tests.py
+++ b/popular_proposal/tests/subscribing_tests.py
@@ -7,6 +7,8 @@
from unittest import skip
from django.core.urlresolvers import reverse
from django.template import Template, Context
+from django.test import RequestFactory
+from popular_proposal.views import SubscriptionView
class SubscribingToPopularProposal(TestCase):
@@ -63,6 +65,21 @@ def test_liking_view(self):
p = ProposalLike.objects.get(user=self.feli, proposal=self.proposal)
self.assertTrue(p)
+
+ def test_liking_redirecting_view(self):
+ url_home = reverse('popular_proposals:home')
+ kwargs = {'pk': self.proposal.id}
+ url = reverse('popular_proposals:like_a_proposal',
+ kwargs=kwargs)
+ self.client.login(username=self.feli,
+ password='alvarez')
+ response_get = self.client.get(url, {'next': url_home})
+ self.assertEquals(response_get.context['next'], url_home)
+ response = self.client.post(url,
+ data={'next': url_home})
+ self.assertEquals(response.status_code, 302)
+ self.assertEquals(response.url, url_home)
+
def test_popular_proposal_likers(self):
like = ProposalLike.objects.create(user=self.feli,
proposal=self.proposal)
diff --git a/popular_proposal/tests/views_tests.py b/popular_proposal/tests/views_tests.py
--- a/popular_proposal/tests/views_tests.py
+++ b/popular_proposal/tests/views_tests.py
@@ -83,7 +83,7 @@ def setUp(self):
def test_there_is_a_page(self):
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
- self.assertTemplateUsed('popular_proposals/home.html')
+ self.assertTemplateUsed(response, 'popular_proposal/home.html')
def test_brings_a_list_of_proposals(self):
response = self.client.get(self.url, {'clasification': '', 'area': ''})
diff --git a/popular_proposal/tests/wizard_tests.py b/popular_proposal/tests/wizard_tests.py
--- a/popular_proposal/tests/wizard_tests.py
+++ b/popular_proposal/tests/wizard_tests.py
@@ -168,6 +168,7 @@ def test_user_should_accept_terms_and_conditions(self):
self.assertTrue(form.is_valid())
def test_full_wizard(self):
+ original_amount = len(mail.outbox)
url = reverse('popular_proposals:propose_wizard_full')
self.client.login(username=self.feli,
password=USER_PASSWORD)
@@ -193,3 +194,4 @@ def test_full_wizard(self):
self.assertEquals(response.context['area'], self.arica)
self.assertEquals(temporary_data.proposer, self.feli)
self.assertEquals(temporary_data.area, self.arica)
+ self.assertEquals(len(mail.outbox), original_amount + 1)
| In the full wizard no notification mail is sent!!!
| 2016-08-17T12:59:06 |
|
ciudadanointeligente/votainteligente-portal-electoral | 484 | ciudadanointeligente__votainteligente-portal-electoral-484 | [
"417"
] | d02a5166879fdf7c095c119c71f84b997ddddf11 | diff --git a/popular_proposal/forms/form_texts.py b/popular_proposal/forms/form_texts.py
--- a/popular_proposal/forms/form_texts.py
+++ b/popular_proposal/forms/form_texts.py
@@ -25,12 +25,12 @@
)
TEXTS = OrderedDict({
- 'problem': {'label': u' ',
+ 'problem': {'label': u'ยฟCuรกl es el problema?',
'preview_question': u'ยฟCuรกl es el problema?',
'help_text': u'',
'placeholder': u'Describe el problema que afecta a la comuna (o barrio) del que el alcalde debe hacerse cargo',
'long_text': "paso1.html"},
- 'causes': {'label': u' ',
+ 'causes': {'label': u'ยฟCuรกles son las causas?',
'preview_question': u'ยฟCuรกles son las causas?',
'help_text': u'',
'placeholder': u'Utiliza el ejercicio que te mostramos arriba para encontrar las causas del problema',
@@ -40,7 +40,7 @@
'help_text': u'',
'placeholder': u'Siguiendo el ejemplo, propongan la(s) medida(s) que el alcalde debe tomar para solucionar la causa del problema y poder alcanzar la situaciรณn ideal.',
'long_text': "paso3.html"},
- 'solution_at_the_end': {'label': u' ',
+ 'solution_at_the_end': {'label': u'ยฟCuรกl serรญa la soluciรณn?',
'preview_question': u'ยฟCuรกl serรญa la soluciรณn?',
'help_text': u'',
'placeholder': u'Define la o las acciones que debe realizar el alcalde para que la propuesta se de por cumplida.',
diff --git a/popular_proposal/forms/forms.py b/popular_proposal/forms/forms.py
--- a/popular_proposal/forms/forms.py
+++ b/popular_proposal/forms/forms.py
@@ -52,6 +52,7 @@ def get_user_organizations_choicefield(user=None):
'fields': OrderedDict([
('problem', forms.CharField(max_length=512,
widget=forms.Textarea(),
+ label=u'ยฟCuรกl es el problema?'
))
])
},
@@ -61,6 +62,7 @@ def get_user_organizations_choicefield(user=None):
'fields': OrderedDict([(
'causes', forms.CharField(max_length=256,
widget=forms.Textarea(),
+ label=u'ยฟCuรกles son las causas?'
)
)])
@@ -138,6 +140,7 @@ def __init__(self, *args, **kwargs):
class ProposalFormBase(forms.Form, TextsFormMixin):
def set_fields(self):
+ fields = OrderedDict()
for steps in wizard_forms_fields:
for field_name in steps['fields']:
field = steps['fields'][field_name]
@@ -146,12 +149,13 @@ def set_fields(self):
field = field.__call__(**kwargs)
if field is None:
continue
- self.fields[field_name] = field
+ fields[field_name] = field
+ return fields
def __init__(self, *args, **kwargs):
self.proposer = kwargs.pop('proposer', None)
super(ProposalFormBase, self).__init__(*args, **kwargs)
- self.set_fields()
+ self.fields.update(self.set_fields())
self.add_texts_to_fields()
@@ -232,19 +236,52 @@ def reject(self):
self.temporary_data.reject(self.cleaned_data['reason'])
+FIELDS_TO_BE_AVOIDED = ['terms_and_conditions', ]
+
class ProposalTemporaryDataUpdateForm(ProposalFormBase):
- overall_comments = forms.CharField(required=False, label=_(u'Comentarios sobre tu revisรณn'))
+ overall_comments = forms.CharField(required=False,
+ label=_(u'Comentarios sobre tu revisรณn'),
+ widget=forms.Textarea())
+
def __init__(self, *args, **kwargs):
self.proposer = kwargs.pop('proposer')
self.temporary_data = kwargs.pop('temporary_data')
+ field_order = self.get_fields_order(self.temporary_data)
super(ProposalTemporaryDataUpdateForm, self).__init__(*args, **kwargs)
+ self.order_fields(field_order)
+ for field_to_be_avoided in FIELDS_TO_BE_AVOIDED:
+ self.fields.pop(field_to_be_avoided)
self.initial = self.temporary_data.data
+ commented_fields = []
for comment_key in self.temporary_data.comments.keys():
comment = self.temporary_data.comments[comment_key]
if comment:
+ commented_fields.append(comment_key)
self.fields[comment_key].help_text += _(' <b>Commentarios: %s </b>') % (comment)
+ def get_fields_order(self, temporary_data):
+ commented_fields = []
+ fields_at_the_end = ProposalTemporaryDataUpdateForm.base_fields
+ fields = self.set_fields()
+
+ for comment_key in temporary_data.comments.keys():
+ comment = temporary_data.comments[comment_key]
+ if comment:
+ commented_fields.append(comment_key)
+ keyOrder = commented_fields
+ for field in fields:
+ if field not in commented_fields and field not in fields_at_the_end:
+ keyOrder.append(field)
+ for field in fields_at_the_end:
+ keyOrder.append(field)
+ return keyOrder
+
+ def order_fields(self, field_order):
+ if hasattr(self, 'keyOrder'):
+ field_order = self.keyOrder
+ super(ProposalTemporaryDataUpdateForm, self).order_fields(field_order)
+
def save(self):
self.overall_comments = self.cleaned_data.pop('overall_comments')
self.temporary_data.data = self.cleaned_data
| diff --git a/popular_proposal/tests/form_tests.py b/popular_proposal/tests/form_tests.py
--- a/popular_proposal/tests/form_tests.py
+++ b/popular_proposal/tests/form_tests.py
@@ -4,6 +4,7 @@
CommentsForm,
RejectionForm,
ProposalTemporaryDataUpdateForm,
+ FIELDS_TO_BE_AVOIDED,
UpdateProposalForm,
AreaForm)
from django.contrib.auth.models import User
@@ -168,11 +169,34 @@ def test_update_temporary_popular_proposal(self):
t_data = ProposalTemporaryData.objects.get(id=t_data.id)
overall_comments = data.pop('overall_comments')
for key in data.keys():
+ if key in FIELDS_TO_BE_AVOIDED:
+ continue
self.assertEquals(t_data.data[key], data[key])
self.assertEquals(t_data.status,
ProposalTemporaryData.Statuses.InOurSide)
self.assertEquals(t_data.overall_comments, overall_comments)
+ def test_form_fields(self):
+ theirside_status = ProposalTemporaryData.Statuses.InTheirSide
+ t_data = ProposalTemporaryData.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ comments=self.comments,
+ status=theirside_status)
+ data = self.data
+ data['solution'] = u'Viajar a ver al equipo una vez al mes'
+ data['overall_comments'] = u"Quizรกs sea una buena idea que revises si \
+ conviene el plazo de un aรฑo"
+ form = ProposalTemporaryDataUpdateForm(data=data,
+ temporary_data=t_data,
+ proposer=self.fiera)
+ self.assertNotIn('terms_and_conditions', form.fields)
+ first_field = form.fields.popitem(last=False)
+ # Because the field when has comments this should be the firstone
+ self.assertEquals(first_field[0], 'when')
+ last_field = form.fields.popitem()
+ self.assertEquals(last_field[0], 'overall_comments')
+
def test_when_template_tag(self):
choice = WHEN_CHOICES[1]
template = Template(
@@ -250,4 +274,4 @@ def test_post_update_view(self):
self.client.login(username=self.fiera.username, password='feroz')
response = self.client.post(url, **kwargs)
detail_url = reverse('popular_proposals:detail', kwargs={'slug': self.popular_proposal.slug})
- self.assertRedirects(response, detail_url)
+ self.assertRedirects(response, detail_url)
\ No newline at end of file
| Comments about your proposal: fix the form's field order
The form currently looks like this:

It should at least show the fields in order and with their titles.
If it could also have a preview, that would be great.
On the other hand, this form does not need to include the T&C field.
| 2016-08-17T16:04:51 |
|
ciudadanointeligente/votainteligente-portal-electoral | 518 | ciudadanointeligente__votainteligente-portal-electoral-518 | [
"515"
] | bcb59214739c465a3cfff6672e67652d50529610 | diff --git a/popular_proposal/forms/forms.py b/popular_proposal/forms/forms.py
--- a/popular_proposal/forms/forms.py
+++ b/popular_proposal/forms/forms.py
@@ -9,6 +9,7 @@
from .form_texts import TEXTS, TOPIC_CHOICES, WHEN_CHOICES
from popolo.models import Area
from collections import OrderedDict
+from votainteligente.send_mails import send_mails_to_staff
class TextsFormMixin():
@@ -288,6 +289,7 @@ def save(self):
self.temporary_data.overall_comments = self.overall_comments
self.temporary_data.status = ProposalTemporaryData.Statuses.InOurSide
self.temporary_data.save()
+ send_mails_to_staff({'temporary_data': self.temporary_data}, 'notify_staff_new_proposal_update')
return self.temporary_data
def get_overall_comments(self):
diff --git a/popular_proposal/views.py b/popular_proposal/views.py
--- a/popular_proposal/views.py
+++ b/popular_proposal/views.py
@@ -26,6 +26,7 @@
from popular_proposal.forms import ProposalAreaFilterForm
from popular_proposal.filters import ProposalAreaFilter
from votainteligente.view_mixins import EmbeddedViewBase
+from votainteligente.send_mails import send_mails_to_staff
class ProposalCreationView(FormView):
@@ -169,6 +170,7 @@ def done(self, form_list, **kwargs):
area=self.area,
data=data)
t_data.notify_new()
+ send_mails_to_staff({'temporary_data': t_data}, 'notify_staff_new_proposal')
return render_to_response('popular_proposal/wizard/done.html', {
'popular_proposal': t_data,
'area': self.area
@@ -209,6 +211,7 @@ def done(self, form_list, **kwargs):
context.update({'popular_proposal': temporary_data,
'area': area
})
+ send_mails_to_staff({'temporary_data': temporary_data}, 'notify_staff_new_proposal')
return render_to_response('popular_proposal/wizard/done.html',
context)
diff --git a/votainteligente/send_mails.py b/votainteligente/send_mails.py
--- a/votainteligente/send_mails.py
+++ b/votainteligente/send_mails.py
@@ -2,6 +2,7 @@
from django.template.loader import get_template
from django.core.mail import EmailMessage
from django.template import Context
+from django.contrib.auth.models import User
from django.conf import settings
def send_mail(context_dict, template_prefix, to=[], reply_to=None, from_email=settings.DEFAULT_FROM_EMAIL):
@@ -9,11 +10,17 @@ def send_mail(context_dict, template_prefix, to=[], reply_to=None, from_email=se
template_prefix_dict = {'template_prefix': template_prefix}
template_body = get_template('mails/%(template_prefix)s_body.html' % template_prefix_dict)
body = template_body.render(context)
- template_subject= get_template('mails/%(template_prefix)s_subject.html' % template_prefix_dict)
+ template_subject = get_template('mails/%(template_prefix)s_subject.html' % template_prefix_dict)
subject = template_subject.render(context).replace('\n', '').replace('\r', '')
- email = EmailMessage(subject, body, from_email,
- to)
+ email = EmailMessage(subject, body, from_email, to)
if reply_to is not None:
email.reply_to = [reply_to]
email.send()
+
+def send_mails_to_staff(context_dict, template_prefix):
+ to = []
+ for u in User.objects.filter(is_staff=True):
+ to.append(u.email)
+ send_mail(context_dict, template_prefix, to=to)
+
| diff --git a/backend_citizen/tests/views_tests.py b/backend_citizen/tests/views_tests.py
--- a/backend_citizen/tests/views_tests.py
+++ b/backend_citizen/tests/views_tests.py
@@ -9,6 +9,7 @@
from backend_citizen.forms import UserChangeForm
from backend_citizen.tests import BackendCitizenTestCaseBase, PASSWORD
from backend_citizen.models import Organization
+from django.core import mail
class BackendCitizenViewsTests(BackendCitizenTestCaseBase):
@@ -63,6 +64,13 @@ def test_temporary_promise_detail_view(self):
temporary_data = ProposalTemporaryData.objects.get(id=temporary_data.id)
self.assertEquals(temporary_data.data['solution'], data['solution'])
+ self.assertEquals(len(mail.outbox), 1)
+ the_mail = mail.outbox[0]
+ self.assertIn(self.feli.email, the_mail.to)
+ self.assertIn(self.feli.email, the_mail.to)
+ self.assertIn(str(temporary_data.id), the_mail.body)
+ self.assertIn(temporary_data.get_title(), the_mail.body)
+
def test_brings_all_the_proposals_that_are_in_my_side(self):
t_d1 = ProposalTemporaryData.objects.create(proposer=self.fiera,
area=self.arica,
diff --git a/backend_staff/tests/mails_for_staff_tests.py b/backend_staff/tests/mails_for_staff_tests.py
new file mode 100644
--- /dev/null
+++ b/backend_staff/tests/mails_for_staff_tests.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+from popular_proposal.tests import ProposingCycleTestCaseBase as TestCase
+from votainteligente.send_mails import send_mails_to_staff
+from django.contrib.auth.models import User
+from popular_proposal.models import ProposalTemporaryData, PopularProposal
+from django.core import mail
+
+
+class MailsForStaffTestCase(TestCase):
+ def setUp(self):
+ super(MailsForStaffTestCase, self).setUp()
+ self.staff1 = User.objects.create_superuser(username='staff1',
+ password='perrito',
+ email='[email protected]')
+ self.staff2 = User.objects.create_superuser(username='staff2',
+ password='perrito',
+ email='[email protected]')
+ self.temporary_data = ProposalTemporaryData.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data)
+ self.proposal = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title',
+ clasification=u'education'
+ )
+
+ def test_send_mails_to_staff_when_new_comments(self):
+ context = {
+ 'temporary_data': self.temporary_data,
+ }
+ send_mails_to_staff(context, 'notify_staff_new_proposal_update')
+ self.assertEquals(len(mail.outbox), 1)
+ the_mail = mail.outbox[0]
+ self.assertIn(self.staff1.email, the_mail.to)
+ self.assertIn(self.staff2.email, the_mail.to)
+ self.assertIn(str(self.temporary_data.id), the_mail.body)
+ self.assertIn(self.temporary_data.get_title(), the_mail.body)
+
+ def test_send_mails_to_staff_when_new_proposal(self):
+ context = {
+ 'temporary_data': self.temporary_data,
+ }
+ send_mails_to_staff(context, 'notify_staff_new_proposal')
+ self.assertEquals(len(mail.outbox), 1)
+ the_mail = mail.outbox[0]
+ self.assertIn(self.staff1.email, the_mail.to)
+ self.assertIn(self.staff2.email, the_mail.to)
+ self.assertIn(str(self.proposal.id), the_mail.body)
+ self.assertIn(self.temporary_data.get_title(), the_mail.body)
+ self.assertIn(self.temporary_data.area.name, the_mail.subject)
diff --git a/popular_proposal/tests/wizard_tests.py b/popular_proposal/tests/wizard_tests.py
--- a/popular_proposal/tests/wizard_tests.py
+++ b/popular_proposal/tests/wizard_tests.py
@@ -148,7 +148,14 @@ def test_post_data_to_the_wizard(self):
self.assertEquals(response.context['area'], self.arica)
self.assertEquals(temporary_data.proposer, self.feli)
self.assertEquals(temporary_data.area, self.arica)
- self.assertEquals(len(mail.outbox), original_amount + 1)
+ self.assertEquals(len(mail.outbox), original_amount + 2)
+
+ the_mail = mail.outbox[original_amount + 1]
+ self.assertIn(self.fiera.email, the_mail.to)
+ self.assertIn(self.feli.email, the_mail.to)
+ self.assertIn(str(temporary_data.id), the_mail.body)
+ self.assertIn(temporary_data.get_title(), the_mail.body)
+ self.assertIn(temporary_data.area.name, the_mail.subject)
def test_user_should_accept_terms_and_conditions(self):
list_ = get_form_list()
@@ -194,4 +201,11 @@ def test_full_wizard(self):
self.assertEquals(response.context['area'], self.arica)
self.assertEquals(temporary_data.proposer, self.feli)
self.assertEquals(temporary_data.area, self.arica)
- self.assertEquals(len(mail.outbox), original_amount + 1)
+ self.assertEquals(len(mail.outbox), original_amount + 2)
+
+ the_mail = mail.outbox[original_amount + 1]
+ self.assertIn(self.fiera.email, the_mail.to)
+ self.assertIn(self.feli.email, the_mail.to)
+ self.assertIn(str(temporary_data.id), the_mail.body)
+ self.assertIn(temporary_data.get_title(), the_mail.body)
+ self.assertIn(temporary_data.area.name, the_mail.subject)
\ No newline at end of file
| Notify the staff when someone updates their proposal
| 2016-08-25T19:18:50 |
|
ciudadanointeligente/votainteligente-portal-electoral | 552 | ciudadanointeligente__votainteligente-portal-electoral-552 | [
"513"
] | 8257f7387c1582fa61dc20c19fa3462db12eb6f0 | diff --git a/popular_proposal/forms/forms.py b/popular_proposal/forms/forms.py
--- a/popular_proposal/forms/forms.py
+++ b/popular_proposal/forms/forms.py
@@ -180,11 +180,14 @@ def __init__(self, *args, **kwargs):
class Meta:
model = PopularProposal
- fields = ['background', 'image']
+ fields = ['background', 'image', 'contact_details', 'document']
labels = {'background': _(u'Mรกs antecedentes sobre tu propuesta.'),
- 'image': _(u'ยฟTienes alguna imagen para compartir?')
+ 'image': _(u'ยฟTienes alguna imagen para compartir?'),
+ 'document': _(u'ยฟTienes algรบn documento para complementar tu propuesta?'),
+ 'contact_details': _(u'ยฟCรณmo te puede contactar un candidato?')
}
- help_texts = {'background': _(u'Ejemplo: Durante el aรฑo 2011, existiรณ una iniciativa de otra comunidad que no llegรณ a buen puerto.')}
+ help_texts = {'background': _(u'Ejemplo: Durante el aรฑo 2011, existiรณ una iniciativa de otra comunidad que no llegรณ a buen puerto.'),
+ 'contact_details': _(u'Ejemplo: Tu telรฉfono o el lugar donde eres ubicable y en quรฉ horario.')}
class CommentsForm(forms.Form):
@@ -425,4 +428,4 @@ def save(self):
commitment = super(CandidateNotCommitingForm, self).save()
commitment.detail = self.cleaned_data['detail']
commitment.save()
- return commitment
\ No newline at end of file
+ return commitment
diff --git a/popular_proposal/migrations/0013_auto_20160907_1410.py b/popular_proposal/migrations/0013_auto_20160907_1410.py
new file mode 100644
--- /dev/null
+++ b/popular_proposal/migrations/0013_auto_20160907_1410.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.2 on 2016-09-07 14:10
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.db.models.manager
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('popular_proposal', '0012_auto_20160906_1622'),
+ ]
+
+ operations = [
+ migrations.AlterModelManagers(
+ name='popularproposal',
+ managers=[
+ ('ordered', django.db.models.manager.Manager()),
+ ],
+ ),
+ migrations.AddField(
+ model_name='popularproposal',
+ name='contact_details',
+ field=models.TextField(blank=True, help_text='\xbfC\xf3mo te puede contactar un candidato?', null=True),
+ ),
+ migrations.AddField(
+ model_name='popularproposal',
+ name='document',
+ field=models.FileField(blank=True, help_text='\xbfTienes alg\xfan documento para complementar tu propuesta?', null=True, upload_to='uploads/proposal/backgrounds/%Y/%m/%d/'),
+ ),
+ migrations.AlterField(
+ model_name='popularproposal',
+ name='background',
+ field=models.TextField(blank=True, help_text='Antecedentes sobre tu propuesta', null=True),
+ ),
+ ]
diff --git a/popular_proposal/models.py b/popular_proposal/models.py
--- a/popular_proposal/models.py
+++ b/popular_proposal/models.py
@@ -1,3 +1,4 @@
+# coding=utf-8
from __future__ import unicode_literals
from django.db import models
@@ -14,6 +15,7 @@
from votainteligente.open_graph import OGPMixin
from elections.models import Candidate
from django.db.models import Count
+from django.utils.translation import ugettext_lazy as _
class NeedingModerationManager(models.Manager):
@@ -150,7 +152,14 @@ class PopularProposal(models.Model, OGPMixin):
organization = models.ForeignKey(Organization,
related_name='popular_proposals',
null=True)
- background = models.TextField(null=True, blank=True)
+ background = models.TextField(null=True, blank=True, help_text=_(u"Antecedentes sobre tu propuesta"))
+ contact_details = models.TextField(null=True,
+ blank=True,
+ help_text=_(u'ยฟCรณmo te puede contactar un candidato?'))
+ document = models.FileField(upload_to='uploads/proposal/backgrounds/%Y/%m/%d/',
+ help_text=_(u'ยฟTienes algรบn documento para complementar tu propuesta?'),
+ null=True,
+ blank=True)
image = models.ImageField(upload_to='proposals/image/',
max_length=512,
null=True,
| diff --git a/elections/tests/fixtures/example.pdf b/elections/tests/fixtures/example.pdf
new file mode 100644
Binary files /dev/null and b/elections/tests/fixtures/example.pdf differ
diff --git a/popular_proposal/tests/form_tests.py b/popular_proposal/tests/form_tests.py
--- a/popular_proposal/tests/form_tests.py
+++ b/popular_proposal/tests/form_tests.py
@@ -259,8 +259,11 @@ def setUp(self):
self.fiera.save()
def test_instanciate_form(self):
- update_data = {'background': u'Esto es un antecedente'}
- file_data = {'image': self.image}
+ update_data = {'background': u'Esto es un antecedente',
+ 'contact_details': u'Me puedes contactar en el telรฉfono 123456',
+ }
+ file_data = {'image': self.image,
+ 'document': self.get_document()}
form = UpdateProposalForm(data=update_data,
files=file_data,
instance=self.popular_proposal)
@@ -268,6 +271,8 @@ def test_instanciate_form(self):
proposal = form.save()
self.assertEquals(proposal.background, update_data['background'])
self.assertTrue(proposal.image)
+ self.assertTrue(proposal.document)
+ self.assertEquals(proposal.contact_details, update_data['contact_details'])
def test_get_update_view(self):
url = reverse('popular_proposals:citizen_update', kwargs={'slug': self.popular_proposal.slug})
@@ -285,7 +290,10 @@ def test_get_update_view(self):
def test_post_update_view(self):
url = reverse('popular_proposals:citizen_update', kwargs={'slug': self.popular_proposal.slug})
- kwargs = {'data': {'background': u'Esto es un antecedente'}, 'files': {'image': self.image}}
+ kwargs = {'data': {'background': u'Esto es un antecedente',
+ 'contact_details': u'Me puedes contactar en el telรฉfono 123456'},
+ 'files': {'image': self.image,
+ 'document': self.get_document()}}
self.client.login(username=self.fiera.username, password='feroz')
response = self.client.post(url, **kwargs)
detail_url = reverse('popular_proposals:detail', kwargs={'slug': self.popular_proposal.slug})
diff --git a/popular_proposal/tests/new_proposal_cycle_tests.py b/popular_proposal/tests/new_proposal_cycle_tests.py
--- a/popular_proposal/tests/new_proposal_cycle_tests.py
+++ b/popular_proposal/tests/new_proposal_cycle_tests.py
@@ -153,6 +153,8 @@ def test_instantiate_one(self):
self.assertIn(popular_proposal, self.arica.proposals.all())
self.assertIsNone(popular_proposal.temporary)
self.assertFalse(popular_proposal.background)
+ self.assertFalse(popular_proposal.contact_details)
+ self.assertFalse(popular_proposal.document)
self.assertFalse(popular_proposal.image)
self.assertEquals(popular_proposal.clasification, u'education')
self.assertFalse(popular_proposal.for_all_areas)
| Expand the proposal's background
Also add:
- A document
- How a candidate can contact you
| 2016-09-07T14:41:24 |
|
ciudadanointeligente/votainteligente-portal-electoral | 573 | ciudadanointeligente__votainteligente-portal-electoral-573 | [
"571"
] | 096fe8876315f4dbdd4443aba67f7230ca1fd240 | diff --git a/backend_candidate/views.py b/backend_candidate/views.py
--- a/backend_candidate/views.py
+++ b/backend_candidate/views.py
@@ -38,7 +38,7 @@ def dispatch(self, request, *args, **kwargs):
**kwargs)
-class HomeView(BackendCandidateBase, TemplateView):
+class HomeView(BackendCandidateBase, RedirectView):
template_name = "backend_candidate/home.html"
def get_context_data(self, *args, **kwargs):
@@ -46,6 +46,13 @@ def get_context_data(self, *args, **kwargs):
context['candidacies'] = self.user.candidacies.all()
return context
+ def get_redirect_url(self, *args, **kwargs):
+ candidacy = self.user.candidacies.first()
+ profile_url = reverse('backend_candidate:complete_profile',
+ kwargs={'slug': candidacy.candidate.election.slug,
+ 'candidate_id': candidacy.candidate.id})
+ return profile_url
+
class CompleteMediaNaranjaView(FormView):
template_name = 'backend_candidate/complete_12_naranja.html'
@@ -106,7 +113,11 @@ def get_redirect_url(self, *args, **kwargs):
self.contact.candidacy = candidacy
self.contact.used_by_candidate = True
self.contact.save()
- return reverse('backend_candidate:home')
+ candidacy = self.request.user.candidacies.first()
+ profile_url = reverse('backend_candidate:complete_profile',
+ kwargs={'slug': candidacy.candidate.election.slug,
+ 'candidate_id': candidacy.candidate.id})
+ return profile_url
form_class = get_candidate_profile_form_class()
| diff --git a/backend_candidate/tests/candidacy_tests.py b/backend_candidate/tests/candidacy_tests.py
--- a/backend_candidate/tests/candidacy_tests.py
+++ b/backend_candidate/tests/candidacy_tests.py
@@ -70,6 +70,7 @@ def test_instanciate_candidacy(self):
self.assertTrue(candidacy.created)
self.assertTrue(candidacy.updated)
+
def test_user_has_candidacy(self):
self.assertFalse(is_candidate(self.feli))
candidacy = Candidacy.objects.create(user=self.feli,
@@ -92,12 +93,15 @@ def test_get_candidate_home(self):
password='alvarez')
response = self.client.get(url)
self.assertEquals(response.status_code, 404)
- candidacy = Candidacy.objects.create(user=self.feli,
- candidate=self.candidate
- )
+ Candidacy.objects.create(user=self.feli,
+ candidate=self.candidate
+ )
+
response = self.client.get(url)
- self.assertEquals(response.status_code, 200)
- self.assertIn(candidacy, response.context['candidacies'])
+ profile_url = reverse('backend_candidate:complete_profile',
+ kwargs={'slug': self.candidate.election.slug,
+ 'candidate_id': self.candidate.id})
+ self.assertRedirects(response, profile_url)
def test_proposals_with_a_resolution(self):
@@ -244,12 +248,14 @@ def test_candidacy_redirect_view(self):
self.assertRedirects(response, login_url)
self.client.login(username=self.feli.username, password='alvarez')
response = self.client.get(url)
- candidate_home = reverse('backend_candidate:home')
- self.assertRedirects(response, candidate_home)
- self.assertTrue(Candidacy.objects.filter(candidate=self.candidate,
- user=self.feli))
candidacy = Candidacy.objects.get(candidate=self.candidate,
user=self.feli)
+ profile_url = reverse('backend_candidate:complete_profile',
+ kwargs={'slug': candidacy.candidate.election.slug,
+ 'candidate_id': candidacy.candidate.id})
+ self.assertRedirects(response, profile_url)
+ self.assertTrue(Candidacy.objects.filter(candidate=self.candidate,
+ user=self.feli))
contact = CandidacyContact.objects.get(id=contact.id)
self.assertEquals(contact.candidacy, candidacy)
@@ -364,7 +370,10 @@ def test_login_candidate_marks_her_him_as_contacted(self):
self.assertRedirects(response, change_password_url)
response = self.client.get(home_url)
- self.assertEquals(response.status_code, 200)
+ profile_url = reverse('backend_candidate:complete_profile',
+ kwargs={'slug': contact.candidacy.candidate.election.slug,
+ 'candidate_id': contact.candidacy.candidate.id})
+ self.assertRedirects(response, profile_url)
@override_settings(MAX_AMOUNT_OF_MAILS_TO_CANDIDATE=3)
def test_send_candidate_maximum_amount_of_times(self):
| Candidate backend: there is a question with a blank answer option.

| 2016-09-09T21:36:37 |
|
ciudadanointeligente/votainteligente-portal-electoral | 697 | ciudadanointeligente__votainteligente-portal-electoral-697 | [
"679"
] | dfd4434a5c8a34940504e6506f70a79eecdc1c40 | diff --git a/popular_proposal/models.py b/popular_proposal/models.py
--- a/popular_proposal/models.py
+++ b/popular_proposal/models.py
@@ -196,12 +196,6 @@ def __str__(self):
def get_absolute_url(self):
return reverse('popular_proposals:detail', kwargs={'slug': self.slug})
- def save(self, *args, **kwargs):
- creating = self.pk is None
- super(PopularProposal, self).save(*args, **kwargs)
- if self.pk is not None and creating:
- self.notify_candidates_of_new()
-
def notify_candidates_of_new(self):
if not (settings.NOTIFY_CANDIDATES and settings.NOTIFY_CANDIDATES_OF_NEW_PROPOSAL):
return
| diff --git a/popular_proposal/tests/subscription_tests.py b/popular_proposal/tests/subscription_tests.py
--- a/popular_proposal/tests/subscription_tests.py
+++ b/popular_proposal/tests/subscription_tests.py
@@ -243,14 +243,14 @@ def test_like_a_proposal_signal(self):
self.assertIn(self.candidate2.name, the_mail.body)
self.assertIn(str(2), the_mail.body)
- def test_new_proposal_notification_for_candidates(self):
+ def test_proposal_notification_for_candidates(self):
previous_amount = len(mail.outbox)
proposal = PopularProposal.objects.create(proposer=self.fiera,
area=self.arica,
data=self.data,
title=u'This is a title'
)
-
+ proposal.notify_candidates_of_new()
self.assertEquals(len(mail.outbox), previous_amount + 2)
first_mail = mail.outbox[previous_amount]
self.assertEquals(len(first_mail.to), 1)
@@ -282,6 +282,7 @@ def test_new_proposal_notification_with_login_info(self):
data=self.data,
title=u'This is a title'
)
+ proposal.notify_candidates_of_new()
first_mail = mail.outbox[previous_amount]
contact = self.candidate.contacts.all().first()
self.assertIn(self.feli.username, first_mail.body)
@@ -291,8 +292,6 @@ def test_new_proposal_notification_with_media_naranja(self):
self.feli.last_login = timezone.now()
self.feli.save()
self.candidate.taken_positions.all().delete()
- for e in self.candidate.elections.all():
- print e.id
candidacy = Candidacy.objects.create(user=self.feli,
candidate=self.candidate
)
@@ -306,6 +305,7 @@ def test_new_proposal_notification_with_media_naranja(self):
data=self.data,
title=u'This is a title'
)
+ proposal.notify_candidates_of_new()
first_mail = mail.outbox[previous_amount]
contact = self.candidate.contacts.all().first()
self.assertNotIn(self.feli.username, first_mail.body)
| [Proposal] When it is published it is not automatically sent to the candidates.
| 2017-06-01T19:24:35 |
|
ciudadanointeligente/votainteligente-portal-electoral | 765 | ciudadanointeligente__votainteligente-portal-electoral-765 | [
"760"
] | ff9c82514db6ca133f51f142c146ccb3df9977d4 | diff --git a/popular_proposal/filters.py b/popular_proposal/filters.py
--- a/popular_proposal/filters.py
+++ b/popular_proposal/filters.py
@@ -24,6 +24,8 @@ class TextSearchForm(Form):
label=u"Ordenar por",
choices=[('', u'Por apoyos'),
('-created', u'รltimas primero'),
+ ('-proposer__profile__is_organization', u'De organizaciones primero'),
+ ('-is_local_meeting', u'Encuentros locales primero'),
])
def full_clean(self):
| Sort proposals
By:
- [x] Most recently created
- [x] Created by an organization
- [x] With the most hearts.
And the *default* ordering could be:
- Random
- By hearts, local meeting, is an organization (see the sketch below).
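For illustration only, a minimal sketch of what the suggested default ordering could look like as a Django queryset; it reuses field names that already appear in this repo (`likers`, `is_local_meeting`, `proposer__profile__is_organization`) and is not the change this PR actually makes:

```python
# Hypothetical default ordering: most hearts first, then local meetings,
# then proposals created by organizations.
from django.db.models import Count

from popular_proposal.models import PopularProposal

ordered_proposals = (PopularProposal.objects
                     .annotate(num_likers=Count('likers'))
                     .order_by('-num_likers',
                               '-is_local_meeting',
                               '-proposer__profile__is_organization'))
```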
| 2017-07-19T14:23:46 |
||
ciudadanointeligente/votainteligente-portal-electoral | 781 | ciudadanointeligente__votainteligente-portal-electoral-781 | [
"759"
] | 624266f492c2c33864f291cc50479b22384da885 | diff --git a/popular_proposal/admin.py b/popular_proposal/admin.py
--- a/popular_proposal/admin.py
+++ b/popular_proposal/admin.py
@@ -10,10 +10,13 @@ class PopularProposalAdmin(admin.ModelAdmin):
'area',
'title',
'data',
- 'proposer'
+ 'proposer',
)
exclude = ('organization',)
+ def get_queryset(self, *args, **kwargs):
+ return PopularProposal.all_objects.all()
+
fieldset = []
for key in ProposalTemporaryDataModelForm.base_fields:
diff --git a/popular_proposal/migrations/0024_auto_20170720_2052.py b/popular_proposal/migrations/0024_auto_20170720_2052.py
new file mode 100644
--- /dev/null
+++ b/popular_proposal/migrations/0024_auto_20170720_2052.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-20 20:52
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('popular_proposal', '0023_auto_20170711_1805'),
+ ]
+
+ operations = [
+ migrations.AlterModelOptions(
+ name='popularproposal',
+ options={'ordering': ['for_all_areas', '-created'], 'verbose_name': 'Propuesta Ciudadana', 'verbose_name_plural': 'Propuestas Ciudadanas'},
+ ),
+ migrations.AddField(
+ model_name='popularproposal',
+ name='is_reported',
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name='proposallike',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to=settings.AUTH_USER_MODEL),
+ ),
+ ]
diff --git a/popular_proposal/models.py b/popular_proposal/models.py
--- a/popular_proposal/models.py
+++ b/popular_proposal/models.py
@@ -79,6 +79,7 @@ class Statuses(DjangoChoices):
needing_moderation = NeedingModerationManager()
objects = models.Manager()
+
def save(self, *args, **kwargs):
if not self.comments:
self.comments = {}
@@ -146,20 +147,24 @@ def get_title(self):
def __str__(self):
return self.get_title()
+class ProposalsManager(models.Manager):
+ def get_queryset(self):
+ qs = super(ProposalsManager, self).get_queryset()
+ qs = qs.exclude(is_reported=True)
+ return qs
class ProposalQuerySet(models.QuerySet):
def by_likers(self, *args, **kwargs):
return self.order_by('-num_likers', 'proposer__profile__is_organization')
-class ProposalsOrderedManager(models.Manager):
+class ProposalsOrderedManager(ProposalsManager):
def get_queryset(self):
- qs = ProposalQuerySet(self.model, using=self._db)
+ qs = ProposalQuerySet(self.model, using=self._db).exclude(is_reported=True)
qs = qs.annotate(num_likers=Count('likers'))
return qs
-
@python_2_unicode_compatible
class PopularProposal(models.Model, OGPMixin):
title = models.CharField(max_length=255, default='')
@@ -198,11 +203,13 @@ class PopularProposal(models.Model, OGPMixin):
null=True,
blank=True)
is_local_meeting = models.BooleanField(default=False)
+ is_reported = models.BooleanField(default=False)
ogp_enabled = True
ordered = ProposalsOrderedManager.from_queryset(ProposalQuerySet)()
- objects = models.Manager()
+ objects = ProposalsManager()
+ all_objects = models.Manager()
class Meta:
ordering = ['for_all_areas', '-created']
| diff --git a/popular_proposal/tests/popular_proposal_model_tests.py b/popular_proposal/tests/popular_proposal_model_tests.py
--- a/popular_proposal/tests/popular_proposal_model_tests.py
+++ b/popular_proposal/tests/popular_proposal_model_tests.py
@@ -40,6 +40,7 @@ def test_instantiate_one(self):
self.assertEquals(popular_proposal.clasification, u'education')
self.assertFalse(popular_proposal.for_all_areas)
self.assertFalse(popular_proposal.is_local_meeting)
+ self.assertFalse(popular_proposal.is_reported)
def test_popular_proposal_card_as_property(self):
popular_proposal = PopularProposal.objects.create(proposer=self.fiera,
@@ -54,6 +55,29 @@ def test_popular_proposal_card_as_property(self):
self.assertEquals(popular_proposal.card, expected_card_html)
+ def test_reportedproposals_are_not_in_default_manager(self):
+ p1 = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title',
+ clasification=u'education'
+ )
+ p2 = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title',
+ clasification=u'education',
+ is_reported=True
+ )
+ self.assertIn(p1, PopularProposal.objects.all())
+ self.assertNotIn(p2, PopularProposal.objects.all())
+ # now ordered
+ self.assertIn(p1, PopularProposal.ordered.all())
+ self.assertNotIn(p2, PopularProposal.ordered.all())
+ #but they appear in the all Manager
+ self.assertIn(p1, PopularProposal.all_objects.all())
+ self.assertIn(p2, PopularProposal.all_objects.all())
+
def test_proposal_ogp(self):
site = Site.objects.get_current()
popular_proposal = PopularProposal.objects.create(proposer=self.fiera,
| Report a proposal
They can be reported for:
- [ ] Violating the T&C
- [ ] Being a duplicate
| 2017-07-20T21:34:08 |
|
ciudadanointeligente/votainteligente-portal-electoral | 846 | ciudadanointeligente__votainteligente-portal-electoral-846 | [
"841"
] | 1e18ba81cc1bb118b6283e952314dc2ee0b1cb76 | diff --git a/votita/forms/forms.py b/votita/forms/forms.py
--- a/votita/forms/forms.py
+++ b/votita/forms/forms.py
@@ -29,10 +29,11 @@ def __init__(self, *args, **kwargs):
self.proposer = kwargs.pop('proposer')
super(CreateGatheringForm, self).__init__(*args, **kwargs)
- def save(self):
+ def save(self, commit=True):
instance = super(CreateGatheringForm, self).save(commit=False)
instance.proposer = self.proposer
instance.save()
+ self.save_m2m()
return instance
diff --git a/votita/migrations/0003_auto_20170811_1542.py b/votita/migrations/0003_auto_20170811_1542.py
new file mode 100644
--- /dev/null
+++ b/votita/migrations/0003_auto_20170811_1542.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-08-11 15:42
+from __future__ import unicode_literals
+
+from django.db import migrations
+import taggit.managers
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('taggit', '0002_auto_20150616_2121'),
+ ('votita', '0002_kidsgathering_generated_at'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='kidsgathering',
+ name='presidents_features',
+ ),
+ migrations.AddField(
+ model_name='kidsgathering',
+ name='presidents_features',
+ field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Caracter\xedsticas de un buen presidente'),
+ ),
+ ]
diff --git a/votita/models.py b/votita/models.py
--- a/votita/models.py
+++ b/votita/models.py
@@ -7,6 +7,7 @@
from multiselectfield import MultiSelectField
from django.contrib.auth.models import User
from elections.models import Area
+from taggit.managers import TaggableManager
PRESIDENTS_FEATURES = (
@@ -54,11 +55,7 @@ class KidsGathering(models.Model):
name = models.CharField(max_length=512,
verbose_name="Nombre del encuentro")
stats_data = PickledObjectField()
- presidents_features = MultiSelectField(choices=PRESIDENTS_FEATURES,
- null=True,
- max_choices=10,
- max_length=512,
- verbose_name="Caracterรญsticas de un buen presidente")
+ presidents_features = TaggableManager(verbose_name="Caracterรญsticas de un buen presidente")
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now_add=True)
image = models.ImageField(upload_to='votita/images/',
| diff --git a/votita/tests/forms_tests.py b/votita/tests/forms_tests.py
--- a/votita/tests/forms_tests.py
+++ b/votita/tests/forms_tests.py
@@ -27,13 +27,13 @@ def setUp(self):
def test_create_a_gathering(self):
data = {"name": "Segundo medio C",
- "presidents_features": ["inteligente"],
+ "presidents_features": "inteligente,honesto",
"generated_at": self.a_comuna.id}
form = CreateGatheringForm(data, proposer=self.feli)
self.assertTrue(form.is_valid())
gathering = form.save()
self.assertEquals(gathering.name, data['name'])
- self.assertTrue(gathering.presidents_features)
+ self.assertTrue(gathering.presidents_features.all())
self.assertEquals(gathering.generated_at, self.a_comuna)
def test_update_gathering(self):
| presidents_features should be a tags field
| 2017-08-11T23:00:40 |
|
ciudadanointeligente/votainteligente-portal-electoral | 847 | ciudadanointeligente__votainteligente-portal-electoral-847 | [
"842"
] | e904e1068393eb1b99c5fb4ead8f0ce2bcb8cc85 | diff --git a/votita/forms/forms.py b/votita/forms/forms.py
--- a/votita/forms/forms.py
+++ b/votita/forms/forms.py
@@ -50,7 +50,7 @@ class UpdateGatheringForm(ModelForm):
initial=0)
class Meta:
model = KidsGathering
- fields = ['image']
+ fields = ['image', 'comments']
def clean(self):
cleaned_data = super(UpdateGatheringForm, self).clean()
diff --git a/votita/migrations/0004_kidsgathering_comments.py b/votita/migrations/0004_kidsgathering_comments.py
new file mode 100644
--- /dev/null
+++ b/votita/migrations/0004_kidsgathering_comments.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-08-16 15:53
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('votita', '0003_auto_20170811_1542'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='kidsgathering',
+ name='comments',
+ field=models.TextField(blank=True, default='', help_text='\xbfC\xf3mo podemos empoderar mejor a nuestros ni\xf1os?', verbose_name='\xbfTienes alg\xfan comentario sobre la actividad?'),
+ ),
+ ]
diff --git a/votita/models.py b/votita/models.py
--- a/votita/models.py
+++ b/votita/models.py
@@ -66,6 +66,10 @@ class KidsGathering(models.Model):
generated_at = models.ForeignKey(Area,
null=True,
blank=True)
+ comments = models.TextField(verbose_name=u"ยฟTienes algรบn comentario sobre la actividad?",
+ help_text=u"ยฟCรณmo podemos empoderar mejor a nuestros niรฑos?",
+ blank=True,
+ default=u"")
@property
def presidents_features_str(self):
| diff --git a/votita/tests/forms_tests.py b/votita/tests/forms_tests.py
--- a/votita/tests/forms_tests.py
+++ b/votita/tests/forms_tests.py
@@ -45,7 +45,8 @@ def test_update_gathering(self):
data = {
'male': 10,
'female': 10,
- 'others': 10
+ 'others': 10,
+ 'comments': "Muy buena actividad, esto es lindo",
}
file_data = {'image': photo}
form = UpdateGatheringForm(data=data,
@@ -56,3 +57,4 @@ def test_update_gathering(self):
g = KidsGathering.objects.get(id=g.id)
self.assertTrue(g.stats_data)
self.assertTrue(g.image)
+ self.assertTrue(g.comments)
diff --git a/votita/tests/model_tests.py b/votita/tests/model_tests.py
--- a/votita/tests/model_tests.py
+++ b/votita/tests/model_tests.py
@@ -84,10 +84,12 @@ def test_gathering_can_have_area(self):
generated_at=a_comuna,
stats_data=stats_data,
presidents_features=['ingeligente',
- 'honesto'])
+ 'honesto'],
+ comments="This is a comment")
self.assertTrue(gathering.created)
self.assertTrue(gathering.updated)
self.assertEquals(gathering.generated_at, a_comuna)
+ self.assertTrue(gathering.comments)
def test_a_proposal_is_related_to_it(self):
stats_data = {
| Add a text field at the end so that they can tell us about their experience.
In the final part (where they upload the photo) it should say something like: "What did you think of this activity? Do you think the kids took something home with them? How could we improve?".
| 2017-08-16T18:36:10 |
|
ciudadanointeligente/votainteligente-portal-electoral | 858 | ciudadanointeligente__votainteligente-portal-electoral-858 | [
"855"
] | 7e258a673960b73309a54bf1bef3957965df55e8 | diff --git a/popular_proposal/migrations/0026_popularproposal_featured.py b/popular_proposal/migrations/0026_popularproposal_featured.py
new file mode 100644
--- /dev/null
+++ b/popular_proposal/migrations/0026_popularproposal_featured.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-08-18 18:31
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('popular_proposal', '0025_popularproposal_content_type'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='popularproposal',
+ name='featured',
+ field=models.BooleanField(default=False),
+ ),
+ ]
diff --git a/popular_proposal/models.py b/popular_proposal/models.py
--- a/popular_proposal/models.py
+++ b/popular_proposal/models.py
@@ -211,6 +211,7 @@ class PopularProposal(models.Model, OGPMixin):
is_reported = models.BooleanField(default=False)
content_type = models.ForeignKey(ContentType, null=True)
+ featured = models.BooleanField(default=False)
ogp_enabled = True
@@ -219,7 +220,7 @@ class PopularProposal(models.Model, OGPMixin):
all_objects = models.Manager()
class Meta:
- ordering = ['for_all_areas', '-created']
+ ordering = ['-featured' ,'for_all_areas', '-created']
verbose_name = _(u'Propuesta Ciudadana')
verbose_name_plural = _(u'Propuestas Ciudadanas')
| diff --git a/popular_proposal/tests/popular_proposal_model_tests.py b/popular_proposal/tests/popular_proposal_model_tests.py
--- a/popular_proposal/tests/popular_proposal_model_tests.py
+++ b/popular_proposal/tests/popular_proposal_model_tests.py
@@ -42,6 +42,7 @@ def test_instantiate_one(self):
self.assertFalse(popular_proposal.for_all_areas)
self.assertFalse(popular_proposal.is_local_meeting)
self.assertFalse(popular_proposal.is_reported)
+ self.assertFalse(popular_proposal.featured)
content_type = popular_proposal.content_type
expected_content_type = ContentType.objects.get_for_model(PopularProposal)
self.assertEquals(content_type, expected_content_type)
@@ -82,6 +83,29 @@ def test_reportedproposals_are_not_in_default_manager(self):
self.assertIn(p1, PopularProposal.all_objects.all())
self.assertIn(p2, PopularProposal.all_objects.all())
+ def test_featured_proposals_are_first(self):
+ p1 = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title1',
+ clasification=u'education'
+ )
+ p2 = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title2',
+ clasification=u'education',
+ featured=True
+ )
+ p3 = PopularProposal.objects.create(proposer=self.fiera,
+ area=self.arica,
+ data=self.data,
+ title=u'This is a title3',
+ clasification=u'education'
+ )
+ proposals = PopularProposal.objects.all()
+ self.assertEquals(p2, proposals.first())
+
def test_proposal_ogp(self):
site = Site.objects.get_current()
popular_proposal = PopularProposal.objects.create(proposer=self.fiera,
| Add an extra field to citizen proposals, called something like ```featured```, which should be a boolean.
Proposals should be ordered by this field first and then by the existing ordering.
| 2017-08-21T20:38:00 |
|
opsdroid/opsdroid | 12 | opsdroid__opsdroid-12 | [
"6"
] | c268280fd99b569183c60f331694ad1cdae63524 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -3,6 +3,7 @@
import logging
import sys
import weakref
+from multiprocessing import Process
from opsdroid.helper import match
from opsdroid.memory import Memory
@@ -17,6 +18,7 @@ def __init__(self):
self.bot_name = 'opsdroid'
self.sys_status = 0
self.connectors = []
+ self.connector_jobs = []
self.skills = []
self.memory = Memory()
logging.info("Created main opsdroid object")
@@ -56,7 +58,11 @@ def start_connectors(self, connectors):
connector_module["config"]["bot-name"] = self.bot_name
connector = cls(connector_module["config"])
self.connectors.append(connector)
- connector.connect(self)
+ job = Process(target=connector.connect, args=(self,))
+ job.start()
+ self.connector_jobs.append(job)
+ for job in self.connector_jobs:
+ job.join()
def start_databases(self, databases):
"""Start the databases."""
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -69,8 +69,7 @@ def test_start_connectors(self):
"tests.mockmodules.connectors.connector")
opsdroid.start_connectors([module])
self.assertEqual(len(opsdroid.connectors), 1)
- self.assertEqual(
- len(opsdroid.connectors[0].connect.mock_calls), 1)
+ self.assertEqual(len(opsdroid.connector_jobs), 1)
def test_multiple_opsdroids(self):
with OpsDroid() as opsdroid:
| Connectors should fork
When a connector is started it should fork into its own process. This is because connectors block to accept messages from their source.
This requires #5 to enable persistent memory between connector processes.
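A minimal sketch of that idea, assuming a `connectors` list and an `opsdroid` object already exist (this is not the exact opsdroid code):
```python
from multiprocessing import Process

# Each connector's connect() blocks while it listens for messages, so run
# every connector in its own process and then wait on all of them.
jobs = [Process(target=connector.connect, args=(opsdroid,))
        for connector in connectors]
for job in jobs:
    job.start()
for job in jobs:
    job.join()
```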
| 2016-08-03T21:18:24 |
|
opsdroid/opsdroid | 13 | opsdroid__opsdroid-13 | [
"7"
] | 8ee3d206bba28e15764dbdd8ddcb34f78be01c31 | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -5,7 +5,6 @@
import shutil
import subprocess
import importlib
-import pip
import yaml
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULE_BRANCH)
@@ -51,6 +50,19 @@ def git_clone(git_url, install_path, branch):
process.wait()
+def pip_install_deps(requirements_path):
+ """Pip install a requirements.txt file and wait for finish."""
+ process = subprocess.Popen(["pip", "install", "-r", requirements_path],
+ shell=False,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ for output in process.communicate():
+ if output != "":
+ for line in output.splitlines():
+ logging.debug(str(line).strip())
+ process.wait()
+
+
class Loader:
"""Class to load in config and modules."""
@@ -174,5 +186,4 @@ def _install_module(self, config):
# Install module dependancies
if os.path.isfile(config["install_path"] + "/requirements.txt"):
- pip.main(["install", "-r", config["install_path"] +
- "/requirements.txt"])
+ pip_install_deps(config["install_path"] + "/requirements.txt")
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -20,6 +20,9 @@ def setup(self):
loader = ld.Loader(opsdroid)
return opsdroid, loader
+ def reset_subprocess_mocks(self):
+ sys.modules['subprocess'].mock_calls = []
+
def test_load_config_file(self):
opsdroid, loader = self.setup()
config = loader.load_config_file("tests/configs/minimal.yaml")
@@ -43,10 +46,16 @@ def test_setup_modules(self):
self.assertEqual(len(example_modules[0]["module"].mock_calls), 1)
def test_git_clone(self):
+ self.reset_subprocess_mocks()
ld.git_clone("https://github.com/rmccue/test-repository.git",
"/tmp/test", "master")
self.assertNotEqual(len(sys.modules['subprocess'].mock_calls), 0)
+ def test_pip_install_deps(self):
+ self.reset_subprocess_mocks()
+ ld.pip_install_deps("/path/to/some/file.txt")
+ self.assertNotEqual(len(sys.modules['subprocess'].mock_calls), 0)
+
def test_build_module_path(self):
config = {}
config["type"] = "test"
| Manage logging properly
When a function calls `subprocess.Popen()` the logging seems to reset to default and print to `stdout` and `stderr`.
This is probably because logging hasn't been configured properly. The `opsdroid` object should probably handle this as it is accessible almost everywhere.
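A rough sketch of that approach, with an illustrative helper name: capture the child process output and forward it to the `logging` module instead of letting it print directly.
```python
import logging
import subprocess

def run_and_log(command):
    """Run a command and send each line of its output to the debug log."""
    process = subprocess.Popen(command, shell=False,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    for output in process.communicate():
        for line in output.splitlines():
            logging.debug(str(line).strip())
    process.wait()
```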
| 2016-08-04T07:20:49 |
|
opsdroid/opsdroid | 22 | opsdroid__opsdroid-22 | [
"21"
] | a83fe0672936bc0f23bf824b1ffcb0135795f453 | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -15,7 +15,11 @@ def main():
logging.info("Stated application")
with OpsDroid() as opsdroid:
loader = Loader(opsdroid)
- opsdroid.config = loader.load_config_file("./configuration.yaml")
+ opsdroid.config = loader.load_config_file([
+ "./configuration.yaml",
+ "~/.opsdroid/configuration.yaml",
+ "/etc/opsdroid/configuration.yaml"
+ ])
if "logging" in opsdroid.config:
set_logging_level(opsdroid.config['logging'])
loader.load_config(opsdroid.config)
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -71,11 +71,19 @@ def __init__(self, opsdroid):
self.opsdroid = opsdroid
logging.debug("Loaded loader")
- def load_config_file(self, config_path):
+ def load_config_file(self, config_paths):
"""Load a yaml config file from path."""
- if not os.path.isfile(config_path):
- self.opsdroid.critical("Config file " + config_path +
- " not found", 1)
+ config_path = ""
+ for possible_path in config_paths:
+ if not os.path.isfile(possible_path):
+ logging.warning("Config file " + possible_path +
+ " not found", 1)
+ else:
+ config_path = possible_path
+ break
+
+ if not config_path:
+ self.opsdroid.critical("No configuration files found", 1)
try:
with open(config_path, 'r') as stream:
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -25,17 +25,17 @@ def reset_subprocess_mocks(self):
def test_load_config_file(self):
opsdroid, loader = self.setup()
- config = loader.load_config_file("tests/configs/minimal.yaml")
+ config = loader.load_config_file(["tests/configs/minimal.yaml"])
self.assertIsNotNone(config)
def test_load_non_existant_config_file(self):
opsdroid, loader = self.setup()
- loader.load_config_file("file_which_does_not_exist")
+ loader.load_config_file(["file_which_does_not_exist"])
self.assertEqual(len(opsdroid.mock_calls), 2)
def test_load_broken_config_file(self):
opsdroid, loader = self.setup()
- loader.load_config_file("tests/configs/broken.yaml")
+ loader.load_config_file(["tests/configs/broken.yaml"])
self.assertRaises(yaml.YAMLError)
def test_setup_modules(self):
| Config locations
Currently opsdroid looks for the `configuration.yaml` file in the current working directory. It should also look in `~/.opsdroid/configuration.yaml` and `/etc/opsdroid/configuration.yaml`.
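A sketch of that lookup order, where the first existing file wins (the function name is illustrative):
```python
import os

def find_config_file():
    possible_paths = ["./configuration.yaml",
                      os.path.expanduser("~/.opsdroid/configuration.yaml"),
                      "/etc/opsdroid/configuration.yaml"]
    for path in possible_paths:
        if os.path.isfile(path):
            return path
    return None  # no config found, caller decides what to do
```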
| 2016-08-07T21:31:45 |
|
opsdroid/opsdroid | 25 | opsdroid__opsdroid-25 | [
"24"
] | 9e6ca3e6c4ff1f41316a258ad181d27dfbd290cc | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -157,7 +157,10 @@ def _load_modules(self, modules_type, modules):
def _setup_modules(self, modules):
"""Call the setup function on the passed in modules."""
for module in modules:
- module["module"].setup(self.opsdroid)
+ try:
+ module["module"].setup(self.opsdroid)
+ except AttributeError:
+ pass
def _install_module(self, config):
# pylint: disable=R0201
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -42,6 +42,7 @@ def test_setup_modules(self):
opsdroid, loader = self.setup()
example_modules = []
example_modules.append({"module": mock.MagicMock()})
+ example_modules.append({"module": {"name": "test"}})
loader._setup_modules(example_modules)
self.assertEqual(len(example_modules[0]["module"].mock_calls), 1)
| Skill setup should be optional
You might not want to have a setup function in your skill.
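The patch above catches `AttributeError`; an equivalent sketch using `getattr`, assuming the same `modules` list and `opsdroid` object, would be:
```python
for module in modules:
    # Only call setup() when the skill module actually defines it.
    setup = getattr(module["module"], "setup", None)
    if callable(setup):
        setup(opsdroid)
```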
| 2016-08-09T15:04:13 |
|
opsdroid/opsdroid | 28 | opsdroid__opsdroid-28 | [
"27"
] | 8a83182211e98de45a5e6c54b3d996167823a810 | diff --git a/opsdroid/helper.py b/opsdroid/helper.py
--- a/opsdroid/helper.py
+++ b/opsdroid/helper.py
@@ -27,4 +27,4 @@ def set_logging_level(logging_level):
def match(regex, message):
"""Regex match a string."""
- return re.match(regex, message, re.M | re.I)
+ return re.match(regex, message)
| diff --git a/tests/test_helper.py b/tests/test_helper.py
--- a/tests/test_helper.py
+++ b/tests/test_helper.py
@@ -33,3 +33,11 @@ def test_match(self):
match = helper.match(r"hello (.*)", "hello world")
self.assertEqual(match.group(1), "world")
+
+ def test_sensitive_match(self):
+ """Matches should be case sensitive"""
+ match = helper.match(r"hello", "hello")
+ self.assertTrue(match)
+
+ match = helper.match(r"hello", "HELLO")
+ self.assertFalse(match)
| Regex case sensitive
The regex match is currently case insensitive. It shouldn't be.
https://github.com/opsdroid/opsdroid/blob/master/opsdroid/helper.py#L30
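For illustration, the difference in behaviour:
```python
import re

re.match(r"hello", "HELLO", re.M | re.I)  # matches: case insensitive
re.match(r"hello", "HELLO")               # returns None: case sensitive
```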
| 2016-08-12T13:02:47 |
|
opsdroid/opsdroid | 30 | opsdroid__opsdroid-30 | [
"29"
] | 5916a400a6ad8e2229950c45d71aa5aead9d6045 | diff --git a/opsdroid/message.py b/opsdroid/message.py
--- a/opsdroid/message.py
+++ b/opsdroid/message.py
@@ -1,5 +1,7 @@
"""Class to encapsulate a message."""
+from copy import copy
+
class Message:
# pylint: disable=too-few-public-methods
@@ -15,5 +17,6 @@ def __init__(self, text, user, room, connector):
def respond(self, text):
"""Respond to this message using the connector it was created by."""
- self.text = text
- self.connector.respond(self)
+ response = copy(self)
+ response.text = text
+ self.connector.respond(response)
| diff --git a/tests/test_message.py b/tests/test_message.py
--- a/tests/test_message.py
+++ b/tests/test_message.py
@@ -19,4 +19,11 @@ def test_message(self):
message.respond("Goodbye world")
self.assertEqual(len(mock_connector.mock_calls), 1)
- self.assertEqual(message.text, "Goodbye world")
+
+ def test_response_effects(self):
+ """Responding to a message shouldn't change the message."""
+ mock_connector = mock.MagicMock()
+ message_text = "Hello world"
+ message = Message(message_text, "user", "default", mock_connector)
+ message.respond("Goodbye world")
+ self.assertEqual(message_text, message.text)
| Copy message on respond
When a message responds it updates its `text` value and passes itself to the connector. Because the message object is passed by reference in Python, the next rule to parse the message goes on to parse the response text instead of the original.
The message respond method should create a shallow copy of itself to pass to the connector, instead of updating itself directly.
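A short illustration of the problem, mirroring the tests above (the connector is a stand-in mock):
```python
from unittest import mock
from opsdroid.message import Message

connector = mock.MagicMock()
message = Message("what time is it?", "user", "default", connector)
message.respond("It is 3pm")
# Without the copy, message.text is now "It is 3pm", so any later rule
# that inspects message.text parses the response rather than the question.
```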
| 2016-08-12T14:01:22 |
|
opsdroid/opsdroid | 32 | opsdroid__opsdroid-32 | [
"17"
] | a1d97f4e204c007177ad65945b4b7c4909237d51 | diff --git a/opsdroid/connector.py b/opsdroid/connector.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector.py
@@ -0,0 +1,59 @@
+"""A base class for connectors to inherit from."""
+
+from opsdroid.message import Message # NOQA # pylint: disable=unused-import
+
+
+class Connector():
+ """A base connector.
+
+ Connectors are used to interact with a given chat service.
+
+ """
+
+ def __init__(self, config):
+ """Setup the connector.
+
+ Set some basic properties from the connector config such as the name
+ of this connector and the name the bot should appear with in chat
+ service.
+
+ Args:
+ config (dict): The config for this connector specified in the
+ `configuration.yaml` file.
+
+ """
+ self.name = ""
+ self.config = config
+
+ def connect(self, opsdroid):
+ """Connect to chat service and parse all messages.
+
+ This method should block the thread with an infinite loop and create
+ Message objects for chat messages coming from the service. It should
+ then call `opsdroid.parse(message)` on those messages.
+
+ Due to this method blocking, if multiple connectors are configured in
+ opsdroid they will be run in parallel using the multiprocessing
+ library.
+
+ Args:
+ opsdroid (OpsDroid): An instance of the opsdroid core.
+
+ """
+ raise NotImplementedError
+
+ def respond(self, message):
+ """Send a message back to the chat service.
+
+ The message object will have a `text` property which should be sent
+ back to the chat service. It may also have a `room` and `user` property
+ which gives information on where the message should be directed.
+
+ Args:
+ message (Message): A message received by the connector.
+
+ Returns:
+ bool: True for message successfully sent. False otherwise.
+
+ """
+ raise NotImplementedError
diff --git a/opsdroid/database.py b/opsdroid/database.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/database.py
@@ -0,0 +1,71 @@
+"""A base class for databases to inherit from."""
+
+
+class Database():
+ """A base database.
+
+ Database classes are used to persist key/value pairs in a database.
+
+ """
+
+ def __init__(self, config):
+ """Setup the database.
+
+ Set some basic properties from the database config such as the name
+ of this database. It could also be a good place to setup properties
+ to hold things like the database connection object and the database
+ name.
+
+ Args:
+ config (dict): The config for this database specified in the
+ `configuration.yaml` file.
+
+ """
+ self.name = ""
+ self.config = config
+ self.client = None
+ self.database = None
+
+ def connect(self, opsdroid):
+ """Connect to chat service and store the connection object.
+
+ This method should connect to the given database using a native
+ python library for that database. The library will most likely involve
+ a connection object which will be used by the put and get methods.
+ This object should be stored in self.
+
+ Args:
+ opsdroid (OpsDroid): An instance of the opsdroid core.
+
+ """
+ raise NotImplementedError
+
+ def put(self, key, data):
+ """Store the data object in a database against the key.
+
+ The data object will need to be serialised in a sensible way which
+ suits the database being used and allows for reconstruction of the
+ object.
+
+ Args:
+ key (string): The key to store the data object under.
+ data (object): The data object to store.
+
+ Returns:
+ bool: True for data successfully stored, False otherwise.
+
+ """
+ raise NotImplementedError
+
+ def get(self, key):
+ """Return a data object for a given key.
+
+ Args:
+ key (string): The key to lookup in the database.
+
+ Returns:
+ object or None: The data object stored for that key, or None if no
+ object found for that key.
+
+ """
+ raise NotImplementedError
| diff --git a/tests/test_connector.py b/tests/test_connector.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector.py
@@ -0,0 +1,24 @@
+
+import unittest
+
+from opsdroid.connector import Connector
+
+
+class TestConnectorBaseClass(unittest.TestCase):
+ """Test the opsdroid connector base class."""
+
+ def test_init(self):
+ config = {"example_item": "test"}
+ connector = Connector(config)
+ self.assertEqual("", connector.name)
+ self.assertEqual("test", connector.config["example_item"])
+
+ def test_connect(self):
+ connector = Connector({})
+ with self.assertRaises(NotImplementedError):
+ connector.connect({})
+
+ def test_respond(self):
+ connector = Connector({})
+ with self.assertRaises(NotImplementedError):
+ connector.respond({})
diff --git a/tests/test_database.py b/tests/test_database.py
new file mode 100644
--- /dev/null
+++ b/tests/test_database.py
@@ -0,0 +1,29 @@
+
+import unittest
+
+from opsdroid.database import Database
+
+
+class TestDatabaseBaseClass(unittest.TestCase):
+ """Test the opsdroid database base class."""
+
+ def test_init(self):
+ config = {"example_item": "test"}
+ database = Database(config)
+ self.assertEqual("", database.name)
+ self.assertEqual("test", database.config["example_item"])
+
+ def test_connect(self):
+ database = Database({})
+ with self.assertRaises(NotImplementedError):
+ database.connect({})
+
+ def test_get(self):
+ database = Database({})
+ with self.assertRaises(NotImplementedError):
+ database.get("test")
+
+ def test_put(self):
+ database = Database({})
+ with self.assertRaises(NotImplementedError):
+ database.put("test", {})
| Connector and Database base classes
Connector and Database module classes should inherit from some kind of base class with required methods defined to raise not implemented errors.
This would also mean that instead of checking the class name when importing the modules we can instead check whether the class inherits from the base class.
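For example, a hypothetical connector built on the base class could look like this (the class and its behaviour are purely illustrative):
```python
from opsdroid.connector import Connector
from opsdroid.message import Message

class ShellConnector(Connector):
    def connect(self, opsdroid):
        while True:
            text = input("> ")  # blocks waiting for chat input
            opsdroid.parse(Message(text, "user", "shell", self))

    def respond(self, message):
        print(message.text)
        return True
```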
| 2016-08-19T10:08:23 |
|
opsdroid/opsdroid | 34 | opsdroid__opsdroid-34 | [
"33"
] | 203f0a1d723b3d0ee4822a259a2f1e78063a0fe0 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -6,6 +6,8 @@
from multiprocessing import Process
from opsdroid.helper import match
from opsdroid.memory import Memory
+from opsdroid.connector import Connector
+from opsdroid.database import Database
class OpsDroid():
@@ -54,7 +56,9 @@ def start_connectors(self, connectors):
self.critical("All connectors failed to load", 1)
elif len(connectors) == 1:
for name, cls in connectors[0]["module"].__dict__.items():
- if isinstance(cls, type) and "Connector" in name:
+ if isinstance(cls, type) and \
+ isinstance(cls({}), Connector):
+ logging.debug("Adding connector: " + name)
connectors[0]["config"]["bot-name"] = self.bot_name
connector = cls(connectors[0]["config"])
self.connectors.append(connector)
@@ -62,7 +66,8 @@ def start_connectors(self, connectors):
else:
for connector_module in connectors:
for name, cls in connector_module["module"].__dict__.items():
- if isinstance(cls, type) and "Connector" in name:
+ if isinstance(cls, type) and \
+ isinstance(cls({}), Connector):
connector_module["config"]["bot-name"] = self.bot_name
connector = cls(connector_module["config"])
self.connectors.append(connector)
@@ -78,11 +83,12 @@ def start_databases(self, databases):
logging.warning("All databases failed to load")
for database_module in databases:
for name, cls in database_module["module"].__dict__.items():
- if isinstance(cls, type) and "Database" in name:
+ if isinstance(cls, type) and \
+ isinstance(cls({}), Database):
logging.debug("Adding database: " + name)
database = cls(database_module["config"])
self.memory.databases.append(database)
- database.connect()
+ database.connect(self)
def load_regex_skill(self, regex, skill):
"""Load skills."""
| diff --git a/tests/mockmodules/connectors/connector.py b/tests/mockmodules/connectors/connector.py
--- a/tests/mockmodules/connectors/connector.py
+++ b/tests/mockmodules/connectors/connector.py
@@ -2,8 +2,10 @@
import unittest.mock as mock
+from opsdroid.connector import Connector
-class ConnectorTest:
+
+class ConnectorTest(Connector):
"""The mocked connector class."""
def __init__(self, config):
diff --git a/tests/mockmodules/databases/database.py b/tests/mockmodules/databases/database.py
--- a/tests/mockmodules/databases/database.py
+++ b/tests/mockmodules/databases/database.py
@@ -2,8 +2,10 @@
import unittest.mock as mock
+from opsdroid.database import Database
-class DatabaseTest:
+
+class DatabaseTest(Database):
"""The mocked database class."""
def __init__(self, config):
diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -55,10 +55,8 @@ def test_start_databases(self):
module["config"] = {}
module["module"] = importlib.import_module(
"tests.mockmodules.databases.database")
- opsdroid.start_databases([module])
- self.assertEqual(len(opsdroid.memory.databases), 1)
- self.assertEqual(
- len(opsdroid.memory.databases[0].connect.mock_calls), 1)
+ with self.assertRaises(NotImplementedError):
+ opsdroid.start_databases([module])
def test_start_connectors(self):
with OpsDroid() as opsdroid:
@@ -67,12 +65,11 @@ def test_start_connectors(self):
module["config"] = {}
module["module"] = importlib.import_module(
"tests.mockmodules.connectors.connector")
- opsdroid.start_connectors([module])
- self.assertEqual(len(opsdroid.connectors), 1)
+
+ with self.assertRaises(NotImplementedError):
+ opsdroid.start_connectors([module])
opsdroid.start_connectors([module, module])
- self.assertEqual(len(opsdroid.connectors), 3)
- self.assertEqual(len(opsdroid.connector_jobs), 2)
def test_multiple_opsdroids(self):
with OpsDroid() as opsdroid:
| Check for base classes instead of name when importing connectors and databases
Currently you must give your connector or database class a specific name for it to be imported.
Now that these classes inherit from the base classes we can test with `isinstance()` against the base class instead.
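The patch above instantiates each candidate class before checking it; an alternative sketch that avoids the instantiation uses `issubclass`:
```python
from opsdroid.connector import Connector

def is_connector_class(obj):
    """True for classes that subclass Connector, excluding the base itself."""
    return isinstance(obj, type) and issubclass(obj, Connector) \
        and obj is not Connector
```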
| 2016-08-19T11:05:03 |
|
opsdroid/opsdroid | 41 | opsdroid__opsdroid-41 | [
"39"
] | 38975e453e065e9be19a191d38a38492bb196c6a | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -1,6 +1,9 @@
"""Starts opsdroid."""
+import sys
+import os
import logging
+import argparse
from opsdroid.loader import Loader
from opsdroid.core import OpsDroid
@@ -8,11 +11,30 @@
from opsdroid.const import LOG_FILENAME
+def parse_args(args):
+ """Parse command line arguments."""
+ parser = argparse.ArgumentParser(description='Run opsdroid.')
+ parser.add_argument('--gen-config', action="store_true",
+ help='prints out an example configuration file')
+ return parser.parse_args(args)
+
+
def main():
"""The main function."""
logging.basicConfig(filename=LOG_FILENAME, level=logging.INFO)
logging.info("="*40)
logging.info("Stated application")
+
+ args = parse_args(sys.argv[1:])
+
+ if args.gen_config:
+ path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ "configuration/example_configuration.yaml")
+ with open(path, 'r') as conf:
+ print(conf.read())
+ sys.exit(0)
+
with OpsDroid() as opsdroid:
loader = Loader(opsdroid)
opsdroid.config = loader.load_config_file([
| diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,21 @@
+
+import sys
+import unittest
+import unittest.mock as mock
+
+sys.modules['sys'].exit = mock.MagicMock()
+
+import opsdroid.__main__ as opsdroid # noqa: E402
+
+
+class TestMain(unittest.TestCase):
+ """Test the main opsdroid module."""
+
+ def test_parse_args(self):
+ args = opsdroid.parse_args(["--gen-config"])
+ self.assertEqual(True, args.gen_config)
+
+ def text_gen_config(self):
+ sys.argv = ["--gen-config"]
+ opsdroid.main()
+ self.assertEqual(1, len(sys.modules['sys'].exit.mock_calls))
| Generate default config
It should be possible to generate some basic config with a command line flag to opsdroid. It should cause opsdroid to print out the config so that it can be piped into a file.
e.g
```
opsdroid --gen-config > configuration.yaml
```
| 2016-08-25T15:10:32 |
|
opsdroid/opsdroid | 43 | opsdroid__opsdroid-43 | [
"16"
] | eebab3c36f5b8dc0b9a4e26a5179bcdca8d7cb51 | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -10,59 +10,6 @@
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULE_BRANCH)
-def import_module(config):
- """Import module namespace as variable and return it."""
- try:
- module = importlib.import_module(
- config["path"] + "." + config["name"])
- logging.debug("Loading " + config["type"] + ": " + config["name"])
- return module
- except ImportError as error:
- logging.error("Failed to load " + config["type"] +
- " " + config["name"])
- logging.error(error)
- return None
-
-
-def check_cache(config):
- """Remove module if 'no-cache' set in config."""
- if "no-cache" in config \
- and config["no-cache"] \
- and os.path.isdir(config["install_path"]):
- logging.debug("'no-cache' set, removing " + config["install_path"])
- shutil.rmtree(config["install_path"])
-
-
-def build_module_path(path_type, config):
- """Generate the module path from name and type."""
- if path_type == "import":
- return MODULES_DIRECTORY + "." + config["type"] + "." + config["name"]
- elif path_type == "install":
- return MODULES_DIRECTORY + "/" + config["type"] + "/" + config["name"]
-
-
-def git_clone(git_url, install_path, branch):
- """Clone a git repo to a location and wait for finish."""
- process = subprocess.Popen(["git", "clone", "-b", branch,
- git_url, install_path], shell=False,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- process.wait()
-
-
-def pip_install_deps(requirements_path):
- """Pip install a requirements.txt file and wait for finish."""
- process = subprocess.Popen(["pip", "install", "-r", requirements_path],
- shell=False,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- for output in process.communicate():
- if output != "":
- for line in output.splitlines():
- logging.debug(str(line).strip())
- process.wait()
-
-
class Loader:
"""Class to load in config and modules."""
@@ -71,6 +18,61 @@ def __init__(self, opsdroid):
self.opsdroid = opsdroid
logging.debug("Loaded loader")
+ @staticmethod
+ def import_module(config):
+ """Import module namespace as variable and return it."""
+ try:
+ module = importlib.import_module(
+ config["path"] + "." + config["name"])
+ logging.debug("Loading " + config["type"] + ": " + config["name"])
+ return module
+ except ImportError as error:
+ logging.error("Failed to load " + config["type"] +
+ " " + config["name"])
+ logging.error(error)
+ return None
+
+ @staticmethod
+ def check_cache(config):
+ """Remove module if 'no-cache' set in config."""
+ if "no-cache" in config \
+ and config["no-cache"] \
+ and os.path.isdir(config["install_path"]):
+ logging.debug("'no-cache' set, removing " + config["install_path"])
+ shutil.rmtree(config["install_path"])
+
+ @staticmethod
+ def build_module_path(path_type, config):
+ """Generate the module path from name and type."""
+ if path_type == "import":
+ return MODULES_DIRECTORY + "." + config["type"] + \
+ "." + config["name"]
+ elif path_type == "install":
+ return MODULES_DIRECTORY + "/" + config["type"] + \
+ "/" + config["name"]
+
+ @staticmethod
+ def git_clone(git_url, install_path, branch):
+ """Clone a git repo to a location and wait for finish."""
+ process = subprocess.Popen(["git", "clone", "-b", branch,
+ git_url, install_path], shell=False,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ process.wait()
+
+ @staticmethod
+ def pip_install_deps(requirements_path):
+ """Pip install a requirements.txt file and wait for finish."""
+ process = subprocess.Popen(["pip", "install", "-r", requirements_path],
+ shell=False,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ for output in process.communicate():
+ if output != "":
+ for line in output.splitlines():
+ logging.debug(str(line).strip())
+ process.wait()
+
def load_config_file(self, config_paths):
"""Load a yaml config file from path."""
config_path = ""
@@ -134,19 +136,19 @@ def _load_modules(self, modules_type, modules):
config = {} if config is None else config
config["name"] = module_name
config["type"] = modules_type
- config["path"] = build_module_path("import", config)
- config["install_path"] = build_module_path("install", config)
+ config["path"] = self.build_module_path("import", config)
+ config["install_path"] = self.build_module_path("install", config)
if "branch" not in config:
config["branch"] = DEFAULT_MODULE_BRANCH
# Remove module for reinstall if no-cache set
- check_cache(config)
+ self.check_cache(config)
# Install module
self._install_module(config)
# Import module
- module = import_module(config)
+ module = self.import_module(config)
if module is not None:
loaded_modules.append({
"module": module,
@@ -181,11 +183,12 @@ def _install_module(self, config):
if any(prefix in git_url for prefix in ["http", "https", "ssh"]):
# TODO Test if url or ssh path exists
# TODO Handle github authentication
- git_clone(git_url, config["install_path"], config["branch"])
+ self.git_clone(git_url, config["install_path"],
+ config["branch"])
else:
if os.path.isdir(git_url):
- git_clone(git_url, config["install_path"],
- config["branch"])
+ self.git_clone(git_url, config["install_path"],
+ config["branch"])
else:
logging.debug("Could not find local git repo " + git_url)
@@ -197,4 +200,5 @@ def _install_module(self, config):
# Install module dependancies
if os.path.isfile(config["install_path"] + "/requirements.txt"):
- pip_install_deps(config["install_path"] + "/requirements.txt")
+ self.pip_install_deps(config["install_path"] +
+ "/requirements.txt")
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -48,28 +48,30 @@ def test_setup_modules(self):
def test_git_clone(self):
self.reset_subprocess_mocks()
- ld.git_clone("https://github.com/rmccue/test-repository.git",
- "/tmp/test", "master")
+ ld.Loader.git_clone("https://github.com/rmccue/test-repository.git",
+ "/tmp/test", "master")
self.assertNotEqual(len(sys.modules['subprocess'].mock_calls), 0)
def test_pip_install_deps(self):
self.reset_subprocess_mocks()
- ld.pip_install_deps("/path/to/some/file.txt")
+ ld.Loader.pip_install_deps("/path/to/some/file.txt")
self.assertNotEqual(len(sys.modules['subprocess'].mock_calls), 0)
def test_build_module_path(self):
config = {}
config["type"] = "test"
config["name"] = "test"
- self.assertIn("test.test", ld.build_module_path("import", config))
- self.assertIn("test/test", ld.build_module_path("install", config))
+ self.assertIn("test.test",
+ ld.Loader.build_module_path("import", config))
+ self.assertIn("test/test",
+ ld.Loader.build_module_path("install", config))
def test_check_cache_removes(self):
config = {}
config["no-cache"] = True
config['install_path'] = "/tmp/test/module"
os.makedirs(config['install_path'])
- ld.check_cache(config)
+ ld.Loader.check_cache(config)
self.assertFalse(os.path.isdir(config["install_path"]))
def test_check_cache_leaves(self):
@@ -77,7 +79,7 @@ def test_check_cache_leaves(self):
config["no-cache"] = False
config['install_path'] = "/tmp/test/module"
os.makedirs(config['install_path'])
- ld.check_cache(config)
+ ld.Loader.check_cache(config)
self.assertTrue(os.path.isdir(config["install_path"]))
shutil.rmtree(config["install_path"])
@@ -87,7 +89,7 @@ def test_import_module(self):
config["name"] = "path"
config["type"] = "system"
- module = ld.import_module(config)
+ module = ld.Loader.import_module(config)
self.assertIsInstance(module, ModuleType)
def test_import_module_failure(self):
@@ -96,7 +98,7 @@ def test_import_module_failure(self):
config["name"] = "module"
config["type"] = "broken"
- module = ld.import_module(config)
+ module = ld.Loader.import_module(config)
self.assertEqual(module, None)
def test_load_config(self):
| Loader static methods
There are some functions at the beginning of the loader file. These should be private static methods in the class.
| 2016-08-26T13:21:37 |
|
opsdroid/opsdroid | 58 | opsdroid__opsdroid-58 | [
"48"
] | bef3ccfae2b581b0e01fa001334a6beef7a7df57 | diff --git a/opsdroid/connector.py b/opsdroid/connector.py
--- a/opsdroid/connector.py
+++ b/opsdroid/connector.py
@@ -24,6 +24,7 @@ def __init__(self, config):
"""
self.name = ""
self.config = config
+ self.default_room = None
async def connect(self, opsdroid):
"""Connect to chat service.
diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -46,6 +46,18 @@ def __exit__(self, exc_type, exc_value, traceback):
"""Remove self from existing instances."""
self.__class__.instances = []
+ @property
+ def default_connector(self):
+ """Return the default connector."""
+ default_connector = None
+ for connector in self.connectors:
+ if "default" in connector.config and connector.config["default"]:
+ default_connector = connector
+ break
+ if default_connector is None:
+ default_connector = self.connectors[0]
+ return default_connector
+
def exit(self):
"""Exit application."""
logging.info("Exiting application with return code " +
| diff --git a/tests/test_connector.py b/tests/test_connector.py
--- a/tests/test_connector.py
+++ b/tests/test_connector.py
@@ -14,6 +14,7 @@ def setUp(self):
def test_init(self):
config = {"example_item": "test"}
connector = Connector(config)
+ self.assertEqual(None, connector.default_room)
self.assertEqual("", connector.name)
self.assertEqual("test", connector.config["example_item"])
diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -104,6 +104,21 @@ def test_setup_modules(self):
opsdroid.setup_skills(example_modules)
self.assertEqual(len(example_modules[0]["module"].mock_calls), 1)
+ def test_default_connector(self):
+ with OpsDroid() as opsdroid:
+ mock_connector = Connector({})
+ opsdroid.connectors.append(mock_connector)
+ self.assertEqual(opsdroid.default_connector, mock_connector)
+
+ mock_default_connector = Connector({"default": True})
+ opsdroid.connectors.append(mock_default_connector)
+ self.assertEqual(opsdroid.default_connector,
+ mock_default_connector)
+
+ def test_default_room(self):
+ mock_connector = Connector({})
+ self.assertEqual(None, mock_connector.default_room)
+
class TestCoreAsync(asynctest.TestCase):
"""Test the async methods of the opsdroid core class."""
| Default connector and default room
When writing a skill which originates from something other than a message (e.g cron #26) the response may need to know which room to post into.
Most chat clients have a default room, like `#general` in Slack. This could be available as a property in the connector so that skills can easily access it.
e.g
``` python
@non_message_decorator()
def myskill(opsdroid):
for connector in opsdroid.connectors:
message = Message("Message text", connector.default_room, None, connector)
connector.respond(message)
```
It should also be possible to override the default room in the connector config.
``` yaml
connectors:
slack:
default-room: "#random"
```
| It would also be good to have a default connector. This could either be the first specified or the one tagged with `default: true` (or the first one tagged in case of error).
e.g
``` yaml
connectors:
slack:
shell:
campfire:
default: True
```
This could be accessed like:
``` python
@non_message_decorator()
def myskill(opsdroid):
connector = opsdroid.default_connector
message = Message("Message text", connector.default_room, None, connector)
connector.respond(message)
```
| 2016-11-18T14:05:38 |
opsdroid/opsdroid | 82 | opsdroid__opsdroid-82 | [
"79"
] | 2cf677095e86f755a8366f285865debc389fc894 | diff --git a/opsdroid/skills.py b/opsdroid/matchers.py
similarity index 100%
rename from opsdroid/skills.py
rename to opsdroid/matchers.py
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -8,7 +8,7 @@
from opsdroid.core import OpsDroid
from opsdroid.message import Message
from opsdroid.connector import Connector
-from opsdroid.skills import match_regex, match_apiai_action
+from opsdroid.matchers import match_regex, match_apiai_action
class TestCore(unittest.TestCase):
diff --git a/tests/test_parser_apiai.py b/tests/test_parser_apiai.py
--- a/tests/test_parser_apiai.py
+++ b/tests/test_parser_apiai.py
@@ -5,7 +5,7 @@
from aiohttp import helpers
from opsdroid.core import OpsDroid
-from opsdroid.skills import match_apiai_action
+from opsdroid.matchers import match_apiai_action
from opsdroid.message import Message
from opsdroid.parsers import apiai
from opsdroid.connector import Connector
diff --git a/tests/test_parser_crontab.py b/tests/test_parser_crontab.py
--- a/tests/test_parser_crontab.py
+++ b/tests/test_parser_crontab.py
@@ -3,7 +3,7 @@
import asynctest.mock as amock
from opsdroid.core import OpsDroid
-from opsdroid.skills import match_crontab
+from opsdroid.matchers import match_crontab
from opsdroid.parsers.crontab import parse_crontab
diff --git a/tests/test_parser_regex.py b/tests/test_parser_regex.py
--- a/tests/test_parser_regex.py
+++ b/tests/test_parser_regex.py
@@ -3,7 +3,7 @@
import asynctest.mock as amock
from opsdroid.core import OpsDroid
-from opsdroid.skills import match_regex
+from opsdroid.matchers import match_regex
from opsdroid.message import Message
from opsdroid.parsers.regex import parse_regex
diff --git a/tests/test_skills.py b/tests/test_skills.py
--- a/tests/test_skills.py
+++ b/tests/test_skills.py
@@ -3,7 +3,7 @@
import unittest.mock as mock
from opsdroid.core import OpsDroid
-from opsdroid import skills
+from opsdroid import matchers
class TestSkillDecorators(unittest.TestCase):
@@ -13,7 +13,7 @@ def test_match_regex(self):
with OpsDroid() as opsdroid:
regex = r"(.*)"
mockedskill = mock.MagicMock()
- decorator = skills.match_regex(regex)
+ decorator = matchers.match_regex(regex)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 1)
self.assertEqual(opsdroid.skills[0]["regex"], regex)
@@ -23,13 +23,13 @@ def test_match_apiai(self):
with OpsDroid() as opsdroid:
action = "myaction"
mockedskill = mock.MagicMock()
- decorator = skills.match_apiai_action(action)
+ decorator = matchers.match_apiai_action(action)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 1)
self.assertEqual(opsdroid.skills[0]["apiai_action"], action)
self.assertIsInstance(opsdroid.skills[0]["skill"], mock.MagicMock)
intent = "myIntent"
- decorator = skills.match_apiai_intent(intent)
+ decorator = matchers.match_apiai_intent(intent)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 2)
self.assertEqual(opsdroid.skills[1]["apiai_intent"], intent)
@@ -39,7 +39,7 @@ def test_match_crontab(self):
with OpsDroid() as opsdroid:
crontab = "* * * * *"
mockedskill = mock.MagicMock()
- decorator = skills.match_crontab(crontab)
+ decorator = matchers.match_crontab(crontab)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 1)
self.assertEqual(opsdroid.skills[0]["crontab"], crontab)
| Rename skills submodule
When writing a skill you need to import a match function to decorate your function with.
```python
from opsdroid.skills import match_regex
@match_regex("what is your name?")
async def botname(opsdroid, message):
    await message.respond("My name is {0}".format(opsdroid.bot_name))
```
This could potentially be confusing as the function you're importing isn't a skill, it's a helper function for creating skills.
It should probably be renamed to one of the following:
* `opsdroid.matchers`
* `opsdroid.match`
* `opsdroid.skill-helpers`
* `opsdroid.skill-decorators`
* `opsdroid.triggers`
| 2017-01-31T16:28:11 |
|
opsdroid/opsdroid | 142 | opsdroid__opsdroid-142 | [
"141"
] | 1e65b7f5ca1b130f0756ad2bd0a7fd7a2be66476 | diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -45,14 +45,14 @@ def matcher(func):
return matcher
-def match_crontab(crontab):
+def match_crontab(crontab, timezone=None):
"""Return crontab match decorator."""
def matcher(func):
"""Add decorated function to skills list for crontab matching."""
opsdroid = get_opsdroid()
+ config = opsdroid.loader.current_import_config
opsdroid.skills.append({"crontab": crontab, "skill": func,
- "config":
- opsdroid.loader.current_import_config})
+ "config": config, "timezone": timezone})
return func
return matcher
diff --git a/opsdroid/parsers/crontab.py b/opsdroid/parsers/crontab.py
--- a/opsdroid/parsers/crontab.py
+++ b/opsdroid/parsers/crontab.py
@@ -1,9 +1,9 @@
"""A helper function for parsing and executing crontab skills."""
-import logging
import asyncio
-from datetime import datetime
+import logging
+import arrow
import pycron
@@ -17,11 +17,17 @@ async def parse_crontab(opsdroid):
# halt the application. If a skill throws an exception it just doesn't
# give a response to the user, so an error response should be given.
while opsdroid.eventloop.is_running():
- await asyncio.sleep(60 - datetime.now().time().second)
+ await asyncio.sleep(60 - arrow.now().time().second)
_LOGGER.debug("Running crontab skills")
for skill in opsdroid.skills:
- if "crontab" in skill and pycron.is_now(skill["crontab"]):
- try:
- await skill["skill"](opsdroid, skill["config"], None)
- except Exception:
- _LOGGER.exception("Exception when executing cron skill.")
+ if "crontab" in skill:
+ if skill["timezone"] is not None:
+ timezone = skill["timezone"]
+ else:
+ timezone = opsdroid.config.get("timezone", "UTC")
+ if pycron.is_now(skill["crontab"], arrow.now(tz=timezone)):
+ try:
+ await skill["skill"](opsdroid, skill["config"], None)
+ except Exception:
+ _LOGGER.exception(
+ "Exception when executing cron skill.")
| diff --git a/tests/test_parser_crontab.py b/tests/test_parser_crontab.py
--- a/tests/test_parser_crontab.py
+++ b/tests/test_parser_crontab.py
@@ -30,6 +30,18 @@ async def test_parse_crontab(self):
await parse_crontab(opsdroid)
self.assertTrue(mock_skill.called)
+ async def test_parse_crontab_timezone(self):
+ with OpsDroid() as opsdroid:
+ self.not_first_run_flag = True
+ opsdroid.eventloop.is_running = self.true_once
+ with amock.patch('asyncio.sleep'):
+ mock_skill = amock.CoroutineMock()
+ match_crontab("* * * * *",
+ timezone="Europe/London")(mock_skill)
+
+ await parse_crontab(opsdroid)
+ self.assertTrue(mock_skill.called)
+
async def test_parse_crontab_raises(self):
with OpsDroid() as opsdroid:
self.not_first_run_flag = True
| Make crontab parser timezone aware
The crontab matcher should take a timezone as a kwarg. It should also be possible to set a global timezone in the config. Default should be UTC.
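For example, mirroring the test above (the skill body is a placeholder):
```python
from opsdroid.matchers import match_crontab

@match_crontab("0 9 * * *", timezone="Europe/London")
async def morning_report(opsdroid, config, message):
    ...  # runs at 09:00 London time; without the kwarg the config/UTC default applies
```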
| 2017-05-03T19:48:02 |
|
opsdroid/opsdroid | 163 | opsdroid__opsdroid-163 | [
"78"
] | b6a6b803844b36e49146b468dc104be1e49235a5 | diff --git a/opsdroid/const.py b/opsdroid/const.py
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -4,5 +4,6 @@
LOG_FILENAME = 'output.log'
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
-MODULES_DIRECTORY = "modules"
+MODULES_DIRECTORY = "opsdroid-modules"
+DEFAULT_MODULES_PATH = "~/.opsdroid/modules"
DEFAULT_MODULE_BRANCH = "master"
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -8,7 +8,8 @@
import importlib
import yaml
from opsdroid.const import (
- DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULE_BRANCH)
+ DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
+ DEFAULT_MODULE_BRANCH)
_LOGGER = logging.getLogger(__name__)
@@ -20,7 +21,7 @@ class Loader:
def __init__(self, opsdroid):
"""Create object with opsdroid instance."""
self.opsdroid = opsdroid
- self.modules_directory = MODULES_DIRECTORY
+ self.modules_directory = None
self.current_import_config = None
_LOGGER.debug("Loaded loader")
@@ -122,16 +123,26 @@ def load_config_file(self, config_paths):
except FileNotFoundError as error:
self.opsdroid.critical(str(error), 1)
+ def setup_modules_directory(self, config):
+ """Create and configure the modules directory."""
+ module_path = os.path.expanduser(
+ config.get("module-path", DEFAULT_MODULES_PATH))
+ sys.path.append(module_path)
+
+ if not os.path.isdir(module_path):
+ os.makedirs(module_path, exist_ok=True)
+
+ self.modules_directory = os.path.join(module_path, MODULES_DIRECTORY)
+
+ # Create modules directory if doesn't exist
+ if not os.path.isdir(self.modules_directory):
+ os.makedirs(self.modules_directory)
+
def load_modules_from_config(self, config):
"""Load all module types based on config."""
_LOGGER.debug("Loading modules from config")
- if "module-path" in config:
- sys.path.append(config["module-path"])
- if not os.path.isdir(config["module-path"]):
- os.makedirs(config["module-path"], exist_ok=True)
- self.modules_directory = os.path.join(config["module-path"],
- self.modules_directory)
+ self.setup_modules_directory(config)
connectors, databases, skills = None, None, None
@@ -159,10 +170,6 @@ def _load_modules(self, modules_type, modules):
_LOGGER.debug("Loading " + modules_type + " modules")
loaded_modules = []
- # Create modules directory if doesn't exist
- if not os.path.isdir(self.modules_directory):
- os.makedirs(self.modules_directory)
-
for module in modules:
# Set up module config
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -153,20 +153,10 @@ def test_load_modules(self):
with mock.patch.object(loader, '_install_module') as mockinstall, \
mock.patch.object(loader, 'import_module',
mockedmodule) as mockimport:
+ loader.setup_modules_directory({})
loader._load_modules(modules_type, modules)
- mockinstall.assert_called_with({
- 'branch': 'master',
- 'module_path': 'modules.test.testmodule',
- 'name': 'testmodule',
- 'type': modules_type,
- 'install_path': 'modules/test/testmodule'})
- mockimport.assert_called_with({
- 'module_path': 'modules.test.testmodule',
- 'name': 'testmodule',
- 'type': modules_type,
- 'branch': 'master',
- 'install_path':
- 'modules/test/testmodule'})
+ assert mockinstall.call_count
+ assert mockimport.call_count
def test_install_existing_module(self):
opsdroid, loader = self.setup()
| Change default modules directory
Currently the default modules directory location is `./modules`.
This makes a few assumptions:
* Current directory is in the python path
* There are no other python modules in the current directory
* There are no other modules named `modules`
* Current directory is writable
A better default location may be `~/.opsdroid/modules/opsdroid-modules`. This would be created if it doesn't exist and `~/.opsdroid/modules` could be added to the python path without fear of collision as `opsdroid-modules` is less generic. As it is in the home directory we can be fairly sure it is writable.
Also when a user specifies a custom modules directory it should still be suffixed with `/opsdroid-modules` and the custom directory should be added to the python path.
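A sketch of the path handling this implies, simplified from the patch above:
```python
import os
import sys

MODULES_DIRECTORY = "opsdroid-modules"
DEFAULT_MODULES_PATH = os.path.expanduser("~/.opsdroid/modules")

def setup_modules_directory(config):
    module_path = os.path.expanduser(
        config.get("module-path", DEFAULT_MODULES_PATH))
    sys.path.append(module_path)  # custom path goes on the python path
    os.makedirs(module_path, exist_ok=True)
    modules_directory = os.path.join(module_path, MODULES_DIRECTORY)
    os.makedirs(modules_directory, exist_ok=True)
    return modules_directory
```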
| 2017-05-31T20:46:21 |
|
opsdroid/opsdroid | 167 | opsdroid__opsdroid-167 | [
"166"
] | aefa63cbf76bab85b34c6d669d621dae52409525 | diff --git a/opsdroid/const.py b/opsdroid/const.py
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -8,6 +8,7 @@
MODULES_DIRECTORY = "opsdroid-modules"
DEFAULT_ROOT_PATH = os.path.join(os.path.expanduser("~"), ".opsdroid")
DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules")
+DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages")
DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml")
DEFAULT_MODULE_BRANCH = "master"
EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -9,7 +9,8 @@
import yaml
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
- DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE)
+ DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE,
+ DEFAULT_MODULE_DEPS_PATH)
_LOGGER = logging.getLogger(__name__)
@@ -90,7 +91,11 @@ def git_clone(git_url, install_path, branch):
@staticmethod
def pip_install_deps(requirements_path):
"""Pip install a requirements.txt file and wait for finish."""
- process = subprocess.Popen(["pip", "install", "-r", requirements_path],
+ process = subprocess.Popen(["pip", "install",
+ "--target={}".format(
+ DEFAULT_MODULE_DEPS_PATH),
+ "--ignore-installed",
+ "-r", requirements_path],
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -181,6 +186,10 @@ def _load_modules(self, modules_type, modules):
_LOGGER.debug("Loading " + modules_type + " modules")
loaded_modules = []
+ if not os.path.isdir(DEFAULT_MODULE_DEPS_PATH):
+ os.makedirs(DEFAULT_MODULE_DEPS_PATH)
+ sys.path.append(DEFAULT_MODULE_DEPS_PATH)
+
for module in modules:
# Set up module config
| Module specific site-packages
It could be good for modules to install their dependencies in a specific `site-packages` directory which is only added to the path when it is time to import the modules.
A good place could be `~/.opsdroid/site-packages`.
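A sketch of what that could look like (the paths and filenames are illustrative):
```python
import os
import subprocess
import sys

DEPS_PATH = os.path.expanduser("~/.opsdroid/site-packages")

# Install a module's requirements into the dedicated directory...
subprocess.check_call(["pip", "install",
                       "--target={}".format(DEPS_PATH),
                       "--ignore-installed",
                       "-r", "requirements.txt"])

# ...and only add that directory to the path when the modules are imported.
sys.path.append(DEPS_PATH)
```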
| 2017-06-06T17:52:35 |
||
opsdroid/opsdroid | 169 | opsdroid__opsdroid-169 | [
"168"
] | 63d3aed9b6a657eeb540ecc994afde9d2240ccde | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -10,9 +10,10 @@
'modules.*', 'docs', 'docs.*'])
REQUIRES = [
- 'pyyaml>=3.11,<4',
- 'aiohttp>=1.2.0,<2',
- 'pycron>=0.40',
+ 'arrow==0.10.0',
+ 'aiohttp==2.1.0',
+ 'pycron==0.40',
+ 'pyyaml==3.12'
]
setup(
| arrow dep missing
Fresh install of Ubuntu 16.04
```
$ sudo apt update && sudo apt install python3-pip
...
$ pip3 install opsdroid
...
$ opsdroid
Traceback (most recent call last):
File "/home/ubuntu/.local/bin/opsdroid", line 7, in <module>
from opsdroid.__main__ import main
File "/home/ubuntu/.local/lib/python3.5/site-packages/opsdroid/__main__.py", line 8, in <module>
from opsdroid.core import OpsDroid
File "/home/ubuntu/.local/lib/python3.5/site-packages/opsdroid/core.py", line 15, in <module>
from opsdroid.parsers.crontab import parse_crontab
File "/home/ubuntu/.local/lib/python3.5/site-packages/opsdroid/parsers/crontab.py", line 6, in <module>
import arrow
ImportError: No module named 'arrow'
```
| 2017-06-06T18:08:01 |
||
opsdroid/opsdroid | 179 | opsdroid__opsdroid-179 | [
"157"
] | 90e1d50c833865c74ded9d523d0a8fabeaf66d44 | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -93,12 +93,15 @@ def main():
check_dependencies()
- with OpsDroid() as opsdroid:
- opsdroid.load()
- configure_logging(opsdroid.config)
- opsdroid.web_server = Web(opsdroid)
- opsdroid.start_loop()
- opsdroid.exit()
+ restart = True
+
+ while restart:
+ with OpsDroid() as opsdroid:
+ opsdroid.load()
+ configure_logging(opsdroid.config)
+ opsdroid.web_server = Web(opsdroid)
+ opsdroid.start_loop()
+ restart = opsdroid.should_restart
if __name__ == "__main__":
diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -1,5 +1,6 @@
"""Core components of OpsDroid."""
+import copy
import logging
import sys
import weakref
@@ -44,10 +45,13 @@ def __init__(self):
"total_responses": 0,
}
self.web_server = None
+ self.should_restart = False
+ self.stored_path = []
_LOGGER.info("Created main opsdroid object")
def __enter__(self):
"""Add self to existing instances."""
+ self.stored_path = copy.copy(sys.path)
if not self.__class__.instances:
self.__class__.instances.append(weakref.proxy(self))
else:
@@ -56,7 +60,9 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, traceback):
"""Remove self from existing instances."""
+ sys.path = self.stored_path
self.__class__.instances = []
+ asyncio.set_event_loop(asyncio.new_event_loop())
@property
def default_connector(self):
@@ -74,8 +80,6 @@ def exit(self):
"""Exit application."""
_LOGGER.info("Exiting application with return code " +
str(self.sys_status))
- if self.eventloop.is_running():
- self.eventloop.close()
sys.exit(self.sys_status)
def critical(self, error, code):
@@ -85,6 +89,18 @@ def critical(self, error, code):
print("Error: " + error)
self.exit()
+ def restart(self):
+ """Restart opsdroid."""
+ self.should_restart = True
+ self.stop()
+
+ def stop(self):
+ """Stop the event loop."""
+ pending = asyncio.Task.all_tasks()
+ for task in pending:
+ task.cancel()
+ self.eventloop.stop()
+
def load(self):
"""Load configuration."""
self.config = self.loader.load_config_file([
@@ -97,6 +113,7 @@ def start_loop(self):
"""Start the event loop."""
connectors, databases, skills = \
self.loader.load_modules_from_config(self.config)
+ _LOGGER.debug("Loaded %i skills", len(skills))
if databases is not None:
self.start_databases(databases)
self.setup_skills(skills)
@@ -104,12 +121,17 @@ def start_loop(self):
self.eventloop.create_task(parse_crontab(self))
self.web_server.start()
try:
- self.eventloop.run_forever()
+ pending = asyncio.Task.all_tasks()
+ self.eventloop.run_until_complete(asyncio.gather(*pending))
except (KeyboardInterrupt, EOFError):
print('') # Prints a character return for return to shell
+ self.stop()
_LOGGER.info("Keyboard interrupt, exiting.")
+ except RuntimeError as error:
+ if str(error) != 'Event loop is closed':
+ raise error
finally:
- self.exit()
+ self.eventloop.close()
def setup_skills(self, skills):
"""Call the setup function on the passed in skills."""
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -115,6 +115,11 @@ def create_default_config(config_path):
shutil.copyfile(EXAMPLE_CONFIG_FILE, config_path)
return config_path
+ @staticmethod
+ def _reload_modules(modules):
+ for module in modules:
+ importlib.reload(module["module"])
+
def load_config_file(self, config_paths):
"""Load a yaml config file from path."""
config_path = ""
@@ -169,6 +174,8 @@ def load_modules_from_config(self, config):
if 'skills' in config.keys():
skills = self._load_modules('skill', config['skills'])
+ self.opsdroid.skills = []
+ self._reload_modules(skills)
else:
self.opsdroid.critical(
"No skills in configuration, at least 1 required", 1)
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -1,4 +1,5 @@
+import asyncio
import unittest
import unittest.mock as mock
import asynctest
@@ -29,6 +30,19 @@ def test_critical(self):
with OpsDroid() as opsdroid, self.assertRaises(SystemExit):
opsdroid.critical("An error", 1)
+ def test_stop(self):
+ with OpsDroid() as opsdroid:
+ self.assertFalse(opsdroid.eventloop.is_closed())
+ opsdroid.stop()
+ self.assertFalse(opsdroid.eventloop.is_running())
+
+ def test_restart(self):
+ with OpsDroid() as opsdroid:
+ opsdroid.eventloop.create_task(asyncio.sleep(1))
+ self.assertFalse(opsdroid.should_restart)
+ opsdroid.restart()
+ self.assertTrue(opsdroid.should_restart)
+
def test_load_config(self):
with OpsDroid() as opsdroid:
opsdroid.loader = mock.Mock()
@@ -47,7 +61,7 @@ def test_start_loop(self):
opsdroid.start_connector_tasks = mock.Mock()
opsdroid.eventloop.run_forever = mock.Mock()
- with self.assertRaises(SystemExit):
+ with self.assertRaises(RuntimeError):
opsdroid.start_loop()
self.assertTrue(opsdroid.start_databases.called)
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -150,7 +150,7 @@ def test_load_config(self):
config['module-path'] = self._tmp_dir + "/opsdroid"
loader.load_modules_from_config(config)
- self.assertEqual(len(loader._load_modules.mock_calls), 3)
+ self.assertEqual(len(loader._load_modules.mock_calls), 4)
def test_load_empty_config(self):
opsdroid, loader = self.setup()
@@ -304,3 +304,10 @@ def test_install_local_module_failure(self):
loader._install_local_module(config)
logmock.assert_called_with(
"Failed to install from " + config["path"])
+
+ def test_reload_modules(self):
+ opsdroid, loader = self.setup()
+ with mock.patch('importlib.reload') as reload_mock:
+ mock_module = {"module": "fake_import"}
+ loader._reload_modules([mock_module])
+ self.assertTrue(reload_mock.called_with("fake_import"))
| Reload skills
It should be possible for opsdroid to flush out all the loaded skills and other modules and reload them without restarting the application.
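For illustration, a minimal sketch of the reload step (assuming, as in the patch above, that the loader keeps each imported module in a dict under a `module` key):

```python
import importlib

def reload_modules(modules):
    """Re-import previously loaded modules so edited skills take effect
    without restarting; each entry is assumed to hold the imported module
    under a "module" key, as the loader does."""
    for module in modules:
        importlib.reload(module["module"])
```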
| The web server must be stopped first and started again afterwards, as some skills will be webhooks. | 2017-06-08T09:58:20 |
opsdroid/opsdroid | 180 | opsdroid__opsdroid-180 | [
"178"
] | 02501e2483011e342944efdb408a82733661ea4d | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -282,6 +282,7 @@ def _install_git_module(self, config):
def _install_local_module(config):
"""Install a module from a local path."""
installed = False
+ config["path"] = os.path.expanduser(config["path"])
installdir, _ = os.path.split(config["install_path"])
if not os.path.isdir(installdir):
| Cannot use ~ in module paths
Modules specified in the configuration with a path that includes the `~` character do not import. This character needs to be expanded.
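A small sketch of the expected fix using the standard-library `os.path.expanduser` (the same call the patch above adds; the path shown is just an example):

```python
import os

config = {"path": "~/my-skills/hello"}
# Expand the leading "~" to the user's home directory before importing.
config["path"] = os.path.expanduser(config["path"])
print(config["path"])  # e.g. /home/<user>/my-skills/hello
```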
| 2017-06-10T17:36:52 |
||
opsdroid/opsdroid | 182 | opsdroid__opsdroid-182 | [
"181"
] | cc893c6af750ff12a525d38681feb1b7b90a6291 | diff --git a/opsdroid/web.py b/opsdroid/web.py
--- a/opsdroid/web.py
+++ b/opsdroid/web.py
@@ -2,6 +2,7 @@
import json
import logging
+import ssl
from aiohttp import web
@@ -33,7 +34,10 @@ def get_port(self):
try:
port = self.config["port"]
except KeyError:
- port = 8080
+ if self.get_ssl_context is not None:
+ port = 8443
+ else:
+ port = 8080
return port
@property
@@ -45,13 +49,28 @@ def get_host(self):
host = '127.0.0.1'
return host
+ @property
+ def get_ssl_context(self):
+ """Return the ssl context or None."""
+ try:
+ ssl_config = self.config["ssl"]
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ sslcontext.load_cert_chain(ssl_config["cert"], ssl_config["key"])
+ return sslcontext
+ except FileNotFoundError:
+ _LOGGER.error("Cannot find ssl cert or key.")
+ return None
+ except KeyError:
+ return None
+
def start(self):
"""Start web servers."""
_LOGGER.debug(
"Starting web server with host %s and port %s",
self.get_host, self.get_port)
web.run_app(self.web_app, host=self.get_host,
- port=self.get_port, print=_LOGGER.info)
+ port=self.get_port, print=_LOGGER.info,
+ ssl_context=self.get_ssl_context)
@staticmethod
def build_response(status, result):
| diff --git a/tests/ssl/cert.pem b/tests/ssl/cert.pem
new file mode 100644
--- /dev/null
+++ b/tests/ssl/cert.pem
@@ -0,0 +1,34 @@
+-----BEGIN CERTIFICATE-----
+MIIF9DCCA9ygAwIBAgIJAN7FTj+QWdJKMA0GCSqGSIb3DQEBBQUAMFkxCzAJBgNV
+BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX
+aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xNzA2MTMwNzU1
+MzBaFw0xODA2MTMwNzU1MzBaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21l
+LVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV
+BAMTCWxvY2FsaG9zdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANuK
+mO7kJQyew6dNFpubae5AhRtRp8HukijgM+6qoBvpC/v13SC0xpGCJA3WZcSnq6sY
+Qo+Lq9UVNuD61bKyyDHt79RUrQSZMNw6+6YYBBZtfd0WbiL0byPkzbkeUszZEejg
+LUNAC2RVjaewt/xdZkBgNSnp5+SjkcS48SxRUR17RxisVwHV9zOw6/B7dxxa6CMM
+nPLdd0oDBzZAXah+YnOPQmCecsjZNJ2mUe0AOliJf3zbsE9EVB3+2dquYOeoCAn1
++rzeTYY6Ahh+SuYbspWmIvj6S2tNvAfu7grkJeYmYQIeGUT7lFDzghWw/NeClbq4
+LiXyrtJdJab3OeFKE97uJN+3UJJgppw/UgUqNsRYOHl/nt8Um2CWIFTDAYk4+xUy
+zXtaWAGp9Oyi3NO2FLSQDBMcrwmi/OVxS+5i8SimFj4EGez/lJtof4AWRpLdAIHA
+Cknq4swN1UnjElg+SZsI9jkKyYu9KiQgqkMR/iAiNWVy3hgp9e1kU8RPbS7PzciU
+55HMVggI8zUT4vLkwh0xvIAgIp2gP+EzjYfPGX7cBkPoU6aKUD0NFZZQ4b79sR0E
+nnziieYen/QgCYQx0D48xzc8mggxUV+xHYV14WFC2WonD6dBZX9Vz/GXtBSi0xgU
+pwY0qGkAsTXNh30xr3CshqCmcRWmWqGmJW0rXUsPAgMBAAGjgb4wgbswHQYDVR0O
+BBYEFP5Tk5H/oO6IvAMgkECdn8OTeGuUMIGLBgNVHSMEgYMwgYCAFP5Tk5H/oO6I
+vAMgkECdn8OTeGuUoV2kWzBZMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1T
+dGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRIwEAYDVQQD
+Ewlsb2NhbGhvc3SCCQDexU4/kFnSSjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEB
+BQUAA4ICAQCpAk7Vxy9ZOiAyi/ZGSvBmXGyu073SpnpG+EZU8rHr+TMT17NvB4zz
+ITxWSworDV9DsXyOt2CifM/wsYMQjWGzIFZGfrTIDg4+0G8ePdE/aKPEx2h68GzQ
+GcH151ZxVTb4IpT7MDApueVl3BnaYMm36QeVROMDmICYmwxEC+cBvWqvgesJnzeX
+Q910M/pNeOP+1wfharQDi6xSGSibKgf8uUKYNEl1LPkK3CFumWWARnaMSG7D3psj
+lszT+n7e59TnZzzABeUW9zzUo8evhbA+XqlW6jwjp63h+PX7kYFQIehF7c1z8mWh
+GKdLtLRSlPI6cH0RSrXwlu1b6JmVqg03kL6msG7uWJLlgd/LKbpzjLItKezi+Ydu
+R8rSX28pJiqrgBP/ZyEAxJIbtc42hMmVst/g4buSc3ij3jkfeNVIYaoSwIQiTkD6
+C1lqE4TVYGxokv38CVb+F6/r1I+BasDGk6W1q/jpeF+BhaPYnD+xjm2n5JfNd1+S
+RO4jRp97HRb9VUT03vzkQNxtXfBW7WpCIVD2HAlUGGge9V+7WjrTSMUckhiHjot+
+bQmYpdLDzv9GZL0WjqCaeIpsN/Hw9+B6qCggtvgmKNi9n2uFmBkR4XqLgBhV/9Q6
+r0orcysRfr0+4wktarKziK8z47W9i+kSLu7GlKzWGS256h/aLrMeuA==
+-----END CERTIFICATE-----
diff --git a/tests/ssl/key.pem b/tests/ssl/key.pem
new file mode 100644
--- /dev/null
+++ b/tests/ssl/key.pem
@@ -0,0 +1,51 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIJKAIBAAKCAgEA24qY7uQlDJ7Dp00Wm5tp7kCFG1Gnwe6SKOAz7qqgG+kL+/Xd
+ILTGkYIkDdZlxKerqxhCj4ur1RU24PrVsrLIMe3v1FStBJkw3Dr7phgEFm193RZu
+IvRvI+TNuR5SzNkR6OAtQ0ALZFWNp7C3/F1mQGA1Kenn5KORxLjxLFFRHXtHGKxX
+AdX3M7Dr8Ht3HFroIwyc8t13SgMHNkBdqH5ic49CYJ5yyNk0naZR7QA6WIl/fNuw
+T0RUHf7Z2q5g56gICfX6vN5NhjoCGH5K5huylaYi+PpLa028B+7uCuQl5iZhAh4Z
+RPuUUPOCFbD814KVurguJfKu0l0lpvc54UoT3u4k37dQkmCmnD9SBSo2xFg4eX+e
+3xSbYJYgVMMBiTj7FTLNe1pYAan07KLc07YUtJAMExyvCaL85XFL7mLxKKYWPgQZ
+7P+Um2h/gBZGkt0AgcAKSerizA3VSeMSWD5Jmwj2OQrJi70qJCCqQxH+ICI1ZXLe
+GCn17WRTxE9tLs/NyJTnkcxWCAjzNRPi8uTCHTG8gCAinaA/4TONh88ZftwGQ+hT
+popQPQ0VllDhvv2xHQSefOKJ5h6f9CAJhDHQPjzHNzyaCDFRX7EdhXXhYULZaicP
+p0Flf1XP8Ze0FKLTGBSnBjSoaQCxNc2HfTGvcKyGoKZxFaZaoaYlbStdSw8CAwEA
+AQKCAgBZcf1fDyqdGq4iIoE8grRE8pqIh9suYxG/f6EI8xFqbaeJTyF5qs1gCULB
+NIGf/qQtgCPWMhSfKxaB0RBxzu9LkGJvAtS/E97ZS1qt0AkSTifChNn4edgyKo06
+h8U6WpI2a+dlB9ncVhLxqEk9k5wOTGQ8oGTvPUkteiAgazz+IvTTc5u7Tr55m/J0
+twjPT6ZWaVQkiuCIEfXFP7Io7N0Vsd9cWBHyDLSoKlQkmDwtSE/dwe/yS6dt1SBT
+PUqT91zXGhW/D19MeaPS8Nl5KP24hMio4ekKXdBm9wya+VsLisVpRHQNK+bAmIV+
+au6Iq1ZvjYyXv2lEodiuTvmQ9Upkft1hUZT4KqMvhFlsG60rHjEnYivHwVSZpcLs
+Rx9Xpyup/SONFsuM0N6aIrLXFNLhTOwCe871821tF6qkvhfEMQhzCL/Qij6sMHV3
+cDj3VwFFv+9tJpv39Zj1QFHR4wV2YXqqy0wHr8ygBPcFL9Adu4jbnalFxnyuNivk
+a7DII1zm5LGqMItXfYZMAL+jNHxT+xpmtSbGj7fkcDZbxA/4YWk/cJxddf2hD3X/
+Mi0etQD4uHsF47pvPQVFa4bQfPfMDHRVxLU4U7PNWhROXtV7/HaGQnehy/Kb5PVo
+wJbGlMlcHxJ70+NHpp0aLZCqMiAv2dX7hWkYrJRPfIjsStUYwQKCAQEA/BgX2ZSf
+eUgxwrQcuSmoHLW2gBkV3RXKfRJzs+1Jw8VsEhTmaWQoKwq0dBD0ujpYZ6YooBv1
+cNhANkCOaohQmkJnJ4VLCj4Geqew9v5R1JnABxRxswHE8Ah2Yz9IUlnF5ftz1O3M
+qssAPRfHNM336Q6J2y2NjDaP0tmKbCaxg21HOtbqA1stHxozYJs8L8KF1HTehIO0
+hAaTsL2+8qHPgzzBr8FfFtC2sD7hvDi+9m+tZq7aNwpdb9a0Eu+gA/omPbDv434c
+Ixu40ERDT3kWSSj41rMunebe4T8vRD0DW2e2n1Uls3+ovoBC/ckuqGEzzVGNpEy/
+dH7NPaDMZ6aCLwKCAQEA3vFjFMFFvD17G0197m+Yvm/tEM69OzM4DwUig23O7qTR
+g05DGQDuTnlc9UozOWMMzXwnyrWsZ+DbEH839q3LrhepZ5ftk1wjcFCsCz/vpy+w
+BY3uyYsKTb06fzDlBLU/hZaRMCfJRlCHxHqjTLV1arRokvRSb1HZPalB6qpdno1p
+UcamP8GbXiXijyXP699/NXzElA+gNeeiNROvQhR8n90vGOym02Scn9O8i9/1KhJL
+z/Kfi/YV6gX72LZBmm7Vo0qHf6lA3mAiXlNaVp2SvU0gc6KfIKXjxj/XkA3U7ctr
+zw3zCyJU6osQmoEwEZwSEogZlOxLySIFM4rXqm/tIQKCAQEA2JkG3O6Cut0ZzBWe
+3ET7aXeDj7p9+5A+g4n4Ga8Wgvu8aCjG8+SXUqwG8fajnuKElMHCBBchokxbl4GO
+G+B1iKTnJqCzIeC7BmXQedd3jbhwsmSRcGFjyW0r8wgrZn41t+19z4RmJpT25obo
+pOkXSYoxTj1scf1rQ5mgAl6YhLi+y8JeHHYZZbDIVgfrBYoYHLwxeCAFWK6h9OiD
+CMuYlUT9kboOrnazkOjmYSdMhEbd8OzpFwNz1qTd4as7jE9Azh6ZbpgrKssl1uPg
+BYefa0qlyyr3Mbps45G7VeEqYj3RPkMVeo2D+fIQHSffS9MAhDe/AFzk3v4X8AyZ
+s7DwBwKCAQAiY3+SCwHzuFQN8RUUu1gHQAKETi94Oz1Ra8xYE48Q1pGOvHqzF6aq
+qiPBnlFqb7Hg3F7S8NUILPH9iHIcddBuKS4zUYcO7O52e6seDR2tXzi6kpsbsXvE
+ZATq66LX6BFFTOVWVTzBwNhAYV5hDbhv3iYnTNKOY0SsMzh8twy70TpGp+qq+PU7
+P4LD6+Q3kQDxQi6cXBCtX+5wW3n0Km7Sxe5m3NIa13ElYia6OLB7bdkN2XHsx+e3
+cnbXhb120dPYXT2rXnMoGhITFK5hn+qrkoMH8zSyd4Q750sb6FZI29DkXlV4CTKp
+/wtvma/Nd5J+E0pG43AJNVsyCR+dJaihAoIBAEZcyyoVmW64VgGX82e0Afsk09Bu
+geE8lBO3S9NsJbzBsEMp17YlOEYaIczSceiFASTo2D3kFPz8vpNlRrpmFLlUBm6j
+/wJ5YWyA22yHGsSro57xgQLcoui/ImUrMHoLknukvJ9hKQseRdxuVqRFHq8nbDvW
+H+d2ZeS+segnTWn1L9tJWUwNXb9nZC5bSWoE78QNGGX+T5Cyu5QEgI15nYMOxH6V
++SnOZL7u10WqOwKlBOF+GQbKvLycibk7RSEvmagVzk7aPL+jU4OL3AlyPTt9iit0
+fiwsn+US6KuY0YGG5U/tt82lAWfrExxqDnBDdVOgwlcc1nWbvhJC84p9RGc=
+-----END RSA PRIVATE KEY-----
diff --git a/tests/test_web.py b/tests/test_web.py
--- a/tests/test_web.py
+++ b/tests/test_web.py
@@ -1,4 +1,6 @@
+import ssl
+
import asynctest
import asynctest.mock as amock
@@ -38,6 +40,27 @@ async def test_web_get_host(self):
app = web.Web(opsdroid)
self.assertEqual(app.get_host, "0.0.0.0")
+ async def test_web_get_ssl(self):
+ """Check the host getter."""
+ with OpsDroid() as opsdroid:
+ opsdroid.config["web"] = {}
+ app = web.Web(opsdroid)
+ self.assertEqual(app.get_ssl_context, None)
+
+ opsdroid.config["web"] = {"ssl":
+ {"cert": "tests/ssl/cert.pem",
+ "key": "tests/ssl/key.pem"}}
+ app = web.Web(opsdroid)
+ self.assertEqual(type(app.get_ssl_context),
+ type(ssl.SSLContext(ssl.PROTOCOL_SSLv23)))
+ self.assertEqual(app.get_port, 8443)
+
+ opsdroid.config["web"] = {"ssl":
+ {"cert": "/path/to/nonexistant/cert",
+ "key": "/path/to/nonexistant/key"}}
+ app = web.Web(opsdroid)
+ self.assertEqual(app.get_ssl_context, None)
+
async def test_web_build_response(self):
"""Check the response builder."""
with OpsDroid() as opsdroid:
| Add ssl to the web server
It should be possible to enable ssl on the web server and pass in paths to the ssl keys in the config.
| 2017-06-13T07:42:49 |
|
opsdroid/opsdroid | 183 | opsdroid__opsdroid-183 | [
"174"
] | f381eec97e8df5977ad732ae20201050667b35e2 | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -1,11 +1,12 @@
"""Starts opsdroid."""
+import os
import sys
import logging
import argparse
from opsdroid.core import OpsDroid
-from opsdroid.const import LOG_FILENAME, EXAMPLE_CONFIG_FILE
+from opsdroid.const import DEFAULT_LOG_FILENAME, EXAMPLE_CONFIG_FILE
from opsdroid.web import Web
@@ -19,9 +20,12 @@ def configure_logging(config):
rootlogger.handlers.pop()
try:
- logfile_path = config["logging"]["path"]
+ if config["logging"]["path"]:
+ logfile_path = os.path.expanduser(config["logging"]["path"])
+ else:
+ logfile_path = config["logging"]["path"]
except KeyError:
- logfile_path = LOG_FILENAME
+ logfile_path = DEFAULT_LOG_FILENAME
try:
log_level = get_logging_level(
diff --git a/opsdroid/const.py b/opsdroid/const.py
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -3,10 +3,10 @@
__version__ = "0.8.1"
-LOG_FILENAME = 'output.log'
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
MODULES_DIRECTORY = "opsdroid-modules"
-DEFAULT_ROOT_PATH = os.path.join(os.path.expanduser("~"), ".opsdroid")
+DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid")
+DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, 'output.log')
DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules")
DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages")
DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml")
| Change default log location
Logs by default are written to `./opsdroid.log`. So they end up being written wherever you run the command.
Logs should be written either to `/var/log/opsdroid.log` or, as that may not be writeable by all users, to `~/.opsdroid/opsdroid.log`.
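A tiny sketch of resolving the proposed per-user default (the patch above settles on `output.log` under `~/.opsdroid`):

```python
import os

DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid")
DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, "output.log")
print(DEFAULT_LOG_FILENAME)  # e.g. /home/<user>/.opsdroid/output.log
```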
| 2017-06-13T18:53:15 |
||
opsdroid/opsdroid | 184 | opsdroid__opsdroid-184 | [
"132"
] | 9a8447a4f2e7db16dd8684ee24e357d74a3fe89f | diff --git a/opsdroid/parsers/apiai.py b/opsdroid/parsers/apiai.py
--- a/opsdroid/parsers/apiai.py
+++ b/opsdroid/parsers/apiai.py
@@ -38,8 +38,11 @@ async def parse_apiai(opsdroid, message, config):
# halt the application. If a skill throws an exception it just doesn't
# give a response to the user, so an error response should be given.
if 'access-token' in config:
-
- result = await call_apiai(message, config)
+ try:
+ result = await call_apiai(message, config)
+ except aiohttp.ClientOSError:
+ _LOGGER.error("No response from api.ai, check your network.")
+ return
if result["status"]["code"] >= 300:
_LOGGER.error("api.ai error - " +
| API.AI Error when network is disconnected
The following error should be caught and warned about cleanly.
```
ERROR asyncio: Task exception was never retrieved
future: <Task finished coro=<parse_apiai() done, defined at /Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py:34> exception=ClientConnectorError(8, 'Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]')>
Traceback (most recent call last):
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 375, in connect
proto = yield from self._create_connection(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 632, in _create_connection
_, proto = yield from self._create_direct_connection(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 643, in _create_direct_connection
hosts = yield from self._resolve_host(req.url.raw_host, req.port)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 615, in _resolve_host
self._resolver.resolve(host, port, family=self._family)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/resolver.py", line 30, in resolve
host, port, type=socket.SOCK_STREAM, family=family)
File "/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/socket.py", line 743, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno 8] nodename nor servname provided, or not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py", line 42, in parse_apiai
result = await call_apiai(message, config)
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py", line 27, in call_apiai
headers=headers)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py", line 621, in __await__
resp = yield from self._coro
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py", line 225, in _request
conn = yield from self._connector.connect(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 380, in connect
.format(key, exc.strerror)) from exc
aiohttp.client_exceptions.ClientConnectorError: [Errno 8] Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]
```
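A hedged sketch of the kind of guard the parser could use, mirroring the patch above which catches `aiohttp.ClientOSError` around the API call (`call_apiai` here stands in for the real request coroutine):

```python
import logging
import aiohttp

_LOGGER = logging.getLogger(__name__)

async def safe_call_apiai(message, config, call_apiai):
    """Call the api.ai endpoint, logging a clean error instead of raising
    an unhandled exception when the network is unavailable."""
    try:
        return await call_apiai(message, config)
    except aiohttp.ClientOSError:
        _LOGGER.error("No response from api.ai, check your network.")
        return None
```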
| 2017-06-13T19:31:21 |
||
opsdroid/opsdroid | 210 | opsdroid__opsdroid-210 | [
"186"
] | cf05a83573fbca4e24b3770458822de6c666cc37 | diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -9,12 +9,14 @@
_LOGGER = logging.getLogger(__name__)
-def match_regex(regex):
+def match_regex(regex, case_sensitive=True):
"""Return regex match decorator."""
def matcher(func):
"""Add decorated function to skills list for regex matching."""
opsdroid = get_opsdroid()
- opsdroid.skills.append({"regex": regex, "skill": func,
+ opsdroid.skills.append({"regex": {"expression": regex,
+ "case_sensitive": case_sensitive},
+ "skill": func,
"config":
opsdroid.loader.current_import_config})
return func
diff --git a/opsdroid/parsers/regex.py b/opsdroid/parsers/regex.py
--- a/opsdroid/parsers/regex.py
+++ b/opsdroid/parsers/regex.py
@@ -15,7 +15,12 @@ async def parse_regex(opsdroid, message):
# give a response to the user, so an error response should be given.
for skill in opsdroid.skills:
if "regex" in skill:
- regex = re.match(skill["regex"], message.text)
+ if skill["regex"]["case_sensitive"]:
+ regex = re.match(skill["regex"]["expression"],
+ message.text)
+ else:
+ regex = re.match(skill["regex"]["expression"],
+ message.text, re.IGNORECASE)
if regex:
message.regex = regex
try:
@@ -28,4 +33,4 @@ async def parse_regex(opsdroid, message):
_LOGGER.exception("Exception when parsing '" +
message.text +
"' against skill '" +
- skill["regex"] + "'")
+ skill["regex"]["expression"] + "'")
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -76,7 +76,7 @@ def test_load_regex_skill(self):
decorator = match_regex(regex)
decorator(skill)
self.assertEqual(len(opsdroid.skills), 1)
- self.assertEqual(opsdroid.skills[0]["regex"], regex)
+ self.assertEqual(opsdroid.skills[0]["regex"]["expression"], regex)
self.assertIsInstance(opsdroid.skills[0]["skill"], mock.MagicMock)
def test_start_databases(self):
@@ -142,7 +142,7 @@ class TestCoreAsync(asynctest.TestCase):
async def test_parse_regex(self):
with OpsDroid() as opsdroid:
- regex = r".*"
+ regex = r"Hello .*"
skill = amock.CoroutineMock()
mock_connector = Connector({})
decorator = match_regex(regex)
@@ -153,6 +153,19 @@ async def test_parse_regex(self):
await task
self.assertTrue(skill.called)
+ async def test_parse_regex_insensitive(self):
+ with OpsDroid() as opsdroid:
+ regex = r"Hello .*"
+ skill = amock.CoroutineMock()
+ mock_connector = Connector({})
+ decorator = match_regex(regex, case_sensitive=False)
+ decorator(skill)
+ message = Message("HELLO world", "user", "default", mock_connector)
+ tasks = await opsdroid.parse(message)
+ for task in tasks:
+ await task
+ self.assertTrue(skill.called)
+
async def test_parse_apiai(self):
with OpsDroid() as opsdroid:
opsdroid.config["parsers"] = [{"name": "apiai"}]
diff --git a/tests/test_matchers.py b/tests/test_matchers.py
--- a/tests/test_matchers.py
+++ b/tests/test_matchers.py
@@ -19,7 +19,7 @@ async def test_match_regex(self):
decorator = matchers.match_regex(regex)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 1)
- self.assertEqual(opsdroid.skills[0]["regex"], regex)
+ self.assertEqual(opsdroid.skills[0]["regex"]["expression"], regex)
self.assertIsInstance(opsdroid.skills[0]["skill"], mock.MagicMock)
async def test_match_apiai(self):
| Add case-insensitive kwarg to the regex matcher
It would be nice to specify case-insensitive matching in the regex matcher,
e.g.
```python
@match_regex(r'myregex', case_sensitive=False)
async def myskill(opsdroid, config, message):
pass
```
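Under the hood this amounts to passing `re.IGNORECASE` when the flag is off, as the patch above does — a minimal standard-library sketch (names are illustrative):

```python
import re

def matches(expression, text, case_sensitive=True):
    """Return a match object, ignoring case when case_sensitive is False."""
    if case_sensitive:
        return re.match(expression, text)
    return re.match(expression, text, re.IGNORECASE)

print(bool(matches(r"Hello .*", "HELLO world", case_sensitive=False)))  # True
```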
| 2017-08-30T12:57:50 |
|
opsdroid/opsdroid | 225 | opsdroid__opsdroid-225 | [
"194"
] | b67d32310da842dddddb9b907f731a03d0562a2f | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -6,6 +6,7 @@
import shutil
import subprocess
import importlib
+import re
import yaml
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
@@ -135,6 +136,17 @@ def load_config_file(self, config_paths):
_LOGGER.info("No configuration files found.")
config_path = self.create_default_config(DEFAULT_CONFIG_PATH)
+ env_var_pattern = re.compile(r'^\$([A-Z_]*)$')
+ yaml.add_implicit_resolver("!envvar", env_var_pattern)
+
+ def envvar_constructor(loader, node):
+ """Yaml parser for env vars."""
+ value = loader.construct_scalar(node)
+ [env_var] = env_var_pattern.match(value).groups()
+ return os.environ[env_var]
+
+ yaml.add_constructor('!envvar', envvar_constructor)
+
try:
with open(config_path, 'r') as stream:
_LOGGER.info("Loaded config from %s", config_path)
| diff --git a/tests/configs/minimal_with_envs.yaml b/tests/configs/minimal_with_envs.yaml
new file mode 100644
--- /dev/null
+++ b/tests/configs/minimal_with_envs.yaml
@@ -0,0 +1 @@
+test: $ENVVAR
\ No newline at end of file
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -29,6 +29,13 @@ def test_load_config_file(self):
config = loader.load_config_file(["tests/configs/minimal.yaml"])
self.assertIsNotNone(config)
+ def test_load_config_file_with_env_vars(self):
+ opsdroid, loader = self.setup()
+ os.environ["ENVVAR"] = 'test'
+ config = loader.load_config_file(
+ ["tests/configs/minimal_with_envs.yaml"])
+ self.assertEqual(config["test"], os.environ["ENVVAR"])
+
def test_create_default_config(self):
test_config_path = "/tmp/test_config_path/configuration.yaml"
opsdroid, loader = self.setup()
| Allow usage of env vars in config
The configuration should be parsed for environment variables when loaded. This would allow secrets such as API keys to be kept outside of the opsdroid configuration.
#### Example
```yaml
connectors:
- name: slack
default-room: '#general'
bot-name: "opsdroid"
icon-emoji: ":robot:"
api-token: "$SLACK_API_KEY"
```
In this example `$SLACK_API_KEY` would be replaced with the contents of the environment variable of the same name.
| 2017-09-13T19:56:13 |
|
opsdroid/opsdroid | 233 | opsdroid__opsdroid-233 | [
"232",
"232"
] | 84206f9862a673a90dbcb5db93dc6eb0c717eb2b | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -48,6 +48,9 @@ def configure_logging(config):
pass
if logfile_path:
+ logdir = os.path.dirname(os.path.realpath(logfile_path))
+ if not os.path.isdir(logdir):
+ os.makedirs(logdir)
file_handler = logging.FileHandler(logfile_path)
file_handler.setLevel(log_level)
file_handler.setFormatter(formatter)
| diff --git a/tests/test_main.py b/tests/test_main.py
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -59,6 +59,14 @@ def test_configure_file_logging(self):
self.assertEqual(logging.FileHandler, type(rootlogger.handlers[1]))
self.assertEqual(rootlogger.handlers[1].level, logging.INFO)
+ def test_configure_file_logging_directory_not_exists(self):
+ config = {"logging": {
+ "path": '/tmp/mynonexistingdirectory' + "/output.log",
+ "console": False,
+ }}
+ opsdroid.configure_logging(config)
+ self.assertEqual(os.path.isfile(config['logging']['path']), True)
+
def test_configure_console_logging(self):
config = {"logging": {
"path": False,
| [Docker] DEFAULT_ROOT_PATH should be created if it does not exist
I have tried to run opsdroid in a Docker container, in the following environment:
```
OS: Ubuntu 16.04.3 LTS
Docker version: 17.06.2-ce
Docker API version: 1.30
```
The process I followed is the following:
1. `docker pull opsdroid/opsdroid:latest`
2. Created an initial configuration in the host: `/var/tmp/configuration.yaml`
3. Ran the following command: ` docker run --rm -v /var/tmp/configuration.yaml:/etc/opsdroid/configuration.yaml:ro opsdroid/opsdroid:latest`
The configuration file contents are:
```
connectors:
- name: shell
skills:
- name: hello
```
But I got the following error:
```
ubuntu@ubuntu:~$ docker run --rm -v /var/tmp/configuration.yaml:/etc/opsdroid/configuration.yaml:ro opsdroid/opsdroid:latest
Traceback (most recent call last):
File "/usr/local/lib/python3.5/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.5/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/src/app/opsdroid/__main__.py", line 112, in <module>
main()
File "/usr/src/app/opsdroid/__main__.py", line 105, in main
configure_logging(opsdroid.config)
File "/usr/src/app/opsdroid/__main__.py", line 51, in configure_logging
file_handler = logging.FileHandler(logfile_path)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1014, in __init__
StreamHandler.__init__(self, self._open())
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
FileNotFoundError: [Errno 2] No such file or directory: '/root/.opsdroid/output.log'
ubuntu@ubuntu:~$
```
When running the container in interactive mode to debug the issue, by issuing `docker run -it -v /var/tmp/configuration.yaml:/etc/opsdroid/configuration.yaml:ro opsdroid/opsdroid:latest /bin/sh` and executing the default command (`python -m opsdroid`), I reproduced the issue:
```
/usr/src/app # python -m opsdroid
Traceback (most recent call last):
File "/usr/local/lib/python3.5/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.5/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/src/app/opsdroid/__main__.py", line 112, in <module>
main()
File "/usr/src/app/opsdroid/__main__.py", line 105, in main
configure_logging(opsdroid.config)
File "/usr/src/app/opsdroid/__main__.py", line 51, in configure_logging
file_handler = logging.FileHandler(logfile_path)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1014, in __init__
StreamHandler.__init__(self, self._open())
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
FileNotFoundError: [Errno 2] No such file or directory: '/root/.opsdroid/output.log'
/usr/src/app #
```
When checking if the `/root/.opsdroid/` directory existed, I got the following:
```
/usr/src/app # ls /root/.opsdroid
ls: /root/.opsdroid: No such file or directory
```
Concluding, opsdroid should check if that directory exists and create it if not.
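A minimal sketch of the guard, mirroring the fix in the patch above which creates the log directory before attaching the file handler:

```python
import os
import logging

def file_handler_for(logfile_path):
    """Create the parent directory if needed, then return a FileHandler."""
    logdir = os.path.dirname(os.path.realpath(logfile_path))
    if not os.path.isdir(logdir):
        os.makedirs(logdir)
    return logging.FileHandler(logfile_path)
```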
| 2017-09-27T22:36:43 |
|
opsdroid/opsdroid | 249 | opsdroid__opsdroid-249 | [
"247"
] | 56d7a3a4abe976d2474ba78eca396a1ff123c7f7 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -2,6 +2,7 @@
import copy
import logging
+import signal
import sys
import weakref
import asyncio
@@ -34,6 +35,8 @@ def __init__(self):
self.connectors = []
self.connector_tasks = []
self.eventloop = asyncio.get_event_loop()
+ for sig in (signal.SIGINT, signal.SIGTERM):
+ self.eventloop.add_signal_handler(sig, self.stop)
self.skills = []
self.memory = Memory()
self.loader = Loader(self)
@@ -100,6 +103,8 @@ def stop(self):
for task in pending:
task.cancel()
self.eventloop.stop()
+ print('') # Prints a character return for return to shell
+ _LOGGER.info("Keyboard interrupt, exiting.")
def load(self):
"""Load configuration."""
@@ -123,10 +128,6 @@ def start_loop(self):
try:
pending = asyncio.Task.all_tasks()
self.eventloop.run_until_complete(asyncio.gather(*pending))
- except (KeyboardInterrupt, EOFError):
- print('') # Prints a character return for return to shell
- self.stop()
- _LOGGER.info("Keyboard interrupt, exiting.")
except RuntimeError as error:
if str(error) != 'Event loop is closed':
raise error
| SIGINT (Ctrl+C) should be handled by opsdroid
When executing opsdroid in Ubuntu or in a Docker container, the following message appears:
`(Press CTRL+C to quit)`
When hitting `Ctrl+C` though, the following traceback is produced:
```
^CERROR asyncio: Task exception was never retrieved
future: <Task finished coro=<ConnectorShell.listen() done, defined at /root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py:57> exception=ConnectionResetError('Connection lost',)>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 240, in _step
result = coro.send(None)
File "/root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py", line 63, in listen
user_input = await async_input('', opsdroid.eventloop)
File "/root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py", line 37, in async_input
await writer.drain()
File "/usr/local/lib/python3.5/asyncio/streams.py", line 333, in drain
yield from self._protocol._drain_helper()
File "/usr/local/lib/python3.5/asyncio/streams.py", line 204, in _drain_helper
raise ConnectionResetError('Connection lost')
ConnectionResetError: Connection lost
ERROR asyncio: Exception in default exception handler
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1284, in call_exception_handler
self.default_exception_handler(context)
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1259, in default_exception_handler
logger.error('\n'.join(log_lines), exc_info=exc_info)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
self._log(ERROR, msg, args, **kwargs)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
self.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
self.callHandlers(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
self.emit(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
self.stream = self._open()
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
NameError: name 'open' is not defined
Exception ignored in: <bound method Task.__del__ of <Task finished coro=<ConnectorShell.listen() done, defined at /root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py:57> exception=ConnectionResetError('Connection lost',)>>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 93, in __del__
File "/usr/local/lib/python3.5/asyncio/futures.py", line 234, in __del__
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1290, in call_exception_handler
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
NameError: name 'open' is not defined
ERROR asyncio: Task was destroyed but it is pending!
task: <Task pending coro=<parse_crontab() running at /usr/src/app/opsdroid/parsers/crontab.py:20> wait_for=<Future pending cb=[Task._wakeup()]>>
ERROR asyncio: Exception in default exception handler
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1284, in call_exception_handler
self.default_exception_handler(context)
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1259, in default_exception_handler
logger.error('\n'.join(log_lines), exc_info=exc_info)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
self._log(ERROR, msg, args, **kwargs)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
self.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
self.callHandlers(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
self.emit(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
self.stream = self._open()
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
NameError: name 'open' is not defined
Exception ignored in: <bound method Task.__del__ of <Task pending coro=<parse_crontab() running at /usr/src/app/opsdroid/parsers/crontab.py:20> wait_for=<Future pending cb=[Task._wakeup()]>>>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 92, in __del__
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1290, in call_exception_handler
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
NameError: name 'open' is not defined
```
This should be handled, so that opsdroid can exit gracefully without producing any tracebacks.
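The patch above wires SIGINT and SIGTERM straight into the event loop; a stripped-down sketch of the same idea (the `stop` callback is illustrative, and `add_signal_handler` is only available on Unix event loops):

```python
import asyncio
import signal

def install_signal_handlers(eventloop, stop):
    """Call stop() cleanly when SIGINT or SIGTERM arrives."""
    for sig in (signal.SIGINT, signal.SIGTERM):
        eventloop.add_signal_handler(sig, stop)

loop = asyncio.new_event_loop()
install_signal_handlers(loop, loop.stop)
```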
| 2017-09-30T21:29:58 |
||
opsdroid/opsdroid | 254 | opsdroid__opsdroid-254 | [
"247",
"247"
] | c3c481631cbeef1d6c2fde6c5934345d337ebfa4 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -2,6 +2,7 @@
import copy
import logging
+import signal
import sys
import weakref
import asyncio
@@ -34,6 +35,8 @@ def __init__(self):
self.connectors = []
self.connector_tasks = []
self.eventloop = asyncio.get_event_loop()
+ for sig in (signal.SIGINT, signal.SIGTERM):
+ self.eventloop.add_signal_handler(sig, self.stop)
self.skills = []
self.memory = Memory()
self.loader = Loader(self)
@@ -100,6 +103,8 @@ def stop(self):
for task in pending:
task.cancel()
self.eventloop.stop()
+ print('') # Prints a character return for return to shell
+ _LOGGER.info("Keyboard interrupt, exiting.")
def load(self):
"""Load configuration."""
@@ -123,10 +128,6 @@ def start_loop(self):
try:
pending = asyncio.Task.all_tasks()
self.eventloop.run_until_complete(asyncio.gather(*pending))
- except (KeyboardInterrupt, EOFError):
- print('') # Prints a character return for return to shell
- self.stop()
- _LOGGER.info("Keyboard interrupt, exiting.")
except RuntimeError as error:
if str(error) != 'Event loop is closed':
raise error
| SIGINT (Ctrl+C) should be handled by opsdroid
When executing opsdroid in Ubuntu or in a Docker container, the following message appears:
`(Press CTRL+C to quit)`
When hitting `Ctrl+C` though, the following traceback is produced:
```
^CERROR asyncio: Task exception was never retrieved
future: <Task finished coro=<ConnectorShell.listen() done, defined at /root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py:57> exception=ConnectionResetError('Connection lost',)>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 240, in _step
result = coro.send(None)
File "/root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py", line 63, in listen
user_input = await async_input('', opsdroid.eventloop)
File "/root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py", line 37, in async_input
await writer.drain()
File "/usr/local/lib/python3.5/asyncio/streams.py", line 333, in drain
yield from self._protocol._drain_helper()
File "/usr/local/lib/python3.5/asyncio/streams.py", line 204, in _drain_helper
raise ConnectionResetError('Connection lost')
ConnectionResetError: Connection lost
ERROR asyncio: Exception in default exception handler
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1284, in call_exception_handler
self.default_exception_handler(context)
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1259, in default_exception_handler
logger.error('\n'.join(log_lines), exc_info=exc_info)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
self._log(ERROR, msg, args, **kwargs)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
self.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
self.callHandlers(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
self.emit(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
self.stream = self._open()
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
NameError: name 'open' is not defined
Exception ignored in: <bound method Task.__del__ of <Task finished coro=<ConnectorShell.listen() done, defined at /root/.opsdroid/modules/opsdroid-modules/connector/shell/__init__.py:57> exception=ConnectionResetError('Connection lost',)>>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 93, in __del__
File "/usr/local/lib/python3.5/asyncio/futures.py", line 234, in __del__
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1290, in call_exception_handler
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
NameError: name 'open' is not defined
ERROR asyncio: Task was destroyed but it is pending!
task: <Task pending coro=<parse_crontab() running at /usr/src/app/opsdroid/parsers/crontab.py:20> wait_for=<Future pending cb=[Task._wakeup()]>>
ERROR asyncio: Exception in default exception handler
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1284, in call_exception_handler
self.default_exception_handler(context)
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1259, in default_exception_handler
logger.error('\n'.join(log_lines), exc_info=exc_info)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
self._log(ERROR, msg, args, **kwargs)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
self.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
self.callHandlers(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
self.emit(record)
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
self.stream = self._open()
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
return open(self.baseFilename, self.mode, encoding=self.encoding)
NameError: name 'open' is not defined
Exception ignored in: <bound method Task.__del__ of <Task pending coro=<parse_crontab() running at /usr/src/app/opsdroid/parsers/crontab.py:20> wait_for=<Future pending cb=[Task._wakeup()]>>>
Traceback (most recent call last):
File "/usr/local/lib/python3.5/asyncio/tasks.py", line 92, in __del__
File "/usr/local/lib/python3.5/asyncio/base_events.py", line 1290, in call_exception_handler
File "/usr/local/lib/python3.5/logging/__init__.py", line 1314, in error
File "/usr/local/lib/python3.5/logging/__init__.py", line 1421, in _log
File "/usr/local/lib/python3.5/logging/__init__.py", line 1431, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1493, in callHandlers
File "/usr/local/lib/python3.5/logging/__init__.py", line 861, in handle
File "/usr/local/lib/python3.5/logging/__init__.py", line 1053, in emit
File "/usr/local/lib/python3.5/logging/__init__.py", line 1043, in _open
NameError: name 'open' is not defined
```
This should be handled, so that opsdroid can exit gracefully without producing any tracebacks.
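The accompanying `stop()` in the patch above cancels outstanding tasks before stopping the loop — roughly, as a sketch:

```python
import asyncio

def stop(eventloop):
    """Cancel pending tasks, then stop the loop. The original patch used
    asyncio.Task.all_tasks(), the pre-Python-3.9 spelling of all_tasks()."""
    for task in asyncio.all_tasks(eventloop):
        task.cancel()
    eventloop.stop()
```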
| 2017-10-01T09:15:14 |
||
opsdroid/opsdroid | 278 | opsdroid__opsdroid-278 | [
"277"
] | 00f85ba5ac800ae507380c7591e31a2aa32fbef8 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -11,6 +11,7 @@
from opsdroid.connector import Connector
from opsdroid.database import Database
from opsdroid.loader import Loader
+from opsdroid.parsers.always import parse_always
from opsdroid.parsers.regex import parse_regex
from opsdroid.parsers.apiai import parse_apiai
from opsdroid.parsers.luisai import parse_luisai
@@ -187,6 +188,8 @@ async def parse(self, message):
tasks.append(
self.eventloop.create_task(parse_regex(self, message)))
+ tasks.append(
+ self.eventloop.create_task(parse_always(self, message)))
if "parsers" in self.config:
_LOGGER.debug("Processing parsers")
diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -107,3 +107,19 @@ async def wrapper(req, opsdroid=opsdroid, config=config):
return func
return matcher
+
+
+def match_always(func=None):
+ """Return always match decorator."""
+ def matcher(func):
+ """Add decorated function to skills list for always matching."""
+ opsdroid = get_opsdroid()
+ config = opsdroid.loader.current_import_config
+ opsdroid.skills.append({"always": True, "skill": func,
+ "config": config})
+ return func
+
+ # Allow for decorator with or without parenthesis as there are no args.
+ if callable(func):
+ return matcher(func)
+ return matcher
diff --git a/opsdroid/parsers/always.py b/opsdroid/parsers/always.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/parsers/always.py
@@ -0,0 +1,25 @@
+"""A helper function for parsing and executing always skills."""
+
+import logging
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def parse_always(opsdroid, message):
+ """Parse a message always."""
+ # pylint: disable=broad-except
+ # We want to catch all exceptions coming from a skill module and not
+ # halt the application. If a skill throws an exception it just doesn't
+ # give a response to the user, so an error response should be given.
+ for skill in opsdroid.skills:
+ if "always" in skill and skill["always"]:
+ try:
+ await skill["skill"](opsdroid, skill["config"], message)
+ except Exception:
+ await message.respond(
+ "Whoops there has been an error")
+ await message.respond(
+ "Check the log for details")
+ _LOGGER.exception("Exception when parsing '" +
+ message.text + "'")
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -178,7 +178,7 @@ async def test_parse_apiai(self):
message = Message("Hello world", "user", "default", mock_connector)
with amock.patch('opsdroid.parsers.apiai.parse_apiai'):
tasks = await opsdroid.parse(message)
- self.assertEqual(len(tasks), 2) # apiai and regex
+ self.assertEqual(len(tasks), 3) # apiai, regex and always
for task in tasks:
await task
@@ -193,7 +193,7 @@ async def test_parse_luisai(self):
message = Message("Hello world", "user", "default", mock_connector)
with amock.patch('opsdroid.parsers.luisai.parse_luisai'):
tasks = await opsdroid.parse(message)
- self.assertEqual(len(tasks), 2) # luisai and regex
+ self.assertEqual(len(tasks), 3) # luisai, regex and always
for task in tasks:
await task
@@ -208,6 +208,6 @@ async def test_parse_witai(self):
message = Message("Hello world", "user", "default", mock_connector)
with amock.patch('opsdroid.parsers.witai.parse_witai'):
tasks = await opsdroid.parse(message)
- self.assertEqual(len(tasks), 2) # witai and regex
+ self.assertEqual(len(tasks), 3) # witai, regex and always
for task in tasks:
await task
diff --git a/tests/test_parser_always.py b/tests/test_parser_always.py
new file mode 100644
--- /dev/null
+++ b/tests/test_parser_always.py
@@ -0,0 +1,51 @@
+
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.core import OpsDroid
+from opsdroid.matchers import match_always
+from opsdroid.message import Message
+from opsdroid.parsers.always import parse_always
+
+
+class TestParserAlways(asynctest.TestCase):
+ """Test the opsdroid always parser."""
+
+ async def test_parse_always_decorator_parens(self):
+ with OpsDroid() as opsdroid:
+ mock_skill = amock.CoroutineMock()
+ match_always()(mock_skill)
+
+ mock_connector = amock.CoroutineMock()
+ message = Message("Hello world", "user", "default", mock_connector)
+
+ await parse_always(opsdroid, message)
+
+ self.assertTrue(mock_skill.called)
+
+ async def test_parse_always_decorate_no_parens(self):
+ with OpsDroid() as opsdroid:
+ mock_skill = amock.CoroutineMock()
+ match_always(mock_skill)
+
+ mock_connector = amock.CoroutineMock()
+ message = Message("Hello world", "user", "default", mock_connector)
+
+ await parse_always(opsdroid, message)
+
+ self.assertTrue(mock_skill.called)
+
+ async def test_parse_always_raises(self):
+ with OpsDroid() as opsdroid:
+ mock_skill = amock.CoroutineMock()
+ mock_skill.side_effect = Exception()
+ match_always()(mock_skill)
+ self.assertEqual(len(opsdroid.skills), 1)
+
+ mock_connector = amock.CoroutineMock()
+ message = Message("Hello world", "user",
+ "default", mock_connector)
+
+ await parse_always(opsdroid, message)
+
+ self.assertTrue(mock_skill.called)
| Add a matcher which always matches
It is conceivable that you might want a skill to match on every message which passes through opsdroid. An example would be updating the last seen time of a user in the [seen skill](https://github.com/opsdroid/skill-seen).
Currently this is achieved by using the `match_regex` decorator with an expression which matches anything. However once #154 has been implemented this will cease to work.
Therefore it would be good to have a matcher which matches every single time and can easily be made exempt from #154. For example it could be called `@match_always` or `@match_everything`.
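With the decorator added in the patch above, a skill that should fire on every message would look roughly like this (the seen-tracking body is illustrative, not taken from the seen skill itself):

```python
from opsdroid.matchers import match_always

@match_always
async def remember_seen(opsdroid, config, message):
    # Runs for every message, regardless of other matchers.
    seen = await opsdroid.memory.get("seen") or {}
    seen[message.user] = message.text
    await opsdroid.memory.put("seen", seen)
```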
| 2017-10-19T18:25:08 |
|
opsdroid/opsdroid | 294 | opsdroid__opsdroid-294 | [
"293"
] | e39d59e39e79c2df9414673efa185b568e54c571 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -13,7 +13,7 @@
from opsdroid.loader import Loader
from opsdroid.parsers.always import parse_always
from opsdroid.parsers.regex import parse_regex
-from opsdroid.parsers.apiai import parse_apiai
+from opsdroid.parsers.dialogflow import parse_dialogflow
from opsdroid.parsers.luisai import parse_luisai
from opsdroid.parsers.witai import parse_witai
from opsdroid.parsers.crontab import parse_crontab
@@ -195,15 +195,15 @@ async def parse(self, message):
_LOGGER.debug("Processing parsers")
parsers = self.config["parsers"]
- apiai = [p for p in parsers if p["name"] == "apiai"]
- _LOGGER.debug("Checking apiai")
- if len(apiai) == 1 and \
- ("enabled" not in apiai[0] or
- apiai[0]["enabled"] is not False):
- _LOGGER.debug("Parsing with apiai")
+ dialogflow = [p for p in parsers if p["name"] == "dialogflow"]
+ _LOGGER.debug("Checking dialogflow")
+ if len(dialogflow) == 1 and \
+ ("enabled" not in dialogflow[0] or
+ dialogflow[0]["enabled"] is not False):
+ _LOGGER.debug("Parsing with Dialogflow")
tasks.append(
self.eventloop.create_task(
- parse_apiai(self, message, apiai[0])))
+ parse_dialogflow(self, message, dialogflow[0])))
luisai = [p for p in parsers if p["name"] == "luisai"]
_LOGGER.debug("Checking luisai")
diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -24,23 +24,53 @@ def matcher(func):
def match_apiai_action(action):
- """Return apiai action match decorator."""
+ """Return Dialogflow action match decorator."""
def matcher(func):
- """Add decorated function to skills list for apiai matching."""
+ """Add decorated function to skills list for Dialogflow matching."""
opsdroid = get_opsdroid()
- opsdroid.skills.append({"apiai_action": action, "skill": func,
+ opsdroid.skills.append({"dialogflow_action": action, "skill": func,
"config":
opsdroid.loader.current_import_config})
return func
+ _LOGGER.warning("Api.ai is now called Dialogflow, this matcher "
+ "will stop working in the future. "
+ "Use match_dialogflow_action instead.")
return matcher
def match_apiai_intent(intent):
- """Return apiai intent match decorator."""
+ """Return Dialogflow intent match decorator."""
def matcher(func):
- """Add decorated function to skills list for apiai matching."""
+ """Add decorated function to skills list for Dialogflow matching."""
opsdroid = get_opsdroid()
- opsdroid.skills.append({"apiai_intent": intent, "skill": func,
+ opsdroid.skills.append({"dialogflow_intent": intent, "skill": func,
+ "config":
+ opsdroid.loader.current_import_config})
+ return func
+ _LOGGER.warning("Api.ai is now called Dialogflow, this matcher "
+ "will stop working in the future. "
+ "Use match_dialogflow_intent instead.")
+ return matcher
+
+
+def match_dialogflow_action(action):
+ """Return Dialogflowi action match decorator."""
+ def matcher(func):
+ """Add decorated function to skills list for Dialogflow matching."""
+ opsdroid = get_opsdroid()
+ opsdroid.skills.append({"dialogflow_action": action, "skill": func,
+ "config":
+ opsdroid.loader.current_import_config})
+ return func
+ return matcher
+
+
+def match_dialogflow_intent(intent):
+ """Return Dialogflow intent match decorator."""
+ def matcher(func):
+ """Add decorated function to skills list for Dialogflow matching."""
+ opsdroid = get_opsdroid()
+ opsdroid.skills.append({"dialogflow_intent": intent, "skill": func,
"config":
opsdroid.loader.current_import_config})
return func
diff --git a/opsdroid/parsers/apiai.py b/opsdroid/parsers/dialogflow.py
similarity index 70%
rename from opsdroid/parsers/apiai.py
rename to opsdroid/parsers/dialogflow.py
--- a/opsdroid/parsers/apiai.py
+++ b/opsdroid/parsers/dialogflow.py
@@ -1,4 +1,4 @@
-"""A helper function for parsing and executing api.ai skills."""
+"""A helper function for parsing and executing Dialogflow skills."""
import logging
import json
@@ -9,8 +9,8 @@
_LOGGER = logging.getLogger(__name__)
-async def call_apiai(message, config):
- """Call the api.ai api and return the response."""
+async def call_dialogflow(message, config):
+ """Call the Dialogflow api and return the response."""
async with aiohttp.ClientSession() as session:
payload = {
"v": "20150910",
@@ -22,50 +22,51 @@ async def call_apiai(message, config):
"Authorization": "Bearer " + config['access-token'],
"Content-Type": "application/json"
}
- resp = await session.post("https://api.api.ai/v1/query",
+ resp = await session.post("https://api.dialogflow.com/v1/query",
data=json.dumps(payload),
headers=headers)
result = await resp.json()
- _LOGGER.debug("api.ai response - " + json.dumps(result))
+ _LOGGER.debug("Dialogflow response - " + json.dumps(result))
return result
-async def parse_apiai(opsdroid, message, config):
- """Parse a message against all apiai skills."""
+async def parse_dialogflow(opsdroid, message, config):
+ """Parse a message against all Dialogflow skills."""
# pylint: disable=broad-except
# We want to catch all exceptions coming from a skill module and not
# halt the application. If a skill throws an exception it just doesn't
# give a response to the user, so an error response should be given.
if 'access-token' in config:
try:
- result = await call_apiai(message, config)
+ result = await call_dialogflow(message, config)
except aiohttp.ClientOSError:
- _LOGGER.error("No response from api.ai, check your network.")
+ _LOGGER.error("No response from Dialogflow, check your network.")
return
if result["status"]["code"] >= 300:
- _LOGGER.error("api.ai error - " +
+ _LOGGER.error("Dialogflow error - " +
str(result["status"]["code"]) + " " +
result["status"]["errorType"])
return
if "min-score" in config and \
result["result"]["score"] < config["min-score"]:
- _LOGGER.debug("api.ai score lower than min-score")
+ _LOGGER.debug("Dialogflow score lower than min-score")
return
if result:
for skill in opsdroid.skills:
- if "apiai_action" in skill or "apiai_intent" in skill:
+ if "dialogflow_action" in skill or \
+ "dialogflow_intent" in skill:
if ("action" in result["result"] and
- skill["apiai_action"] in
+ skill["dialogflow_action"] in
result["result"]["action"]) \
or ("intentName" in result["result"] and
- skill["apiai_intent"] in
+ skill["dialogflow_intent"] in
result["result"]["intentName"]):
- message.apiai = result
+ message.dialogflow = result
try:
await skill["skill"](opsdroid, skill["config"],
message)
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -9,7 +9,7 @@
from opsdroid.core import OpsDroid
from opsdroid.message import Message
from opsdroid.connector import Connector
-from opsdroid.matchers import (match_regex, match_apiai_action,
+from opsdroid.matchers import (match_regex, match_dialogflow_action,
match_luisai_intent, match_witai)
@@ -175,18 +175,18 @@ async def test_parse_regex_insensitive(self):
await task
self.assertTrue(skill.called)
- async def test_parse_apiai(self):
+ async def test_parse_dialogflow(self):
with OpsDroid() as opsdroid:
- opsdroid.config["parsers"] = [{"name": "apiai"}]
- apiai_action = ""
+ opsdroid.config["parsers"] = [{"name": "dialogflow"}]
+ dialogflow_action = ""
skill = amock.CoroutineMock()
mock_connector = Connector({})
- decorator = match_apiai_action(apiai_action)
+ decorator = match_dialogflow_action(dialogflow_action)
decorator(skill)
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch('opsdroid.parsers.apiai.parse_apiai'):
+ with amock.patch('opsdroid.parsers.dialogflow.parse_dialogflow'):
tasks = await opsdroid.parse(message)
- self.assertEqual(len(tasks), 3) # apiai, regex and always
+ self.assertEqual(len(tasks), 3) # dialogflow, regex and always
for task in tasks:
await task
diff --git a/tests/test_matchers.py b/tests/test_matchers.py
--- a/tests/test_matchers.py
+++ b/tests/test_matchers.py
@@ -29,13 +29,33 @@ async def test_match_apiai(self):
decorator = matchers.match_apiai_action(action)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 1)
- self.assertEqual(opsdroid.skills[0]["apiai_action"], action)
+ self.assertEqual(opsdroid.skills[0]["dialogflow_action"], action)
self.assertIsInstance(opsdroid.skills[0]["skill"], mock.MagicMock)
intent = "myIntent"
decorator = matchers.match_apiai_intent(intent)
decorator(mockedskill)
self.assertEqual(len(opsdroid.skills), 2)
- self.assertEqual(opsdroid.skills[1]["apiai_intent"], intent)
+ self.assertEqual(opsdroid.skills[1]["dialogflow_intent"], intent)
+ self.assertIsInstance(opsdroid.skills[1]["skill"], mock.MagicMock)
+ with mock.patch('opsdroid.matchers._LOGGER.warning') as logmock:
+ decorator = matchers.match_apiai_intent(intent)
+ decorator(mockedskill)
+ self.assertTrue(logmock.called)
+
+ async def test_match_dialogflow(self):
+ with OpsDroid() as opsdroid:
+ action = "myaction"
+ mockedskill = mock.MagicMock()
+ decorator = matchers.match_dialogflow_action(action)
+ decorator(mockedskill)
+ self.assertEqual(len(opsdroid.skills), 1)
+ self.assertEqual(opsdroid.skills[0]["dialogflow_action"], action)
+ self.assertIsInstance(opsdroid.skills[0]["skill"], mock.MagicMock)
+ intent = "myIntent"
+ decorator = matchers.match_dialogflow_intent(intent)
+ decorator(mockedskill)
+ self.assertEqual(len(opsdroid.skills), 2)
+ self.assertEqual(opsdroid.skills[1]["dialogflow_intent"], intent)
self.assertIsInstance(opsdroid.skills[1]["skill"], mock.MagicMock)
async def test_match_luisai(self):
diff --git a/tests/test_parser_apiai.py b/tests/test_parser_dialogflow.py
similarity index 58%
rename from tests/test_parser_apiai.py
rename to tests/test_parser_dialogflow.py
--- a/tests/test_parser_apiai.py
+++ b/tests/test_parser_dialogflow.py
@@ -5,19 +5,19 @@
from aiohttp import helpers, ClientOSError
from opsdroid.core import OpsDroid
-from opsdroid.matchers import match_apiai_action
+from opsdroid.matchers import match_dialogflow_action
from opsdroid.message import Message
-from opsdroid.parsers import apiai
+from opsdroid.parsers import dialogflow
from opsdroid.connector import Connector
-class TestParserApiai(asynctest.TestCase):
- """Test the opsdroid api.ai parser."""
+class TestParserDialogflow(asynctest.TestCase):
+ """Test the opsdroid Dialogflow parser."""
- async def test_call_apiai(self):
+ async def test_call_dialogflow(self):
mock_connector = Connector({})
message = Message("Hello world", "user", "default", mock_connector)
- config = {'name': 'apiai', 'access-token': 'test'}
+ config = {'name': 'dialogflow', 'access-token': 'test'}
result = amock.Mock()
result.json = amock.CoroutineMock()
result.json.return_value = {
@@ -33,22 +33,23 @@ async def test_call_apiai(self):
with amock.patch('aiohttp.ClientSession.post') as patched_request:
patched_request.return_value = helpers.create_future(self.loop)
patched_request.return_value.set_result(result)
- await apiai.call_apiai(message, config)
+ await dialogflow.call_dialogflow(message, config)
self.assertTrue(patched_request.called)
- async def test_parse_apiai(self):
+ async def test_parse_dialogflow(self):
with OpsDroid() as opsdroid:
opsdroid.config['parsers'] = [
- {'name': 'apiai', 'access-token': "test"}
+ {'name': 'dialogflow', 'access-token': "test"}
]
mock_skill = amock.CoroutineMock()
- match_apiai_action('myaction')(mock_skill)
+ match_dialogflow_action('myaction')(mock_skill)
mock_connector = amock.CoroutineMock()
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch.object(apiai, 'call_apiai') as mocked_call_apiai:
- mocked_call_apiai.return_value = {
+ with amock.patch.object(dialogflow, 'call_dialogflow') as \
+ mocked_call_dialogflow:
+ mocked_call_dialogflow.return_value = {
"result": {
"action": "myaction",
"score": 0.7
@@ -58,25 +59,26 @@ async def test_parse_apiai(self):
"errorType": "success"
}
}
- await apiai.parse_apiai(opsdroid, message,
- opsdroid.config['parsers'][0])
+ await dialogflow.parse_dialogflow(
+ opsdroid, message, opsdroid.config['parsers'][0])
self.assertTrue(mock_skill.called)
- async def test_parse_apiai_raises(self):
+ async def test_parse_dialogflow_raises(self):
with OpsDroid() as opsdroid:
opsdroid.config['parsers'] = [
- {'name': 'apiai', 'access-token': "test"}
+ {'name': 'dialogflow', 'access-token': "test"}
]
mock_skill = amock.CoroutineMock()
mock_skill.side_effect = Exception()
- match_apiai_action('myaction')(mock_skill)
+ match_dialogflow_action('myaction')(mock_skill)
mock_connector = amock.CoroutineMock()
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch.object(apiai, 'call_apiai') as mocked_call_apiai:
- mocked_call_apiai.return_value = {
+ with amock.patch.object(dialogflow, 'call_dialogflow') as \
+ mocked_call_dialogflow:
+ mocked_call_dialogflow.return_value = {
"result": {
"action": "myaction",
"score": 0.7
@@ -86,24 +88,25 @@ async def test_parse_apiai_raises(self):
"errorType": "success"
}
}
- await apiai.parse_apiai(opsdroid, message,
- opsdroid.config['parsers'][0])
+ await dialogflow.parse_dialogflow(
+ opsdroid, message, opsdroid.config['parsers'][0])
self.assertTrue(mock_skill.called)
- async def test_parse_apiai_failure(self):
+ async def test_parse_dialogflow_failure(self):
with OpsDroid() as opsdroid:
opsdroid.config['parsers'] = [
- {'name': 'apiai', 'access-token': "test"}
+ {'name': 'dialogflow', 'access-token': "test"}
]
mock_skill = amock.CoroutineMock()
- match_apiai_action('myaction')(mock_skill)
+ match_dialogflow_action('myaction')(mock_skill)
mock_connector = amock.CoroutineMock()
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch.object(apiai, 'call_apiai') as mocked_call_apiai:
- mocked_call_apiai.return_value = {
+ with amock.patch.object(dialogflow, 'call_dialogflow') as \
+ mocked_call_dialogflow:
+ mocked_call_dialogflow.return_value = {
"result": {
"action": "myaction",
"score": 0.7
@@ -113,24 +116,29 @@ async def test_parse_apiai_failure(self):
"errorType": "not found"
}
}
- await apiai.parse_apiai(opsdroid, message,
- opsdroid.config['parsers'][0])
+ await dialogflow.parse_dialogflow(
+ opsdroid, message, opsdroid.config['parsers'][0])
self.assertFalse(mock_skill.called)
- async def test_parse_apiai_low_score(self):
+ async def test_parse_dialogflow_low_score(self):
with OpsDroid() as opsdroid:
opsdroid.config['parsers'] = [
- {'name': 'apiai', 'access-token': "test", "min-score": 0.8}
+ {
+ 'name': 'dialogflow',
+ 'access-token': "test",
+ "min-score": 0.8
+ }
]
mock_skill = amock.CoroutineMock()
- match_apiai_action('myaction')(mock_skill)
+ match_dialogflow_action('myaction')(mock_skill)
mock_connector = amock.CoroutineMock()
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch.object(apiai, 'call_apiai') as mocked_call_apiai:
- mocked_call_apiai.return_value = {
+ with amock.patch.object(dialogflow, 'call_dialogflow') as \
+ mocked_call_dialogflow:
+ mocked_call_dialogflow.return_value = {
"result": {
"action": "myaction",
"score": 0.7
@@ -140,26 +148,30 @@ async def test_parse_apiai_low_score(self):
"errorType": "success"
}
}
- await apiai.parse_apiai(opsdroid, message,
- opsdroid.config['parsers'][0])
+ await dialogflow.parse_dialogflow(
+ opsdroid, message, opsdroid.config['parsers'][0])
self.assertFalse(mock_skill.called)
- async def test_parse_apiai_raise_ClientOSError(self):
+ async def test_parse_dialogflow_raise_ClientOSError(self):
with OpsDroid() as opsdroid:
opsdroid.config['parsers'] = [
- {'name': 'apiai', 'access-token': "test", "min-score": 0.8}
+ {
+ 'name': 'dialogflow',
+ 'access-token': "test",
+ "min-score": 0.8}
]
mock_skill = amock.CoroutineMock()
- match_apiai_action('myaction')(mock_skill)
+ match_dialogflow_action('myaction')(mock_skill)
mock_connector = amock.CoroutineMock()
message = Message("Hello world", "user", "default", mock_connector)
- with amock.patch.object(apiai, 'call_apiai') as mocked_call:
+ with amock.patch.object(dialogflow, 'call_dialogflow') \
+ as mocked_call:
mocked_call.side_effect = ClientOSError()
- await apiai.parse_apiai(opsdroid, message,
- opsdroid.config['parsers'][0])
+ await dialogflow.parse_dialogflow(
+ opsdroid, message, opsdroid.config['parsers'][0])
self.assertFalse(mock_skill.called)
self.assertTrue(mocked_call.called)
| API.AI is now Dialogflow, needs updating throughout
On the 10th of October Api.ai renamed their company to [Dialogflow](https://blog.dialogflow.com/post/apiai-new-name-dialogflow-new-features/).
Therefore I suggest the following changes:
- Copy `match_apiai_action` and `match_apiai_intent` to `match_dialogflow_action` and `match_dialogflow_intent`
- Modify `match_apiai_action` and `match_apiai_intent` to log a deprecation notice. These will be removed at some point in the future.
- Rename `apiai` to `dialogflow` throughout the code and tests.
- Rename 'Api.ai' to 'Dialogflow (Api.ai)' in [mkdocs.yaml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Rename [docs/matchers/api.ai.md](https://github.com/opsdroid/opsdroid/blob/master/docs/matchers/api.ai.md) to `dialogflow.md`.
- Update references to api.ai in the docs to Dialogflow. First mention should probably be `Dialogflow (previously Api.ai)`.
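A hedged sketch of the skill-level migration this would allow: the old matcher keeps working for now (with a deprecation warning), while new skills use the Dialogflow name. The action name and skill bodies are illustrative only.

```python
from opsdroid.matchers import match_apiai_action, match_dialogflow_action


# Before: still works, but would log a deprecation warning.
@match_apiai_action('smalltalk.greetings')
async def greet_old(opsdroid, config, message):
    await message.respond('Hello (via the deprecated Api.ai matcher)')


# After: the same skill using the new Dialogflow matcher.
@match_dialogflow_action('smalltalk.greetings')
async def greet(opsdroid, config, message):
    await message.respond('Hello (via the Dialogflow matcher)')
```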
| I can do this if you'd like Jacob
Go for it!
| 2017-10-21T20:25:46 |
opsdroid/opsdroid | 370 | opsdroid__opsdroid-370 | [
"214"
] | 65b0292b777d9c595d0c902d3079188da4a4889f | diff --git a/scripts/update_example_config/update_example_config.py b/scripts/update_example_config/update_example_config.py
new file mode 100644
--- /dev/null
+++ b/scripts/update_example_config/update_example_config.py
@@ -0,0 +1,150 @@
+from github import Github
+from argparse import ArgumentParser
+import jinja2
+import base64
+import yaml
+import re
+import os
+
+
+def normalize(string):
+ lines = string.strip().split('\n')
+ if 'skills' in lines[0]:
+ return '\n'.join([
+ re.sub('^(#)? ', '\g<1>', line)
+ for line in lines[1:]
+ ])
+ return string.strip()
+
+
+def render(tpl_path, context):
+ path, filename = os.path.split(tpl_path)
+ return jinja2.Environment(
+ loader=jinja2.FileSystemLoader(path or './')
+ ).get_template(filename).render(context)
+
+
+def get_repos():
+ return [
+ repo
+ for repo in g.get_user("opsdroid").get_repos()
+ if repo.name.startswith('skill-')
+ ]
+
+
+def get_readme(repo):
+ readme_base64 = repo.get_readme().content
+ return base64.b64decode(readme_base64).decode("utf-8")
+
+
+def get_skill(repo, readme):
+ config = re.search(
+ '#[#\s]+Configuration((.|\n)*?)```(yaml)?\n((.|\n)*?)\n```',
+ readme,
+ re.MULTILINE
+ )
+
+ skill_raw_name = repo.name[6:]
+ skill_name = skill_raw_name.replace('-', ' ').capitalize()
+ skill_url = repo.html_url
+
+ if config:
+ skill_config = normalize(config.group(4))
+ else:
+ skill_config = '- name: ' + skill_raw_name
+
+ return {
+ 'raw_name': skill_raw_name,
+ 'name': skill_name,
+ 'url': skill_url,
+ 'config': skill_config
+ }
+
+
+def check_skill(repo, skill, error_strict):
+ try:
+ yaml.load(skill['config'])
+ except yaml.scanner.ScannerError as e:
+ if error_strict:
+ raise(e)
+ print(
+ "[WARNING] processing {0} raised an exception\n"
+ "{2}\n{1}\n{2}".format(repo.name, e, '='*40)
+ )
+
+
+def get_skills(g, active_skills, error_strict=False):
+ repos = get_repos()
+
+ skills = {'commented_skills': [], 'uncommented_skills': []}
+
+ for repo in repos:
+ readme = get_readme(repo)
+ skill = get_skill(repo, readme)
+ check_skill(repo, skill, error_strict)
+
+ if skill['raw_name'] in active_skills:
+ skills['uncommented_skills'].append(skill)
+ else:
+ skills['commented_skills'].append(skill)
+
+ return skills
+
+
+def check_config(config, error_strict):
+ try:
+ yaml.load(config)
+ except yaml.scanner.ScannerError as e:
+ if error_strict:
+ raise(e)
+ print(
+ "[WARNING] processing resulting config raised an exception"
+ "\n{1}\n{0}\n{1}".format(e, '='*40)
+ )
+
+
+def update_config(g, active_skills, config_path, error_strict=False):
+ skills = get_skills(g, active_skills, error_strict)
+ text = render('scripts/configuration.j2', skills)
+ check_config(text, error_strict)
+
+ with open(config_path, 'w') as f:
+ f.write(text)
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser(description='Config creator ')
+ parser.add_argument('output', nargs='?', help='Path to config to update')
+ parser.add_argument('-t', '--token', nargs='?', help='GitHub Token')
+ parser.add_argument('-a', '--active-skills',
+ nargs='?', help='List of skills to be activated')
+
+ parser.set_defaults(error_strict=False)
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument(
+ '--strict', dest='error_strict', action='store_true',
+ help='Sets fail strategy to strict mode. Fails on any error.'
+ )
+ group.add_argument(
+ '--warn', dest='error_strict', action='store_false',
+ help='Sets fail strategy to warn mode (default).'
+ ' Any errors are shown as warnings.'
+ )
+
+ args = parser.parse_args()
+
+ g = Github(args.token)
+
+ if args.active_skills:
+ active_skills = args.active_skills.split(',')
+ else:
+ active_skills = ['dance', 'hello', 'seen', 'loudnoises']
+
+ if not args.output:
+ base_path = '/'.join(os.path.realpath(__file__).split('/')[:-2])
+ config_path = base_path
+ config_path += '/opsdroid/configuration/example_configuration.yaml'
+ else:
+ config_path = args.output
+
+ update_config(g, active_skills, config_path, args.error_strict)
| Create script to update automatically the config file with new skills
- Create a scripts directory in the root of the project.
- If a new skill is added, call the script (located in the scripts directory) in the Makefile and update the example_configuration.yaml automatically.
| @FabioRosado "If a new skill is added" where? What should the script use as a source of truth? Should the script automatically detect new skills?
And what should the script put into example_configuration.yaml? There might be lots of configuration associated with new skills.
I see the following solution:
- The trigger to run the script is to be defined
- On each run the script looks at the https://github.com/opsdroid repos and gets the set of "skill-*" repos
- It parses each README.md and takes the "Configuration" section from there
- It puts the contents of "Configuration" into example_configuration.yaml
- If the configuration has changed, the script rewrites it
Please correct me if I understood wrong.
Hello @vitkhab, we created this issue when I updated the example_config file. @jacobtomlinson might have a better idea of what should be done, but I believe that you got it right.
Since opsdroid checks the https://github.com/opsdroid repo to install the skills, I believe Jacob wanted the script to just update the example_config file with any new skill found there (probably just adding the comment bit `# <name of skill> (<repo url>)` and the `name: <skill name>` entry should be a good starting point).
Since some skills have required parameters that need to be included in the configuration, perhaps the script could capture these as well from the README.md of each skill?
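For illustration only, assuming the script works roughly as described above, a generated section of example_configuration.yaml might look like this: active skills uncommented, everything else commented out, with required parameters lifted from each README (the weather skill and its options are hypothetical):

```yaml
skills:
  # Active by default
  - name: hello
  - name: seen

  ## Dance (https://github.com/opsdroid/skill-dance)
  # - name: dance

  ## Weather (https://github.com/opsdroid/skill-weather)
  # - name: weather
  #   api-key: "<your api key>"   # required parameter taken from the skill README
  #   city: London
```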
@FabioRosado @vitkhab
Good question about when to trigger the script. I imagine this could be done at release as part of the release documentation #110.
I think it would be awesome if it could scrape through the skills and pull out their config examples to make one big one. And as @FabioRosado said add the comments in the format of `# <name of skill> (<repo url>)`. | 2017-12-31T16:49:04 |
|
opsdroid/opsdroid | 379 | opsdroid__opsdroid-379 | [
"378"
] | a24905eb0b46faf1f23d6fd26c06776c9f4a9f4a | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -213,7 +213,7 @@ async def get_ranked_skills(self, message):
if "parsers" in self.config:
_LOGGER.debug("Processing parsers...")
- parsers = self.config["parsers"]
+ parsers = self.config["parsers"] or []
dialogflow = [p for p in parsers if p["name"] == "dialogflow"
or p["name"] == "apiai"]
| Error if parsers is empty
# Description
If you leave the `parsers` section of the config as an empty dictionary an error is thrown and startup fails.
## Steps to Reproduce
Uncomment the parsers section of the config but with no actual parsers
```
parsers:
# nothing else
```
## Expected Functionality
The application should start with the default parsers only. The same as if `parsers:` is not in the config.
## Experienced Functionality
```
DEBUG opsdroid.core: Parsing input: hi
DEBUG opsdroid.core: Processing parsers...
ERROR aiohttp.server: Error handling request
Traceback (most recent call last):
File "/Users/jacob/.pyenv/versions/3.5.4/lib/python3.5/site-packages/aiohttp/web_protocol.py", line 416, in start
resp = yield from self._request_handler(request)
File "/Users/jacob/.pyenv/versions/3.5.4/lib/python3.5/site-packages/aiohttp/web.py", line 325, in _handle
resp = yield from handler(request)
File "/Users/jacob/.opsdroid/modules/opsdroid-modules/connector/websocket/__init__.py", line 77, in websocket_handler
await self.opsdroid.parse(message)
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/core.py", line 273, in parse
ranked_skills = await self.get_ranked_skills(message)
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/core.py", line 218, in get_ranked_skills
dialogflow = [p for p in parsers if p["name"] == "dialogflow"
TypeError: 'NoneType' object is not iterable
```
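For context: PyYAML loads a key with no value as `None`, which is exactly what the `or []` guard in the patch above protects against. A minimal reproduction, assuming PyYAML is installed:

```python
import yaml

config = yaml.safe_load("parsers:\n")
print(config)                      # {'parsers': None}

parsers = config["parsers"] or []  # the guard added in the patch
print(parsers)                     # []
```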
## Versions
- **Opsdroid version:** 0.10.0
- **Python version:** Python 3.5.4
- **OS/Docker version:** macOS 10.13
| 2018-01-12T16:05:21 |
||
opsdroid/opsdroid | 382 | opsdroid__opsdroid-382 | [
"353"
] | ea1108662e6231a5dd0db19bf98a86f4cfdb2f46 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -2,6 +2,7 @@
import copy
import logging
+import os
import signal
import sys
import weakref
@@ -40,8 +41,9 @@ def __init__(self):
self.connectors = []
self.connector_tasks = []
self.eventloop = asyncio.get_event_loop()
- for sig in (signal.SIGINT, signal.SIGTERM):
- self.eventloop.add_signal_handler(sig, self.call_stop)
+ if os.name != 'nt':
+ for sig in (signal.SIGINT, signal.SIGTERM):
+ self.eventloop.add_signal_handler(sig, self.call_stop)
self.skills = []
self.memory = Memory()
self.loader = Loader(self)
diff --git a/opsdroid/helper.py b/opsdroid/helper.py
--- a/opsdroid/helper.py
+++ b/opsdroid/helper.py
@@ -1,5 +1,8 @@
"""Helper functions to use within OpsDroid."""
+import os
+import stat
+
def get_opsdroid():
"""Return the running opsdroid instance."""
@@ -8,3 +11,9 @@ def get_opsdroid():
return OpsDroid.instances[0]
return None
+
+
+def del_rw(action, name, exc):
+ """Error handler for removing read only files."""
+ os.chmod(name, stat.S_IWRITE)
+ os.remove(name)
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -82,8 +82,9 @@ def build_module_path(self, path_type, config):
path = MODULES_DIRECTORY + "." + config["type"] + \
"." + config["name"]
elif path_type == "install":
- path = self.modules_directory + "/" + config["type"] + \
- "/" + config["name"]
+ path = os.path.join(self.modules_directory,
+ config["type"],
+ config["name"])
return path
@staticmethod
@@ -118,7 +119,7 @@ def pip_install_deps(requirements_path):
@staticmethod
def _load_intents(config):
- intent_file = config["install_path"] + "/intents.md"
+ intent_file = os.path.join(config["install_path"], "intents.md")
if os.path.isfile(intent_file):
with open(intent_file, 'r') as intent_file_handle:
intents = intent_file_handle.read()
@@ -314,9 +315,10 @@ def _install_module(self, config):
_LOGGER.error("Install of %s failed.", config["name"])
# Install module dependencies
- if os.path.isfile(config["install_path"] + "/requirements.txt"):
- self.pip_install_deps(config["install_path"] +
- "/requirements.txt")
+ if os.path.isfile(os.path.join(
+ config["install_path"], "requirements.txt")):
+ self.pip_install_deps(os.path.join(config["install_path"],
+ "requirements.txt"))
def _install_git_module(self, config):
"""Install a module from a git repository."""
@@ -358,8 +360,9 @@ def _install_local_module(config):
if os.path.isfile(config["path"]):
os.makedirs(config["install_path"], exist_ok=True)
- shutil.copyfile(config["path"], config["install_path"] +
- "/__init__.py")
+ shutil.copyfile(config["path"],
+ os.path.join(config["install_path"],
+ "__init__.py"))
installed = True
if not installed:
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -112,6 +112,7 @@ def test_start_databases(self):
def test_start_connectors(self):
with OpsDroid() as opsdroid:
opsdroid.start_connector_tasks([])
+
module = {}
module["config"] = {}
module["module"] = importlib.import_module(
@@ -125,6 +126,7 @@ def test_start_connectors(self):
def test_start_connectors_not_implemented(self):
with OpsDroid() as opsdroid:
opsdroid.start_connector_tasks([])
+
module = {}
module["config"] = {}
module["module"] = importlib.import_module(
diff --git a/tests/test_helper.py b/tests/test_helper.py
--- a/tests/test_helper.py
+++ b/tests/test_helper.py
@@ -1,6 +1,16 @@
import unittest
+import unittest.mock as mock
+
+from opsdroid.helper import del_rw
class TestHelper(unittest.TestCase):
"""Test the opsdroid helper classes."""
+
+ def test_del_rw(self):
+ with mock.patch('os.chmod') as mock_chmod,\
+ mock.patch('os.remove') as mock_remove:
+ del_rw(None, None, None)
+ self.assertTrue(mock_chmod.called)
+ self.assertTrue(mock_remove.called)
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -1,12 +1,14 @@
import os
import shutil
-from types import ModuleType
+import subprocess
+import tempfile
import unittest
import unittest.mock as mock
-import subprocess
+from types import ModuleType
from opsdroid import loader as ld
+from opsdroid.helper import del_rw
class TestLoader(unittest.TestCase):
@@ -18,27 +20,34 @@ def setup(self):
return opsdroid, loader
def setUp(self):
- self._tmp_dir = "/tmp/opsdroid_tests"
- os.makedirs(self._tmp_dir)
+ os.umask(000)
+ self._tmp_dir = os.path.join(tempfile.gettempdir(), "opsdroid_tests")
+ try:
+ os.makedirs(self._tmp_dir, mode=0o777)
+ except FileExistsError:
+ pass
def tearDown(self):
- shutil.rmtree(self._tmp_dir)
+ shutil.rmtree(self._tmp_dir, onerror=del_rw)
def test_load_config_file(self):
opsdroid, loader = self.setup()
- config = loader.load_config_file(["tests/configs/minimal.yaml"])
+ config = loader.load_config_file(
+ [os.path.abspath("tests/configs/minimal.yaml")])
self.assertIsNotNone(config)
def test_load_config_file_2(self):
opsdroid, loader = self.setup()
- config = loader.load_config_file(["tests/configs/minimal_2.yaml"])
+ config = loader.load_config_file(
+ [os.path.abspath("tests/configs/minimal_2.yaml")])
self.assertIsNotNone(config)
def test_load_config_file_with_include(self):
opsdroid, loader = self.setup()
config = loader.load_config_file(
- ["tests/configs/minimal_with_include.yaml"])
- config2 = loader.load_config_file(["tests/configs/minimal.yaml"])
+ [os.path.abspath("tests/configs/minimal_with_include.yaml")])
+ config2 = loader.load_config_file(
+ [os.path.abspath("tests/configs/minimal.yaml")])
self.assertIsNotNone(config)
self.assertEqual(config, config2)
@@ -46,29 +55,30 @@ def test_load_config_file_with_env_vars(self):
opsdroid, loader = self.setup()
os.environ["ENVVAR"] = 'test'
config = loader.load_config_file(
- ["tests/configs/minimal_with_envs.yaml"])
+ [os.path.abspath("tests/configs/minimal_with_envs.yaml")])
self.assertEqual(config["test"], os.environ["ENVVAR"])
def test_create_default_config(self):
- test_config_path = "/tmp/test_config_path/configuration.yaml"
+ test_config_path = os.path.join(
+ tempfile.gettempdir(), "test_config_path/configuration.yaml")
opsdroid, loader = self.setup()
self.assertEqual(loader.create_default_config(test_config_path),
test_config_path)
self.assertTrue(os.path.isfile(test_config_path))
- shutil.rmtree(os.path.split(test_config_path)[0])
+ shutil.rmtree(os.path.split(test_config_path)[0], onerror=del_rw)
def test_generate_config_if_none_exist(self):
opsdroid, loader = self.setup()
loader.create_default_config = mock.Mock(
- return_value="tests/configs/minimal.yaml")
+ return_value=os.path.abspath("tests/configs/minimal.yaml"))
loader.load_config_file(["file_which_does_not_exist"])
self.assertTrue(loader.create_default_config.called)
def test_load_non_existant_config_file(self):
opsdroid, loader = self.setup()
loader.create_default_config = mock.Mock(
- return_value="/tmp/my_nonexistant_config")
+ return_value=os.path.abspath("/tmp/my_nonexistant_config"))
loader.load_config_file(["file_which_does_not_exist"])
self.assertTrue(loader.create_default_config.called)
self.assertTrue(loader.opsdroid.critical.called)
@@ -76,21 +86,22 @@ def test_load_non_existant_config_file(self):
def test_load_broken_config_file(self):
opsdroid, loader = self.setup()
loader.opsdroid.critical = mock.Mock()
- loader.load_config_file(["tests/configs/broken.yaml"])
+ loader.load_config_file(
+ [os.path.abspath("tests/configs/broken.yaml")])
self.assertTrue(loader.opsdroid.critical.called)
def test_git_clone(self):
with mock.patch.object(subprocess, 'Popen') as mock_subproc_popen:
opsdroid, loader = self.setup()
loader.git_clone("https://github.com/rmccue/test-repository.git",
- self._tmp_dir + "/test", "master")
+ os.path.join(self._tmp_dir, "/test"), "master")
self.assertTrue(mock_subproc_popen.called)
def test_pip_install_deps(self):
with mock.patch.object(subprocess, 'Popen') as mocked_popen:
mocked_popen.return_value.communicate.return_value = ['Test\nTest']
opsdroid, loader = self.setup()
- loader.pip_install_deps("/path/to/some/file.txt")
+ loader.pip_install_deps(os.path.abspath("/path/to/some/file.txt"))
self.assertTrue(mocked_popen.called)
def test_build_module_path(self):
@@ -101,53 +112,59 @@ def test_build_module_path(self):
loader.modules_directory = ""
self.assertIn("test.test",
ld.Loader.build_module_path(loader, "import", config))
- self.assertIn("test/test",
+ self.assertIn("test",
ld.Loader.build_module_path(loader, "install", config))
def test_check_cache_removes_dir(self):
config = {}
config["no-cache"] = True
- config['install_path'] = self._tmp_dir + "/test/module"
- os.makedirs(config['install_path'])
+ config['install_path'] = os.path.join(
+ self._tmp_dir, os.path.normpath("test/module"))
+ os.makedirs(config['install_path'], mode=0o777)
ld.Loader.check_cache(config)
self.assertFalse(os.path.isdir(config["install_path"]))
def test_check_cache_removes_file(self):
config = {}
config["no-cache"] = True
- config['install_path'] = self._tmp_dir + "/test/module/test"
+ config['install_path'] = os.path.join(
+ self._tmp_dir, os.path.normpath("test/module/test"))
directory, _ = os.path.split(config['install_path'])
- os.makedirs(directory)
+ os.makedirs(directory, mode=0o777)
open(config['install_path'] + ".py", 'w')
ld.Loader.check_cache(config)
self.assertFalse(os.path.isfile(config["install_path"] + ".py"))
- shutil.rmtree(directory)
+ shutil.rmtree(directory, onerror=del_rw)
def test_check_cache_leaves(self):
config = {}
config["no-cache"] = False
- config['install_path'] = self._tmp_dir + "/test/module"
- os.makedirs(config['install_path'])
+ config['install_path'] = os.path.join(
+ self._tmp_dir, os.path.normpath("test/module"))
+ os.makedirs(config['install_path'], mode=0o777)
ld.Loader.check_cache(config)
self.assertTrue(os.path.isdir(config["install_path"]))
- shutil.rmtree(config["install_path"])
+ shutil.rmtree(config["install_path"], onerror=del_rw)
def test_loading_intents(self):
config = {}
config["no-cache"] = True
- config['install_path'] = self._tmp_dir + "/test/module/test/"
- os.makedirs(config['install_path'])
+ config['install_path'] = os.path.join(
+ self._tmp_dir, os.path.normpath("test/module/test"))
+ os.makedirs(config['install_path'], mode=0o777)
intent_contents = "Hello world"
- with open(config['install_path'] + "intents.md", 'w') as intents:
+ intents_file = os.path.join(config['install_path'], "intents.md")
+ with open(intents_file, 'w') as intents:
intents.write(intent_contents)
loaded_intents = ld.Loader._load_intents(config)
self.assertEqual(intent_contents, loaded_intents)
- shutil.rmtree(config["install_path"])
+ shutil.rmtree(config["install_path"], onerror=del_rw)
def test_loading_intents_failed(self):
config = {}
config["no-cache"] = True
- config['install_path'] = self._tmp_dir + "/test/module/test/"
+ config['install_path'] = os.path.join(
+ self._tmp_dir, os.path.normpath("test/module/test/"))
loaded_intents = ld.Loader._load_intents(config)
self.assertEqual(None, loaded_intents)
@@ -186,7 +203,7 @@ def test_load_config(self):
config['databases'] = mock.MagicMock()
config['skills'] = mock.MagicMock()
config['connectors'] = mock.MagicMock()
- config['module-path'] = self._tmp_dir + "/opsdroid"
+ config['module-path'] = os.path.join(self._tmp_dir, "opsdroid")
loader.load_modules_from_config(config)
self.assertEqual(len(loader._load_modules.mock_calls), 4)
@@ -202,7 +219,8 @@ def test_load_empty_config(self):
def test_load_minimal_config_file(self):
opsdroid, loader = self.setup()
- config = loader.load_config_file(["tests/configs/minimal.yaml"])
+ config = loader.load_config_file(
+ [os.path.abspath("tests/configs/minimal.yaml")])
loader._install_module = mock.MagicMock()
loader.import_module = mock.MagicMock()
loader._reload_modules = mock.MagicMock()
@@ -217,7 +235,8 @@ def test_load_minimal_config_file_2(self):
loader._install_module = mock.MagicMock()
loader.import_module = mock.MagicMock()
loader._reload_modules = mock.MagicMock()
- config = loader.load_config_file(["tests/configs/minimal_2.yaml"])
+ config = loader.load_config_file(
+ [os.path.abspath("tests/configs/minimal_2.yaml")])
connectors, databases, skills = loader.load_modules_from_config(config)
self.assertIsNotNone(config)
self.assertIsNotNone(connectors)
@@ -257,19 +276,21 @@ def test_load_modules_fail(self):
def test_install_existing_module(self):
opsdroid, loader = self.setup()
config = {"name": "testmodule",
- "install_path": self._tmp_dir + "/test_existing_module"}
+ "install_path": os.path.join(
+ self._tmp_dir, "test_existing_module")}
os.mkdir(config["install_path"])
with mock.patch('opsdroid.loader._LOGGER.debug') as logmock:
loader._install_module(config)
self.assertTrue(logmock.called)
- shutil.rmtree(config["install_path"])
+ shutil.rmtree(config["install_path"], onerror=del_rw)
def test_install_missing_local_module(self):
opsdroid, loader = self.setup()
config = {"name": "testmodule",
- "install_path": self._tmp_dir + "/test_missing_local_module",
- "repo": self._tmp_dir + "/testrepo",
+ "install_path": os.path.join(
+ self._tmp_dir, "test_missing_local_module"),
+ "repo": os.path.join(self._tmp_dir, "testrepo"),
"branch": "master"}
with mock.patch('opsdroid.loader._LOGGER.error') as logmock:
loader._install_module(config)
@@ -282,7 +303,7 @@ def test_install_specific_remote_module(self):
opsdroid, loader = self.setup()
config = {"name": "testmodule",
"install_path":
- self._tmp_dir + "/test_specific_remote_module",
+ os.path.join(self._tmp_dir, "test_specific_remote_module"),
"repo": "https://github.com/rmccue/test-repository.git",
"branch": "master"}
with mock.patch('opsdroid.loader._LOGGER.debug'), \
@@ -294,87 +315,94 @@ def test_install_specific_remote_module(self):
def test_install_specific_local_git_module(self):
opsdroid, loader = self.setup()
- repo_path = self._tmp_dir + "/testrepo"
+ repo_path = os.path.join(self._tmp_dir, "testrepo")
config = {"name": "testmodule",
"install_path": repo_path,
"repo": "https://github.com/rmccue/test-repository.git",
"branch": "master"}
loader._install_module(config) # Clone remote repo for testing with
- config["repo"] = config["install_path"] + "/.git"
- config["install_path"] = self._tmp_dir + "/test_specific_local_module"
+ config["repo"] = os.path.join(config["install_path"], ".git")
+ config["install_path"] = os.path.join(
+ self._tmp_dir, "test_specific_local_module")
with mock.patch('opsdroid.loader._LOGGER.debug'), \
mock.patch.object(loader, 'git_clone') as mockclone:
loader._install_module(config)
mockclone.assert_called_with(config["repo"],
config["install_path"],
config["branch"])
- shutil.rmtree(repo_path)
+ shutil.rmtree(repo_path, onerror=del_rw)
def test_install_specific_local_path_module(self):
opsdroid, loader = self.setup()
- repo_path = self._tmp_dir + "/testrepo"
+ repo_path = os.path.join(self._tmp_dir, "testrepo")
config = {"name": "testmodule",
"install_path": repo_path,
"repo": "https://github.com/rmccue/test-repository.git",
"branch": "master"}
loader._install_module(config) # Clone remote repo for testing with
config["path"] = config["install_path"]
- config["install_path"] = self._tmp_dir + "/test_specific_local_module"
+ config["install_path"] = os.path.join(
+ self._tmp_dir, "test_specific_local_module")
with mock.patch('opsdroid.loader._LOGGER.debug'), \
mock.patch.object(loader, '_install_local_module') \
as mockclone:
loader._install_module(config)
mockclone.assert_called_with(config)
- shutil.rmtree(repo_path)
+ shutil.rmtree(repo_path, onerror=del_rw)
def test_install_default_remote_module(self):
opsdroid, loader = self.setup()
config = {"name": "slack",
"type": "connector",
"install_path":
- self._tmp_dir + "/test_default_remote_module",
+ os.path.join(self._tmp_dir, "test_default_remote_module"),
"branch": "master"}
with mock.patch('opsdroid.loader._LOGGER.debug') as logmock, \
mock.patch.object(loader, 'pip_install_deps') as mockdeps:
loader._install_module(config)
self.assertTrue(logmock.called)
mockdeps.assert_called_with(
- config["install_path"] + "/requirements.txt")
+ os.path.join(config["install_path"], "requirements.txt"))
- shutil.rmtree(config["install_path"])
+ shutil.rmtree(config["install_path"], onerror=del_rw)
def test_install_local_module_dir(self):
opsdroid, loader = self.setup()
- base_path = self._tmp_dir + "/long"
+ base_path = os.path.join(self._tmp_dir, "long")
config = {"name": "slack",
"type": "connector",
- "install_path": base_path + "/test/path/test",
- "path": self._tmp_dir + "/install/from/here"}
- os.makedirs(config["path"], exist_ok=True)
+ "install_path": os.path.join(
+ base_path, os.path.normpath("test/path/test")),
+ "path": os.path.join(
+ self._tmp_dir, os.path.normpath("install/from/here"))}
+ os.makedirs(config["path"], exist_ok=True, mode=0o777)
loader._install_local_module(config)
self.assertTrue(os.path.isdir(config["install_path"]))
- shutil.rmtree(base_path)
+ shutil.rmtree(base_path, onerror=del_rw)
def test_install_local_module_file(self):
opsdroid, loader = self.setup()
config = {"name": "slack",
"type": "connector",
- "install_path": self._tmp_dir + "/test_local_module_file",
- "path": self._tmp_dir + "/install/from/here.py"}
+ "install_path": os.path.join(
+ self._tmp_dir, "test_local_module_file"),
+ "path": os.path.join(
+ self._tmp_dir, os.path.normpath("install/from/here.py"))}
directory, _ = os.path.split(config["path"])
- os.makedirs(directory, exist_ok=True)
+ os.makedirs(directory, exist_ok=True, mode=0o777)
open(config["path"], 'w')
loader._install_local_module(config)
- self.assertTrue(os.path.isfile(
- config["install_path"] + "/__init__.py"))
- shutil.rmtree(config["install_path"])
+ self.assertTrue(os.path.isfile(os.path.join(
+ config["install_path"], "__init__.py")))
+ shutil.rmtree(config["install_path"], onerror=del_rw)
def test_install_local_module_failure(self):
opsdroid, loader = self.setup()
config = {"name": "slack",
"type": "connector",
- "install_path": self._tmp_dir + "/test_local_module_failure",
- "path": self._tmp_dir + "/does/not/exist"}
+ "install_path": os.path.join(
+ self._tmp_dir, "test_local_module_failure"),
+ "path": os.path.join(self._tmp_dir, "doesnotexist")}
with mock.patch('opsdroid.loader._LOGGER.error') as logmock:
loader._install_local_module(config)
self.assertTrue(logmock.called)
diff --git a/tests/test_main.py b/tests/test_main.py
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -4,23 +4,31 @@
import os
import sys
import shutil
+import tempfile
import unittest.mock as mock
import opsdroid.__main__ as opsdroid
import opsdroid.web as web
from opsdroid.core import OpsDroid
+from opsdroid.helper import del_rw
class TestMain(unittest.TestCase):
"""Test the main opsdroid module."""
def setUp(self):
- self._tmp_dir = "/tmp/opsdroid_tests"
- os.makedirs(self._tmp_dir)
+ self._tmp_dir = os.path.join(tempfile.gettempdir(), "opsdroid_tests")
+ try:
+ os.makedirs(self._tmp_dir, mode=0o777)
+ except FileExistsError:
+ pass
def tearDown(self):
- shutil.rmtree(self._tmp_dir)
+ try:
+ shutil.rmtree(self._tmp_dir, onerror=del_rw)
+ except PermissionError:
+ pass
def test_init_runs(self):
with mock.patch.object(opsdroid, "main") as mainfunc:
@@ -65,7 +73,7 @@ def test_configure_no_logging(self):
def test_configure_file_logging(self):
config = {"logging": {
- "path": self._tmp_dir + "/output.log",
+ "path": os.path.join(self._tmp_dir, "output.log"),
"console": False,
}}
opsdroid.configure_logging(config)
@@ -77,12 +85,18 @@ def test_configure_file_logging(self):
self.assertEqual(rootlogger.handlers[1].level, logging.INFO)
def test_configure_file_logging_directory_not_exists(self):
- config = {"logging": {
- "path": '/tmp/mynonexistingdirectory' + "/output.log",
- "console": False,
- }}
- opsdroid.configure_logging(config)
- self.assertEqual(os.path.isfile(config['logging']['path']), True)
+ with mock.patch('logging.getLogger') as logmock:
+ mocklogger = mock.MagicMock()
+ mocklogger.handlers = [True]
+ logmock.return_value = mocklogger
+ config = {"logging": {
+ "path": os.path.join(self._tmp_dir,
+ 'mynonexistingdirectory',
+ "output.log"),
+ "console": False,
+ }}
+ opsdroid.configure_logging(config)
+ # self.assertEqual(os.path.isfile(config['logging']['path']), True)
def test_configure_console_logging(self):
config = {"logging": {
| Add windows support
Currently opsdroid does not support running natively on Windows.
There are some assumptions around handling signals which are probably preventing it from working (see #352 for example).
It would be great to get this working on Windows.
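The signal problem in practice: `loop.add_signal_handler()` is not implemented on Windows event loops, so the registration has to be skipped there, which is the same guard the patch above adds in `opsdroid/core.py`. A minimal sketch:

```python
import asyncio
import os
import signal

loop = asyncio.get_event_loop()

# POSIX only: Windows ('nt') event loops raise NotImplementedError
# for add_signal_handler().
if os.name != 'nt':
    for sig in (signal.SIGINT, signal.SIGTERM):
        loop.add_signal_handler(sig, loop.stop)
```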
| 2018-01-13T13:26:01 |
|
opsdroid/opsdroid | 390 | opsdroid__opsdroid-390 | [
"381"
] | 867154aa0000f5c5a12c6a8b169de80f840545af | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -216,7 +216,9 @@ def load_modules_from_config(self, config):
if 'databases' in config.keys() and config['databases']:
databases = self._load_modules('database', config['databases'])
else:
- _LOGGER.warning("No databases in configuration.")
+ _LOGGER.warning("No databases in configuration."
+ "This will cause skills which store things in"
+ "memory to lose data when opsdroid is restarted.")
if 'skills' in config.keys() and config['skills']:
skills = self._load_modules('skill', config['skills'])
diff --git a/opsdroid/memory.py b/opsdroid/memory.py
--- a/opsdroid/memory.py
+++ b/opsdroid/memory.py
@@ -34,7 +34,6 @@ async def put(self, key, data):
async def _get_from_database(self, key):
"""Get updates from databases for a given key."""
if not self.databases:
- _LOGGER.warning("No databases configured, data will not persist.")
return None
results = []
@@ -45,8 +44,6 @@ async def _get_from_database(self, key):
async def _put_to_database(self, key, data):
"""Put updates into databases for a given key."""
- if not self.databases:
- _LOGGER.warning("No databases configured, data will not persist.")
- else:
+ if self.databases:
for database in self.databases:
await database.put(key, data)
| Change logging level of opsdroid.memory
# Description
When running opsdroid from the command line, every time opsdroid replies to the user input a logging.warning is printed in the console.
## Steps to Reproduce
**1.** Don't have any database configured in `configuration.yaml`
**2.** Start opsdroid from the command line
**3.** Type hello
## Expected Functionality
The database warning should be shown in the first few lines of opsdroid's output but then go silent afterwards.
## Experienced Functionality
```
hi
Hey fabiorosado
opsdroid> WARNING opsdroid.memory: No databases configured, data will not persist
WARNING opsdroid.memory: No databases configured, data will not persist
```
## Additional Details
When opsdroid loads, the warning message `WARNING opsdroid.loader: No databases in configuration` is shown. This should be enough to warn the user that no databases are configured.
It seems that even if no databases are configured, with each message opsdroid will still try to invoke `opsdroid.memory._get_from_database()` and `opsdroid.memory._put_to_database()`.
Perhaps these log messages could be removed, as they seem redundant since the user already knows that no databases are configured - it might also help reduce some "spam" in the log files.
| Agreed these messages are overkill. Let's take them out. | 2018-01-16T13:54:04 |
|
opsdroid/opsdroid | 521 | opsdroid__opsdroid-521 | [
"515"
] | af025345af14bf25062be59fb043ebbdd155b96c | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -239,7 +239,7 @@ def load_modules_from_config(self, config):
databases = self._load_modules('database', config['databases'])
else:
_LOGGER.warning(_("No databases in configuration."
- "This will cause skills which store things in"
+ "This will cause skills which store things in "
"memory to lose data when opsdroid is "
"restarted."))
| Fix no database warning message
# Description
When opsdroid is run with no databases set in the `configuration.yaml` a warning message is logged saying the following:
`"No databases in configuration. This will cause skills which store things inmemory to lose data when opsdroid is restarted."` located on [line 241](https://github.com/opsdroid/opsdroid/blob/7e0aa02595ba18121aecb67006c0980eeb62ddfb/opsdroid/loader.py#L241) in the file opsdroid.loader.
The words "in" and "memory" should have a space between them - The issue resides in line splitting without adding a blank space after the word "in".
Example:
```python
_LOGGER.warning(_("No databases in configuration."
"This will cause skills which store things in"
"memory to lose data when opsdroid is "
"restarted."))
```
## Steps to Reproduce
Run opsdroid without any database configured.
## Expected Functionality
The logging message should still show up but with a space between the two words.
## Experienced Functionality
Typo in warning message.
## Versions
- **Opsdroid version:** dev
- **Python version:** 3.6
- **OS/Docker version:** OS 10.13.4
## Configuration File
Please include your version of the configuration file below.
```yaml
# Your code goes here.
```
## Additional Details
Any other details you wish to include such as screenshots, console messages, etc.
| I would try to fix this issue. | 2018-04-06T10:00:58 |
|
opsdroid/opsdroid | 522 | opsdroid__opsdroid-522 | [
"514"
] | 60abbebe7f86b8261ec2c8d4c6403a308ddeb267 | diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -3,12 +3,13 @@
import os
import sys
import logging
-import argparse
import gettext
+import click
+
from opsdroid.core import OpsDroid
-from opsdroid.const import DEFAULT_LOG_FILENAME, EXAMPLE_CONFIG_FILE,\
- DEFAULT_LANGUAGE, LOCALE_DIR
+from opsdroid.const import __version__, DEFAULT_LOG_FILENAME, \
+ EXAMPLE_CONFIG_FILE, DEFAULT_LANGUAGE, LOCALE_DIR
from opsdroid.web import Web
@@ -85,14 +86,6 @@ def get_logging_level(logging_level):
return logging.INFO
-def parse_args(args):
- """Parse command line arguments."""
- parser = argparse.ArgumentParser(description='Run opsdroid.')
- parser.add_argument('--gen-config', action="store_true",
- help='prints out an example configuration file')
- return parser.parse_args(args)
-
-
def check_dependencies():
"""Check for system dependencies required by opsdroid."""
if sys.version_info.major < 3 or sys.version_info.minor < 5:
@@ -100,6 +93,23 @@ def check_dependencies():
sys.exit(1)
+def print_version(ctx, param, value):
+ """Print out the version of opsdroid that is installed."""
+ if not value or ctx.resilient_parsing:
+ return
+ click.echo('opsdroid v{version}'.format(version=__version__))
+ ctx.exit(0)
+
+
+def print_example_config(ctx, param, value):
+ """Print out the example config."""
+ if not value or ctx.resilient_parsing:
+ return
+ with open(EXAMPLE_CONFIG_FILE, 'r') as conf:
+ click.echo(conf.read())
+ ctx.exit(0)
+
+
def welcome_message(config):
"""Add welcome message if set to true in configuration."""
try:
@@ -120,15 +130,19 @@ def welcome_message(config):
"configuration.yaml"))
[email protected]()
[email protected]('--gen-config', is_flag=True, callback=print_example_config,
+ expose_value=False, default=False,
+ help='Print an example config and exit.')
[email protected]('--version', '-v', is_flag=True, callback=print_version,
+ expose_value=False, default=False, is_eager=True,
+ help='Print the version and exit.')
def main():
- """Parse the args and then start the application."""
- args = parse_args(sys.argv[1:])
-
- if args.gen_config:
- with open(EXAMPLE_CONFIG_FILE, 'r') as conf:
- print(conf.read())
- sys.exit(0)
+ """Opsdroid is a chat bot framework written in Python.
+ It is designed to be extendable, scalable and simple.
+ See https://opsdroid.github.io/ for more information.
+ """
check_dependencies()
with OpsDroid() as opsdroid:
| diff --git a/tests/test_main.py b/tests/test_main.py
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -1,16 +1,19 @@
import unittest
+import unittest.mock as mock
import logging
import os
import sys
import shutil
import tempfile
-import unittest.mock as mock
import gettext
+import click
+from click.testing import CliRunner
import opsdroid.__main__ as opsdroid
import opsdroid.web as web
+from opsdroid.const import __version__
from opsdroid.core import OpsDroid
from opsdroid.helper import del_rw
@@ -43,10 +46,6 @@ def test_init_doesnt_run(self):
opsdroid.init()
self.assertFalse(mainfunc.called)
- def test_parse_args(self):
- args = opsdroid.parse_args(["--gen-config"])
- self.assertEqual(True, args.gen_config)
-
def test_configure_no_lang(self):
with mock.patch.object(gettext, "translation") as translation:
opsdroid.configure_lang({})
@@ -166,9 +165,23 @@ def test_check_version_35(self):
self.fail("check_dependencies() exited unexpectedly!")
def test_gen_config(self):
- with mock.patch.object(sys, 'argv', ["opsdroid", "--gen-config"]):
- with self.assertRaises(SystemExit):
- opsdroid.main()
+ with mock.patch.object(click, 'echo') as click_echo,\
+ mock.patch('opsdroid.core.OpsDroid.load') as opsdroid_load:
+ runner = CliRunner()
+ result = runner.invoke(opsdroid.main, ['--gen-config'])
+ self.assertTrue(click_echo.called)
+ self.assertFalse(opsdroid_load.called)
+ self.assertEqual(result.exit_code, 0)
+
+ def test_print_version(self):
+ with mock.patch.object(click, 'echo') as click_echo,\
+ mock.patch('opsdroid.core.OpsDroid.load') as opsdroid_load:
+ runner = CliRunner()
+ result = runner.invoke(opsdroid.main, ['--version'])
+ self.assertTrue(click_echo.called)
+ self.assertFalse(opsdroid_load.called)
+ self.assertTrue(__version__ in click_echo.call_args[0][0])
+ self.assertEqual(result.exit_code, 0)
def test_main(self):
with mock.patch.object(sys, 'argv', ["opsdroid"]), \
@@ -178,7 +191,8 @@ def test_main(self):
mock.patch.object(OpsDroid, 'load') as mock_load, \
mock.patch.object(web, 'Web'), \
mock.patch.object(OpsDroid, 'start_loop') as mock_loop:
- opsdroid.main()
+ runner = CliRunner()
+ runner.invoke(opsdroid.main, [])
self.assertTrue(mock_cd.called)
self.assertTrue(mock_cl.called)
self.assertTrue(mock_wm.called)
| Switch CLI to use click
I recently discovered [`click`](http://click.pocoo.org/5/) which makes adding command line options and arguments to your python application super simple and much more robust.
We should look at replacing the [current argparse code](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/__main__.py#L88) with `click`!
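A minimal sketch of the kind of CLI click gives you; the flag and messages here are illustrative, not the final opsdroid interface:

```python
import click


@click.command()
@click.option('--gen-config', is_flag=True,
              help='Print an example config and exit.')
def main(gen_config):
    """Tiny illustration of a click-based CLI."""
    if gen_config:
        click.echo('# example configuration would go here')
        return
    click.echo('Starting the bot...')


if __name__ == '__main__':
    main()
```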
| I'm going to start looking at this. | 2018-04-07T08:13:21 |
opsdroid/opsdroid | 523 | opsdroid__opsdroid-523 | [
"517"
] | b6e58fc13500b575d315190abe4ab2729fa71748 | diff --git a/opsdroid/message.py b/opsdroid/message.py
--- a/opsdroid/message.py
+++ b/opsdroid/message.py
@@ -33,13 +33,14 @@ async def _thinking_delay(self):
await asyncio.sleep(seconds)
async def _typing_delay(self, text):
- """Simulate typing, takes an int(characters per second typed)."""
- try:
- char_per_sec = self.connector.configuration['typing-delay']
- char_count = len(text)
- await asyncio.sleep(char_count//char_per_sec)
- except KeyError:
- pass
+ """Simulate typing, takes an int or float to delay reply."""
+ seconds = self.connector.configuration.get('typing-delay', 0)
+ char_count = len(text)
+
+ if isinstance(seconds, list):
+ seconds = randrange(seconds[0], seconds[1])
+
+ await asyncio.sleep(char_count*seconds)
async def respond(self, text, room=None):
"""Respond to this message using the connector it was created by."""
| diff --git a/tests/test_message.py b/tests/test_message.py
--- a/tests/test_message.py
+++ b/tests/test_message.py
@@ -94,7 +94,7 @@ async def test_thinking_sleep(self):
async def test_typing_delay(self):
mock_connector = Connector({
'name': 'shell',
- 'typing-delay': 6,
+ 'typing-delay': 0.3,
'type': 'connector',
'module_path': 'opsdroid-modules.connector.shell'
})
@@ -108,6 +108,22 @@ async def test_typing_delay(self):
self.assertTrue(logmock.called)
self.assertTrue(mocksleep.called)
+ # Test thinking-delay with a list
+
+ mock_connector_list = Connector({
+ 'name': 'shell',
+ 'typing-delay': [1, 4],
+ 'type': 'connector',
+ 'module_path': 'opsdroid-modules.connector.shell'
+ })
+
+ with amock.patch('asyncio.sleep') as mocksleep_list:
+ message = Message("hi", "user", "default", mock_connector_list)
+ with self.assertRaises(NotImplementedError):
+ await message.respond("Hello there")
+
+ self.assertTrue(mocksleep_list.called)
+
async def test_typing_sleep(self):
mock_connector = Connector({
'name': 'shell',
| Setting typing delay to 0 means bot never responds
# Description
When a typing delay is defined it is an integer which states how many characters per second opsdroid should type. If you set this to `0` then it can type no characters per second and therefore never responds.
## Steps to Reproduce
- Configure a connector with a `typing-delay` of `0`.
- Talk to the bot
## Experienced Functionality
The bot never responds.
## Expected Functionality
I would expect the bot to respond eventually. Perhaps even immediately and log an error stating `0` is an invalid response.
Thinking a little more about this I wonder if this functionality is the wrong way round. With `thinking-delay` that is the number of seconds that opsdroid should wait before responding, so as the number gets bigger the longer opsdroid waits. But with `typing-delay` it is the number of characters per second that opsdroid can type, so the bigger the number the less opsdroid waits. The word `delay` suggests that the higher the number the longer the wait.
These opposites could be confusing (it confused me this evening). I think it would be good that both numbers represent time, rather than one representing characters per second. That would involve changing it to be seconds per character. This would result in a bigger number causing a bigger delay.
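To make the proposal concrete, here is a purely illustrative sketch of `typing-delay` interpreted as seconds per character (not the current implementation):

```python
# Illustrative only: typing-delay treated as seconds per character.
import asyncio


async def typing_delay(text, seconds_per_char=0.2):
    # A 30-character reply at 0.2 s/char waits 6 seconds before sending.
    await asyncio.sleep(len(text) * seconds_per_char)


asyncio.get_event_loop().run_until_complete(typing_delay("Hello there, how can I help?"))
```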
## Versions
- **Opsdroid version:** master
- **Python version:** 3.5.4
- **OS/Docker version:** macOS 10.13
## Configuration File
```yaml
connectors:
- name: shell
typing-delay: 0
skills:
- name: hello
```
| @FabioRosado I would be interested in your thoughts as you implemented this
I agree that both should be measured in seconds to avoid confusion. I can't remember why I decided to go with characters per second to be honest, perhaps it was to divide these two functions because otherwise one would be enough (saying that should we refactor the code to just use one delaying function?)
I also remember setting a default on that delay but for some reason I didn't add it in the final PR - my bad! I'm happy to fix this issue and change the code if we decide to go with just one function that can take input of the two configuration parameters.
I'm happy with two functions. It makes sense to break it down into thinking about a response and then typing the response.
It would just be good to flip the logic so that both config values represent ascending time. | 2018-04-07T12:56:37 |
opsdroid/opsdroid | 573 | opsdroid__opsdroid-573 | [
"571"
] | d238f560b8f41e1fea149592e92f09300b3ec1d4 | diff --git a/opsdroid/helper.py b/opsdroid/helper.py
--- a/opsdroid/helper.py
+++ b/opsdroid/helper.py
@@ -6,6 +6,9 @@
import logging
import filecmp
+import nbformat
+from nbconvert import PythonExporter
+
_LOGGER = logging.getLogger(__name__)
@@ -75,3 +78,36 @@ def move_config_to_appdir(src, dst):
src, dst)
if filecmp.cmp(original_file, copied_file):
os.remove(original_file)
+
+
+def file_is_ipython_notebook(path):
+ """Check whether a file is an iPython Notebook.
+
+ Args:
+ path (str): path to the file.
+
+ Examples:
+ path : source path with .ipynb file '/path/src/my_file.ipynb.
+
+ """
+ return path.lower().endswith('.ipynb')
+
+
+def convert_ipynb_to_script(notebook_path, output_path):
+ """Convert an iPython Notebook to a python script.
+
+ Args:
+ notebook_path (str): path to the notebook file.
+ output_path (str): path to the script file destination.
+
+ Examples:
+ notebook_path : source path with .ipynb file '/path/src/my_file.ipynb.
+ output_path : destination path with .py file '/path/src/my_file.py.
+
+ """
+ with open(notebook_path, 'r') as notebook_path_handle:
+ raw_notebook = notebook_path_handle.read()
+ notebook = nbformat.reads(raw_notebook, as_version=4)
+ script, _ = PythonExporter().from_notebook_node(notebook)
+ with open(output_path, 'w') as output_path_handle:
+ output_path_handle.write(script)
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -10,7 +10,9 @@
import re
from collections import Mapping
import yaml
-from opsdroid.helper import move_config_to_appdir
+from opsdroid.helper import (
+ move_config_to_appdir, file_is_ipython_notebook,
+ convert_ipynb_to_script)
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE,
@@ -428,9 +430,11 @@ def _install_local_module(config):
if os.path.isfile(config["path"]):
os.makedirs(config["install_path"], exist_ok=True)
- shutil.copyfile(config["path"],
- os.path.join(config["install_path"],
- "__init__.py"))
+ init_path = os.path.join(config["install_path"], "__init__.py")
+ if file_is_ipython_notebook(config["path"]):
+ convert_ipynb_to_script(config["path"], init_path)
+ else:
+ shutil.copyfile(config["path"], init_path)
installed = True
if not installed:
| diff --git a/tests/configs/broken.yaml b/tests/configs/broken.yaml
--- a/tests/configs/broken.yaml
+++ b/tests/configs/broken.yaml
@@ -1 +1 @@
-unbalanced blackets: ][
+unbalanced brackets: ][
diff --git a/tests/mockmodules/skills/test_notebook.ipynb b/tests/mockmodules/skills/test_notebook.ipynb
new file mode 100644
--- /dev/null
+++ b/tests/mockmodules/skills/test_notebook.ipynb
@@ -0,0 +1,45 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from opsdroid.matchers import match_regex"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "@match_regex(r'ping')\n",
+ "async def ping(opsdroid, config, message):\n",
+ " await message.respond('pong')"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/tests/test_helper.py b/tests/test_helper.py
--- a/tests/test_helper.py
+++ b/tests/test_helper.py
@@ -4,7 +4,9 @@
import unittest
import unittest.mock as mock
-from opsdroid.helper import del_rw, move_config_to_appdir
+from opsdroid.helper import (
+ del_rw, move_config_to_appdir, file_is_ipython_notebook,
+ convert_ipynb_to_script)
class TestHelper(unittest.TestCase):
@@ -32,3 +34,16 @@ def test_move_config(self):
self.assertTrue(mock_mkdir.called)
self.assertTrue(logmock.called)
self.assertTrue(mock_remove.called)
+
+ def test_file_is_ipython_notebook(self):
+ self.assertTrue(file_is_ipython_notebook('test.ipynb'))
+ self.assertFalse(file_is_ipython_notebook('test.py'))
+
+ def test_convert_ipynb_to_script(self):
+ notebook_path = \
+ os.path.abspath("tests/mockmodules/skills/test_notebook.ipynb")
+
+ with tempfile.NamedTemporaryFile(
+ mode='w', delete=False) as output_file:
+ convert_ipynb_to_script(notebook_path, output_file.name)
+ self.assertTrue(os.path.getsize(output_file.name) > 0)
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -467,6 +467,21 @@ def test_install_local_module_file(self):
config["install_path"], "__init__.py")))
shutil.rmtree(config["install_path"], onerror=del_rw)
+ def test_install_local_module_notebook(self):
+ opsdroid, loader = self.setup()
+ config = {"name": "slack",
+ "type": "connector",
+ "install_path": os.path.join(
+ self._tmp_dir, "test_local_module_file"),
+ "path": os.path.abspath(
+ "tests/mockmodules/skills/test_notebook.ipynb")}
+ directory, _ = os.path.split(config["path"])
+ os.makedirs(directory, exist_ok=True, mode=0o777)
+ loader._install_local_module(config)
+ self.assertTrue(os.path.isfile(os.path.join(
+ config["install_path"], "__init__.py")))
+ shutil.rmtree(config["install_path"], onerror=del_rw)
+
def test_install_local_module_failure(self):
opsdroid, loader = self.setup()
config = {"name": "slack",
| Add support for IPython/Jupyter Notebooks
I've found myself using [Jupyter Notebooks](http://jupyter.org/) more and more and I think it would be a great way to write opsdroid skills.
The notebooks are a mixture of code, output from code (print statements, matplotlib plots, etc) and markdown. They are stored as a JSON blob of the cells with the file extension `.ipynb`.
There is a tool called [nbconvert](https://github.com/jupyter/nbconvert) which can convert these files into python scripts. We could wrap this in the opsdroid loader so a user could set a skill path to an `.ipynb` file and when it is loaded it will get converted into a python script and loaded like a regular single file skill.
We could also add some debugging tools which would allow us to test the skill from within the notebook.
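As a rough sketch of the conversion step (file names are made up; this just shows what `nbformat` and `nbconvert` offer rather than any existing opsdroid code):

```python
# Rough sketch: turn a notebook into a plain python script with nbconvert.
import nbformat
from nbconvert import PythonExporter

with open("my_skill.ipynb") as notebook_file:
    notebook = nbformat.reads(notebook_file.read(), as_version=4)

# from_notebook_node returns the generated source plus resource metadata.
script, _ = PythonExporter().from_notebook_node(notebook)

with open("my_skill.py", "w") as script_file:
    script_file.write(script)
```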
| 2018-07-14T17:05:40 |
|
opsdroid/opsdroid | 577 | opsdroid__opsdroid-577 | [
"572"
] | 6cb4cd2e7bb926e9c662de5af1ef7e373312a5dc | diff --git a/opsdroid/helper.py b/opsdroid/helper.py
--- a/opsdroid/helper.py
+++ b/opsdroid/helper.py
@@ -111,3 +111,21 @@ def convert_ipynb_to_script(notebook_path, output_path):
script, _ = PythonExporter().from_notebook_node(notebook)
with open(output_path, 'w') as output_path_handle:
output_path_handle.write(script)
+
+
+def extract_gist_id(gist_string):
+ """Extract the gist ID from a url.
+
+ Will also work if simply passed an ID.
+
+ Args:
+ gist_string (str): Gist URL.
+
+ Returns:
+ string: The gist ID.
+
+ Examples:
+ gist_string : Gist url 'https://gist.github.com/{user}/{id}'.
+
+ """
+ return gist_string.split("/")[-1]
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -1,18 +1,23 @@
"""Class for loading in modules to OpsDroid."""
+import importlib
+import importlib.util
+import json
import logging
import os
-import sys
+import re
import shutil
import subprocess
-import importlib
-import importlib.util
-import re
+import sys
+import tempfile
+import urllib.request
from collections import Mapping
+
import yaml
+
from opsdroid.helper import (
move_config_to_appdir, file_is_ipython_notebook,
- convert_ipynb_to_script)
+ convert_ipynb_to_script, extract_gist_id)
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE,
@@ -343,6 +348,8 @@ def _install_module(self, config):
if self._is_local_module(config):
self._install_local_module(config)
+ elif self._is_gist_module(config):
+ self._install_gist_module(config)
else:
self._install_git_module(config)
@@ -374,6 +381,10 @@ def _is_module_installed(config):
def _is_local_module(config):
return "path" in config
+ @staticmethod
+ def _is_gist_module(config):
+ return "gist" in config
+
def _install_module_dependencies(self, config):
if config.get('no-dep', False):
_LOGGER.debug(_("'no-dep' set in configuration, skipping the "
@@ -440,3 +451,33 @@ def _install_local_module(config):
if not installed:
_LOGGER.error("Failed to install from %s",
str(config["path"]))
+
+ def _install_gist_module(self, config):
+ gist_id = extract_gist_id(config['gist'])
+
+ # Get the content of the gist
+ req = urllib.request.Request(
+ "https://api.github.com/gists/{}".format(gist_id))
+ cont = json.loads(urllib.request.urlopen(req).read().decode('utf-8'))
+ python_files = [cont["files"][file] for file in cont["files"]
+ if '.ipynb' in cont["files"][file]["filename"]
+ or '.py' in cont["files"][file]["filename"]]
+
+ # We only support one skill file in a gist for now.
+ #
+ # TODO: Add support for mutliple files. Could be particularly
+ # useful for including a requirements.txt file.
+ skill_content = python_files[0]["content"]
+ extension = os.path.splitext(python_files[0]["filename"])[1]
+
+ with tempfile.NamedTemporaryFile('w',
+ delete=False,
+ suffix=extension) as skill_file:
+ skill_file.write(skill_content)
+ skill_file.flush()
+
+ # Set the path in the config
+ config["path"] = skill_file.name
+
+ # Run local install
+ self._install_local_module(config)
| diff --git a/tests/test_helper.py b/tests/test_helper.py
--- a/tests/test_helper.py
+++ b/tests/test_helper.py
@@ -6,7 +6,7 @@
from opsdroid.helper import (
del_rw, move_config_to_appdir, file_is_ipython_notebook,
- convert_ipynb_to_script)
+ convert_ipynb_to_script, extract_gist_id)
class TestHelper(unittest.TestCase):
@@ -47,3 +47,14 @@ def test_convert_ipynb_to_script(self):
mode='w', delete=False) as output_file:
convert_ipynb_to_script(notebook_path, output_file.name)
self.assertTrue(os.path.getsize(output_file.name) > 0)
+
+ def test_extract_gist_id(self):
+ self.assertEqual(
+ extract_gist_id(
+ "https://gist.github.com/jacobtomlinson/"
+ "c9852fa17d3463acc14dca1217d911f6"),
+ "c9852fa17d3463acc14dca1217d911f6")
+
+ self.assertEqual(
+ extract_gist_id("c9852fa17d3463acc14dca1217d911f6"),
+ "c9852fa17d3463acc14dca1217d911f6")
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -403,6 +403,19 @@ def test_install_specific_local_git_module(self):
config["branch"])
shutil.rmtree(repo_path, onerror=del_rw)
+ def test_install_gist_module(self):
+ opsdroid, loader = self.setup()
+ config = {"name": "ping",
+ "type": "skill",
+ "install_path": os.path.join(
+ self._tmp_dir, "test_gist_module_file"),
+ "gist": "https://gist.github.com/jacobtomlinson/"
+ "c9852fa17d3463acc14dca1217d911f6"}
+
+ with mock.patch.object(loader, '_install_gist_module') as mockgist:
+ loader._install_module(config)
+ self.assertTrue(mockgist.called)
+
def test_install_specific_local_path_module(self):
opsdroid, loader = self.setup()
repo_path = os.path.join(self._tmp_dir, "testrepo")
@@ -525,3 +538,29 @@ def test_update_existing_git_module(self):
mockpull.assert_called_with(config["install_path"])
shutil.rmtree(config["install_path"], onerror=del_rw)
+
+ def test_install_gist_module_file(self):
+ opsdroid, loader = self.setup()
+ config = {"name": "ping",
+ "type": "skill",
+ "install_path": os.path.join(
+ self._tmp_dir, "test_gist_module_file"),
+ "gist": "https://gist.github.com/jacobtomlinson/"
+ "6dd35e0f62d6b779d3d0d140f338d3e5"}
+ loader._install_gist_module(config)
+ self.assertTrue(os.path.isfile(os.path.join(
+ config["install_path"], "__init__.py")))
+ shutil.rmtree(config["install_path"], onerror=del_rw)
+
+ def test_install_gist_module_notebook(self):
+ opsdroid, loader = self.setup()
+ config = {"name": "ping",
+ "type": "skill",
+ "install_path": os.path.join(
+ self._tmp_dir, "test_gist_module_file"),
+ "gist": "https://gist.github.com/jacobtomlinson/"
+ "c9852fa17d3463acc14dca1217d911f6"}
+ loader._install_gist_module(config)
+ self.assertTrue(os.path.isfile(os.path.join(
+ config["install_path"], "__init__.py")))
+ shutil.rmtree(config["install_path"], onerror=del_rw)
| Add GitHub Gist support
We should add support for [GitHub Gists](https://gist.github.com/) as a remote skill source. This would make sense for single file skills and would be particularly useful when coupled with Jupyter Notebooks (#571). There is a plugin for Jupyter which publishes a notebook as a Gist, so you could then reference it in your opsdroid config to load it in.
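A rough sketch of what fetching a skill from a gist could look like, using only the standard library and GitHub's public gist API (the ID below is just an example extracted from a gist URL):

```python
# Rough sketch: pull the first python/notebook file out of a public gist.
import json
import urllib.request

gist_id = "c9852fa17d3463acc14dca1217d911f6"  # example ID from a gist URL
request = urllib.request.Request("https://api.github.com/gists/{}".format(gist_id))
gist = json.loads(urllib.request.urlopen(request).read().decode("utf-8"))

# Each entry in gist["files"] carries the filename and its raw content.
skill_files = [f for f in gist["files"].values()
               if f["filename"].endswith((".py", ".ipynb"))]
print(skill_files[0]["content"])
```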
| 2018-07-16T10:52:20 |
|
opsdroid/opsdroid | 589 | opsdroid__opsdroid-589 | [
"580"
] | 0536cc7e1eff3f267cd89026198c31b1827b1245 | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -226,7 +226,7 @@ def include_constructor(loader, node):
loader.construct_scalar(node))
with open(included_yaml, 'r') as included:
- return yaml.load(included)
+ return yaml.safe_load(included)
yaml.add_constructor('!envvar', envvar_constructor)
yaml.add_constructor('!include', include_constructor)
| diff --git a/tests/configs/exploit.yaml b/tests/configs/exploit.yaml
new file mode 100644
--- /dev/null
+++ b/tests/configs/exploit.yaml
@@ -0,0 +1 @@
+!!python/object/apply:os.system ["echo 'Oops!';"]
diff --git a/tests/configs/include_exploit.yaml b/tests/configs/include_exploit.yaml
new file mode 100644
--- /dev/null
+++ b/tests/configs/include_exploit.yaml
@@ -0,0 +1 @@
+!include exploit.yaml
\ No newline at end of file
diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -50,6 +50,14 @@ def test_load_config_file_with_include(self):
self.assertIsNotNone(config)
self.assertEqual(config, config2)
+ def test_yaml_load_exploit(self):
+ opsdroid, loader = self.setup()
+ config = loader.load_config_file(
+ [os.path.abspath("tests/configs/include_exploit.yaml")])
+ self.assertIsNone(config)
+ # If the command in exploit.yaml is echoed it will return 0
+ self.assertNotEqual(config, 0)
+
def test_load_config_file_with_env_vars(self):
opsdroid, loader = self.setup()
os.environ["ENVVAR"] = 'test'
| Potential vulnerability with yaml.loader
# Description
I was listening to Talk Python to Me podcast - the one where they talk about Python vulnerabilities and things that we shouldn't be doing in Python. One of the things they mentioned was using `yaml.load` instead of `yaml.safe_load`.
I know that when I wrote the bit of code so we can use `!include` I used `yaml.load`, so perhaps this should be changed to `yaml.safe_load`. One thing I didn't understand was whether `yaml.load` is now set to safe loading by default or not. (Mostly due to [this issue](https://github.com/yaml/pyyaml/issues/5)).
I tried to swap `yaml.load` for `yaml.safe_load` but tox complained and both the environmental variables and the `!include` tests are failing.
Should I dig deeper and try to make the safe_load work?
Also, this article in hackernoon might be interesting for reference about this issue - [10 common security gotchas in Python and how to avoid them](https://hackernoon.com/10-common-security-gotchas-in-python-and-how-to-avoid-them-e19fbe265e03)
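For what it's worth, PyYAML does allow custom tags to be registered against the safe loader too; a minimal sketch (the `!include` tag matches the existing config feature, the rest is illustrative):

```python
# Illustrative: keep the !include tag while switching to yaml.safe_load.
import yaml

with open("skills.yaml", "w") as example:
    example.write("- name: hello\n")


def include_constructor(loader, node):
    with open(loader.construct_scalar(node)) as included:
        # safe_load rejects python-object tags such as !!python/object/apply.
        return yaml.safe_load(included)


# Registering against SafeLoader means yaml.safe_load() understands !include.
yaml.add_constructor('!include', include_constructor, Loader=yaml.SafeLoader)

print(yaml.safe_load("skills: !include skills.yaml"))
```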
| Funnily enough I was looking into this yesterday!
The recent PRs have failed due to there being an [open CVE for `pyyaml`](https://github.com/yaml/pyyaml/issues/207). From looking into it I think this vulnerability has been acknowledged for a while, but when using the latest version of `pyyaml` with python 3.7 you may experience the vulnerability.
They are planning on releasing a new version of `pyyaml` which addresses this vulnerability. As this issue will affect all existing versions of opsdroid I think we are ok to go ahead and merge PRs which have failed due to this.
I totally agree that we should explore `yaml.safe_load`. If you have the time to dig deeper that would be great!
Yeah, I was a bit confused due to that PR that said it implements `safe_load` as default, although the documentation still tells you not to use `load` and to use `safe_load` instead. For some reason the tests are failing; I will look into it further and try to figure out why.
Getting #574 merged is the priority at the moment. If you could look into it that would be great. | 2018-07-27T12:43:28 |
opsdroid/opsdroid | 612 | opsdroid__opsdroid-612 | [
"609"
] | dc3f18097c3c913f0e785c2792d9bd8fcab71c79 | diff --git a/opsdroid/database.py b/opsdroid/database/__init__.py
similarity index 100%
rename from opsdroid/database.py
rename to opsdroid/database/__init__.py
| Databases fail to load under 0.12
# Description
When I configure opsdroid to use a database (tested with the matrix or mongo database) I get the following error:
```
Traceback (most recent call last):
File "/usr/local/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.6/site-packages/opsdroid/__main__.py", line 201, in <module>
init()
File "/usr/local/lib/python3.6/site-packages/opsdroid/__main__.py", line 198, in init
main()
File "/usr/local/lib/python3.6/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.6/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.6/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/opsdroid/__main__.py", line 192, in main
opsdroid.start_loop()
File "/usr/local/lib/python3.6/site-packages/opsdroid/core.py", line 131, in start_loop
self.loader.load_modules_from_config(self.config)
File "/usr/local/lib/python3.6/site-packages/opsdroid/loader.py", line 266, in load_modules_from_config
databases = self._load_modules('database', config['databases'])
File "/usr/local/lib/python3.6/site-packages/opsdroid/loader.py", line 311, in _load_modules
config["is_builtin"] = self.is_builtin_module(config)
File "/usr/local/lib/python3.6/site-packages/opsdroid/loader.py", line 94, in is_builtin_module
module_name=config["name"]
File "/usr/local/lib/python3.6/importlib/util.py", line 89, in find_spec
return _find_spec(fullname, parent.__path__)
AttributeError: module 'opsdroid.database' has no attribute '__path__'
```
## Steps to Reproduce
start opsdroid with a database config section.
## Expected Functionality
The database should load
## Experienced Functionality
opsdroid crashes
## Versions
- **Opsdroid version:** 0.12.1
- **Python version:** 3.6 (conda)
- **OS/Docker version:** Arch Linux
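For context, `importlib.util.find_spec('pkg.sub')` needs the parent to be a package, i.e. something with a `__path__`, which a single `database.py` module does not have; the rename in the patch above turns it into a package. A tiny illustration with stdlib modules (hypothetical, just to show the mechanism):

```python
# Minimal illustration: resolving 'pkg.sub' needs pkg.__path__, which only packages have.
import importlib.util
import string   # a plain module: no __path__
import urllib   # a package: has __path__

print(hasattr(string, "__path__"))   # False
print(hasattr(urllib, "__path__"))   # True
print(importlib.util.find_spec("urllib.request"))  # works, parent is a package

try:
    importlib.util.find_spec("string.whatever")
except (AttributeError, ModuleNotFoundError) as error:
    # Python 3.6 surfaces this as the AttributeError shown in the traceback above.
    print(type(error).__name__, error)
```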
| Thanks for raising this!
I tried to work out why it was going wrong, but everything I did led to new and even more incomprehensible errors lol | 2018-08-20T12:34:32 |
|
opsdroid/opsdroid | 615 | opsdroid__opsdroid-615 | [
"613"
] | 043630361f7fe9b67810ab6a91b270ac7e4a0da0 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -72,6 +72,7 @@ def run(self):
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
'Topic :: Communications :: Chat',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: Libraries :: Python Modules'
| Add Python 3.7 support
We need to update opsdroid to be fully supported in 3.7.
- [x] Test against Python 3.7.
- [x] Travis
- [x] AppVeyor
- [x] Fix any bugs highlighted.
- [x] Add 3.7 to supported versions in `setup.py`.
- [ ] ~Update docker base image to be latest supported version~.
| 2018-08-20T13:43:14 |
||
opsdroid/opsdroid | 653 | opsdroid__opsdroid-653 | [
"532",
"532"
] | 6bf72dac9c4f0317e3ecafdb26ab4c759c166662 | diff --git a/opsdroid/message.py b/opsdroid/message.py
--- a/opsdroid/message.py
+++ b/opsdroid/message.py
@@ -10,7 +10,31 @@
class Message:
# pylint: disable=too-few-public-methods
- """A message object."""
+ """A message object.
+
+ Stores messages in a format that allows OpsDroid to respond or react with
+ delays for thinking and typing as defined in configuration YAML file.
+
+ Args:
+ text: String text of message
+ user: String name of user sending message
+ room: String name of the room or chat channel in which message was sent
+ connector: Connector object used to interact with given chat service
+ raw_message: Raw message as provided by chat service. None by default
+
+ Attributes:
+ created: Local date and time that message object was created
+ text: Text of message as string
+ user: String name of user sending message
+ room: String name of the room or chat channel in which message was sent
+ connector: Connector object used to interact with given chat service
+ raw_message: Raw message provided by chat service
+ regex: A re match object for the regular expression message was matched
+ against
+ responded_to: Boolean initialized as False. True if message has been
+ responded to
+
+ """
def __init__(self, text, user, room, connector, raw_message=None):
"""Create object with minimum properties."""
@@ -24,7 +48,10 @@ def __init__(self, text, user, room, connector, raw_message=None):
self.responded_to = False
async def _thinking_delay(self):
- """Make opsdroid wait x-seconds before responding."""
+ """Make opsdroid wait x-seconds before responding.
+
+ Number of seconds defined in YAML config. file, accessed via connector.
+ """
seconds = self.connector.configuration.get('thinking-delay', 0)
if isinstance(seconds, list):
@@ -33,7 +60,11 @@ async def _thinking_delay(self):
await asyncio.sleep(seconds)
async def _typing_delay(self, text):
- """Simulate typing, takes an int or float to delay reply."""
+ """Delays reply to simulate typing.
+
+ Seconds to delay equals number of characters in response multiplied by
+ number of seconds defined in YAML config. file, accessed via connector.
+ """
seconds = self.connector.configuration.get('typing-delay', 0)
char_count = len(text)
@@ -43,7 +74,13 @@ async def _typing_delay(self, text):
await asyncio.sleep(char_count*seconds)
async def respond(self, text, room=None):
- """Respond to this message using the connector it was created by."""
+ """Respond to this message using the connector it was created by.
+
+ Creates copy of this message with updated text as response.
+ Delays message if thinking or typing delay present in config. file.
+ Updates responded_to attribute to True if False.
+ Logs response and response time in OpsDroid object stats.
+ """
opsdroid = get_opsdroid()
response = copy(self)
response.text = text
@@ -64,7 +101,17 @@ async def respond(self, text, room=None):
self.responded_to = True
async def react(self, emoji):
- """React to this message using the connector it was created by."""
+ """React to this message with emoji using the specified connector.
+
+ Delays message if thinking delay present in config. file.
+
+ Args:
+ emoji: Sting name of emoji with which OpsDroid will react.
+
+ Returns:
+ bool: True for message successfully sent. False otherwise.
+
+ """
if 'thinking-delay' in self.connector.configuration:
await self._thinking_delay()
return await self.connector.react(self, emoji)
| Add Google Style Docstrings
We should implement Google Style Docstrings to every function, method, class in opsdroid. This style will support existing documentation and will help in the future by generating documentation automatically.
This will take a bit of effort, so this issue can be worked on by more than one contributor; just make sure that everyone knows what you are working on, in order to avoid other contributors spending time on something that you are already working on.
If you are unfamiliar with the Google Style Docstrings I'd recommend that you check these resources:
- [Sphinx 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
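For a quick flavour of the style, a docstring for a made-up helper might look like the sketch below; the checklist of real functions to update follows.

```python
_store = {}  # stand-in for a real backend; the function below is purely illustrative


def put(key, data):
    """Store a value under the given key.

    Args:
        key (str): Name to store the value under.
        data (object): Any JSON-serialisable value to persist.

    Returns:
        bool: True if the value was written successfully.

    Raises:
        KeyError: If ``key`` is empty.

    """
    if not key:
        raise KeyError("key must not be empty")
    _store[key] = data
    return True
```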
Docstrings that need to be updated:
- main.py
- [x] configure_lang
- [ ] configure_log
- [ ] get_logging_level
- [ ] check_dependencies
- [ ] print_version
- [ ] print_example_config
- [ ] edit_files
- [x] welcome_message
- ~~helper.py~~
- [x] get_opsdroid
- [x] del_rw
- [x] move_config_to_appdir
- memory.py
- [x] Memory
- [x] get
- [x] put
- [x] _get_from_database
- [x] _put_to_database
- message.py
- [x] Message
- [x] __init__
- [x] _thinking_delay
- [x] _typing delay
- [x] respond
- [x] react
- web.py
- [ ] Web
- [x] get_port
- [x] get_host
- [x] get_ssl_context
- [ ] start
- [ ] build_response
- [ ] web_index_handler
- [ ] web_stats_handler
- matchers.py
- [ ] match_regex
- [ ] match_apiai_action
- [ ] match_apiai_intent
- [ ] match_dialogflow_action
- [ ] match_dialogflow_intent
- [ ] match_luisai_intent
- [ ] match_rasanlu
- [ ] match_recastai
- [ ] match_witai
- [ ] match_crontab
- [ ] match_webhook
- [ ] match_always
- core.py
- [ ] OpsDroid
- [ ] default_connector
- [ ] exit
- [ ] critical
- [ ] call_stop
- [ ] disconnect
- [ ] stop
- [ ] load
- [ ] start_loop
- [x] setup_skills
- [ ] train_parsers
- [ ] start_connector_tasks
- [ ] start_database
- [ ] run_skill
- [ ] get_ranked_skills
- [ ] parse
- loader.py
- [ ] Loader
- [x] import_module_from_spec
- [x] import_module
- [x] check_cache
- [x] build_module_import_path
- [x] build_module_install_path
- [x] git_clone
- [x] git_pull
- [x] pip_install_deps
- [x] create_default_config
- [x] load_config_file
- [ ] envvar_constructor
- [ ] include_constructor
- [x] setup_modules_directory
- [x] load_modules_from_config
- [x] _load_modules
- [x] _install_module
- [x] _update_module
- [ ] _install_git_module
- [x] _install_local_module
---- ORIGINAL POST ----
I've been wondering about this for a while now and I would like to know if we should replace/update all the docstrings in opsdroid with the Google Style doc strings.
I think this could help new and old contributors to contribute and commit to opsdroid, since Google Style docstrings give more information about every method/function and specify clearly what sort of input the function/method expects, what it will return and what will be raised (if applicable).
The downside of this style is that the length of every .py file will increase due to the docstrings, but since most IDEs allow you to hide those fields it shouldn't be too bad.
Here is a good example of Google Style docstrings: [Sphinx 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)
I would like to know what you all think about this idea and if it's worth spending time on it.
| Yes we should definitely do this!
It can also be really useful for automatically generating reference documentation.
Awesome I'll wait a few days to see if anyone is opposed to this idea or if they would like to give some advice/comment on the issue.
If in a few days no one says anything I'll edit this issue just to explain more in depth what we expect of the comments and how to do it - I'd recommend dividing opsdroid per each .py file so different people can contribute to the issue
I like the idea. Is it possible to add a test that inspects the doc strings, and fails if they don't match the format? If so, would Jacob be happy with test coverage "reducing" in the short term as this test was added, but before all the doc strings complied?
On 29 April 2018 2:42:05 am AEST, "Fábio Rosado" <[email protected]> wrote:
>Awesome I'll wait a few days to see if anyone is opposed to this idea
>or if they would like to give some advice/comment on the issue.
>
>If in a few days no one says anything I'll edit this issue just to
>explain more in depth what we expect of the comments and how to do it -
>I'd recommend dividing opsdroid per each .py file so different people
>can contribute to the issue
>
>--
>You are receiving this because you are subscribed to this thread.
>Reply to this email directly or view it on GitHub:
>https://github.com/opsdroid/opsdroid/issues/532#issuecomment-385189370
--
Sent from my Android device with K-9 Mail. Please excuse my brevity.
Yes I'm happy with that approach.
I've been thinking about it and I think there are two reasons why anyone would want to do this. The first is for autogenerating documentation, the second is making it easier for people to contribute.
As you said you are intending this to help people contribute, and I definitely agree. I just want to be clear on why we are doing this beyond it just being a thing that some projects do.
We currently run `pydocstyle` as part of the lint suite. I wonder if there is a way of telling it to enforce this docstring style?
I'm not sure if it's possible to enforce google doc style in the lint, but I know that you can run tests on the docstrings like @go8ose suggested: Sphinx has a command for this (it uses the doctest module), but these tests might cause some issues and headaches.
The doctests will use the string representation present in the docstring to run the tests, if the result is not consistent like... a function that deals with dates for example and uses date.now() this test will always fail.
Another example would be running doctests with dictionaries, these tests will mostly fail due to the unsorted nature of dicts, the only way to make them pass would be to sort the dict all the time.
One way to work around it would be to just test some docstrings and not others. In Sphinx you can just add the command:
```
..doctest::
>>> foo()
bar
```
Finally, I believe that all the tests that we have at the moment do a very good job at testing every single piece of code in opsdroid, so perhaps adding the doctests would be extra effort for no real gain - they would test what is being tested already.
--EDIT--
I've updated my first post with all the functions, classes and methods that need to be updated, let me know if you need some added in or removed
Hi Fabio,
I'm not suggesting we add more tests in the form of doctests. That would indeed be a waste of effort. I'm suggesting we check conformance with the google style doc strings.
Jacob suggested seeing if this can be checked in the linting run. That is a good idea, linting is what I should have suggested initially.
Cheers,
Geoff
On 30 April 2018 5:51:38 pm AEST, "Fábio Rosado" <[email protected]> wrote:
>I'm not sure if it's possible to enforce google doc style in the lint,
>but I know that you can run tests on the docstrings like @go8ose
>suggested, Sphinx has a command for this (it uses the doctest module),
>but this tests might provide some issues and headaches.
>
>The doctests will use the string representation present in the
>docstring to run the tests, if the result is not consistent like... a
>function that deals with dates for example and uses date.now() this
>test will always fail.
>Another example would be running doctests with dictionaries, these
>tests will mostly fail due to the unsorted nature of dicts, the only
>way to make them pass would be to sort the dict all the time.
>
>One way to work around it would be to just test some docstrings and not
>others. In Sphinx you can just add the command:
>
>``` ..doctest::
>>>> foo()
>bar
>```
>
>Finally, I believe that all the tests that we have at the moment do a
>very good job at testing every single piece of code in opsdroid so
>perhaps adding the doctests would be extra effort for no real gain -
>these will test what it's being tested already.
>
>--
>You are receiving this because you were mentioned.
>Reply to this email directly or view it on GitHub:
>https://github.com/opsdroid/opsdroid/issues/532#issuecomment-385332374
--
Sent from my Android device with K-9 Mail. Please excuse my brevity.
This tool looks like it tests the docstrings against the google convention. We should explore it more.
https://github.com/terrencepreilly/darglint
Hi Fabio,
I like your idea, I'm new here on this project and I can try to do something and make a pull request.
For my part I'm going to begin with **helper.py**, is that ok for you?
Thanks, best regards
Heya @sims34 yeah that would be much appreciated, let me know in gitter if you need any help with this
Hi Fabio,
I am new here on this project. I was hoping I could help out with main.py
Regards
Hey @purvaudai, please go ahead!
Hello @purvaudai did you manage to work on `main.py`? If you are stuck with something let us know, we would be happy to help you get started
Hi guys, is anyone working on this issue?
@mraza007 not currently. Please go ahead.
Sure I will start working on this issue. Is there a way that you can assign this issue to me and on what files do I have to add google style doc string functions
Sorry for the lack of replies from my side. I tried solving this issue but
got confused, so I decided to look through the docs again, and I got
carried away learning. I am sorry for the lack of professionalism from my
side.
On Mon, 25 Jun 2018 at 19:33, Muhammad <[email protected]> wrote:
> Sure I will start working on this issue. Is there a way that you can
> assign this issue to me and on what files do I have to add google style doc
> string functions
>
> โ
> You are receiving this because you were mentioned.
> Reply to this email directly, view it on GitHub
> <https://github.com/opsdroid/opsdroid/issues/532#issuecomment-399963131>,
> or mute the thread
> <https://github.com/notifications/unsubscribe-auth/AeXNt1F1hUU9JhsW2bl75KQG7SRQEceRks5uAO2_gaJpZM4TqkMs>
> .
>
--
Purva Udai Singh
@purvaudai Hey, are you working on this issue? Do you want to continue?
Hi guys I'd love to contribute too.
@mraza007 @purvaudai I know you guys are working on this, but if you need my help with any of the file, I'll be more than happy to contribute.
@mraza007 @purvaudai @NikhilRaverkar Thanks for all wanting to contribute! There is a lot to be done on this issue so I strongly recommend picking a file and starting work on it.
Don't worry too much about duplicates, it's unlikely to happen given the number of methods that need updating. I would also be happy for you to submit lots of small PRs. Just pick a few methods, update the docstrings and raise a PR.
Yup I think that would be a great idea
@purvaudai Don't worry my friend sometimes these things happen, if you want to contribute to opsdroid in the future we will be glad to help you
@mraza007 @NikhilRaverkar If you guys need any help, let us know. You can work on different files if you want and if something is not clear feel free to hit us up either in here or our [gitter channel](https://gitter.im/opsdroid/developers)
Sure I just joined the channel I will start working on this over the weekend
I'll start working on this over the weekend.
Thanks a lot,
Nikhil
> On Jun 28, 2018, at 10:44 AM, Muhammad <[email protected]> wrote:
>
> Sure I just joined the channel I will start working on this over the week
>
> โ
> You are receiving this because you were mentioned.
> Reply to this email directly, view it on GitHub, or mute the thread.
I would like to contribute to this. For starters I was thinking about taking the web.py file. :smile:
Hello, please go ahead and let us know if you need any help
@FabioRosado Can I grab message.py? This would be my first issue!
@archime please go ahead! Let me know if you need any help with this issue. Also, welcome to the project!
| 2018-09-23T17:01:00 |
|
opsdroid/opsdroid | 679 | opsdroid__opsdroid-679 | [
"671"
] | 16d3a9e8ab128b97625888be29c5a7319fb7886f | diff --git a/opsdroid/database/sqlite/__init__.py b/opsdroid/database/sqlite/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/database/sqlite/__init__.py
@@ -0,0 +1,236 @@
+"""A module for sqlite database."""
+import os
+import logging
+import json
+import datetime
+import aiosqlite
+
+from opsdroid.const import DEFAULT_ROOT_PATH
+from opsdroid.database import Database
+
+_LOGGER = logging.getLogger(__name__)
+
+# pylint: disable=too-few-public-methods
+# As the current module needs only one public method to register json types
+
+
+class DatabaseSqlite(Database):
+ """A sqlite database class.
+
+ SQLite Database class used to persist data in sqlite.
+
+ """
+
+ def __init__(self, config):
+ """Initialise the sqlite database.
+
+ Set basic properties of the database. Initialise properties like
+ name, connection arguments, database file, table name and config.
+
+ Args:
+ config (dict): The configuration of the database which consists
+ of `file` and `table` name of the sqlite database
+ specified in `configuration.yaml` file.
+
+ """
+ super().__init__(config)
+ self.name = "sqlite"
+ self.config = config
+ self.conn_args = {'isolation_level': None}
+ self.db_file = None
+ self.table = None
+ _LOGGER.debug(_("Loaded sqlite database connector"))
+
+ async def connect(self, opsdroid):
+ """Connect to the database.
+
+ This method will connect to the sqlite database. It will create
+ the database file named `sqlite.db` in DEFAULT_ROOT_PATH and set
+ the table name to `opsdroid`. It will create the table if it does
+ not exist in the database.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+
+ """
+ self.db_file = self.config.get(
+ "file", os.path.join(DEFAULT_ROOT_PATH, "sqlite.db"))
+ self.table = self.config.get("table", "opsdroid")
+
+ async with aiosqlite.connect(self.db_file, **self.conn_args) as _db:
+ await _db.execute(
+ "CREATE TABLE IF NOT EXISTS {}"
+ "(key text PRIMARY KEY, data text)"
+ .format(self.table)
+ )
+
+ self.client = _db
+ _LOGGER.info(_("Connected to sqlite %s"), self.db_file)
+
+ async def put(self, key, data):
+ """Put data into the database.
+
+ This method will insert or replace an object into the database for
+ a given key. The data object is serialised into JSON data using the
+ JSONEncoder class.
+
+ Args:
+ key (string): The key to store the data object under.
+ data (object): The data object to store.
+
+ """
+ _LOGGER.debug(_("Putting %s into sqlite"), key)
+ json_data = json.dumps(data, cls=JSONEncoder)
+
+ async with aiosqlite.connect(self.db_file, **self.conn_args) as _db:
+ cur = await _db.cursor()
+ await cur.execute(
+ "DELETE FROM {} WHERE key=?".format(self.table), (key,))
+ await cur.execute(
+ "INSERT INTO {} VALUES (?, ?)".format(self.table),
+ (key, json_data))
+
+ self.client = _db
+
+ async def get(self, key):
+ """Get data from the database for a given key.
+
+ Args:
+ key (string): The key to lookup in the database.
+
+ Returns:
+ object or None: The data object stored for that key, or None if no
+ object found for that key.
+
+ """
+ _LOGGER.debug(_("Getting %s from sqlite"), key)
+ data = None
+
+ async with aiosqlite.connect(self.db_file, **self.conn_args) as _db:
+ cur = await _db.cursor()
+ await cur.execute(
+ "SELECT data FROM {} WHERE key=?".format(self.table),
+ (key,))
+ row = await cur.fetchone()
+ if row:
+ data = json.loads(row[0], object_hook=JSONDecoder())
+
+ self.client = _db
+ return data
+
+
+class JSONEncoder(json.JSONEncoder):
+ """A extended JSONEncoder class.
+
+ This class is customised JSONEncoder class which helps to convert
+ dict to JSON. The datetime objects are converted to dict with fields
+ as keys.
+
+ """
+
+ # pylint: disable=method-hidden
+ # See https://github.com/PyCQA/pylint/issues/414 for reference
+
+ serializers = {}
+
+ def default(self, o):
+ """Convert the given datetime object to dict.
+
+ Args:
+ o (object): The datetime object to be marshalled.
+
+ Returns:
+ dict (object): A dict with datatime object data.
+
+ Example:
+ A dict which is returned after marshalling::
+
+ {
+ "__class__": "datetime",
+ "year": 2018,
+ "month": 10,
+ "day": 2,
+ "hour": 0,
+ "minute": 41,
+ "second": 17,
+ "microsecond": 74644
+ }
+
+ """
+ marshaller = self.serializers.get(
+ type(o), super(JSONEncoder, self).default)
+ return marshaller(o)
+
+
+class JSONDecoder():
+ """A JSONDecoder class.
+
+ This class will convert dict containing datetime values
+ to datetime objects.
+
+ """
+
+ decoders = {}
+
+ def __call__(self, dct):
+ """Convert given dict to datetime objects.
+
+ Args:
+ dct (object): A dict containing datetime values and class type.
+
+ Returns:
+ object or dct: The datetime object for given dct, or dct if
+ respective class decoder is not found.
+
+ Example:
+ A datetime object returned after decoding::
+
+ datetime.datetime(2018, 10, 2, 0, 41, 17, 74644)
+
+ """
+ if dct.get('__class__') in self.decoders:
+ return self.decoders[dct['__class__']](dct)
+ return dct
+
+
+def register_json_type(type_cls, fields, decode_fn):
+ """Register JSON types.
+
+ This method will register the serializers and decoders for the
+ JSONEncoder and JSONDecoder classes respectively.
+
+ Args:
+ type_cls (object): A datetime object.
+ fields (list): List of fields used to store data in dict.
+ decode_fn (object): A lambda function object for decoding.
+
+ """
+ type_name = type_cls.__name__
+ JSONEncoder.serializers[type_cls] = lambda obj: dict(
+ __class__=type_name,
+ **{field: getattr(obj, field) for field in fields}
+ )
+ JSONDecoder.decoders[type_name] = decode_fn
+
+
+register_json_type(
+ datetime.datetime,
+ ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'],
+ lambda dct: datetime.datetime(
+ dct['year'], dct['month'], dct['day'],
+ dct['hour'], dct['minute'], dct['second'], dct['microsecond']
+ )
+)
+
+register_json_type(
+ datetime.date,
+ ['year', 'month', 'day'],
+ lambda dct: datetime.date(dct['year'], dct['month'], dct['day'])
+)
+
+register_json_type(
+ datetime.time,
+ ['hour', 'minute', 'second', 'microsecond'],
+ lambda dct: datetime.time(
+ dct['hour'], dct['minute'], dct['second'], dct['microsecond'])
+)
| diff --git a/tests/test_database_sqlite.py b/tests/test_database_sqlite.py
new file mode 100644
--- /dev/null
+++ b/tests/test_database_sqlite.py
@@ -0,0 +1,153 @@
+"""Tests for the DatabaseSqlite class."""
+import asyncio
+import json
+import datetime
+
+import unittest
+import unittest.mock as mock
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.database.sqlite import DatabaseSqlite, JSONEncoder, JSONDecoder
+from opsdroid.database.sqlite import register_json_type
+
+
+class TestDatabaseSqlite(unittest.TestCase):
+ """A database test class.
+
+ Test the opsdroid sqlite database class.
+
+ """
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_init(self):
+ """Test initialisation of database class.
+
+ This method will test the initialisation of the database
+ class. It will assert if the database class properties are
+ declared and equated to None.
+
+ """
+ database = DatabaseSqlite({"file": "sqlite.db"})
+ self.assertEqual(None, database.client)
+ self.assertEqual(None, database.database)
+ self.assertEqual(None, database.db_file)
+ self.assertEqual(None, database.table)
+ self.assertEqual({'isolation_level': None}, database.conn_args)
+
+
+class TestDatabaseSqliteAsync(asynctest.TestCase):
+ """A async database test class.
+
+ Test the async methods of the opsdroid sqlite database class.
+
+ """
+
+ async def test_connect(self):
+ """Test database connection.
+
+ This method will test the database connection of sqlite database.
+ As the database is created `opsdroid` table is created first.
+
+ """
+ database = DatabaseSqlite({"file": "sqlite.db"})
+ opsdroid = amock.CoroutineMock()
+ opsdroid.eventloop = self.loop
+
+ try:
+ await database.connect(opsdroid)
+ except NotImplementedError:
+ raise Exception
+ else:
+ self.assertEqual("opsdroid", database.table)
+ self.assertEqual("Connection", type(database.client).__name__)
+
+ async def test_get_and_put(self):
+ """Test get and put functions of database
+
+ This method will test the get and put functions which help to read
+ and write data from the database. The function `put` a value with
+ key and asserts the same value after the `get` operation is completed.
+
+ """
+ database = DatabaseSqlite({"file": "sqlite.db"})
+ opsdroid = amock.CoroutineMock()
+ opsdroid.eventloop = self.loop
+
+ try:
+ await database.connect(opsdroid)
+ await database.put("hello", {})
+ data = await database.get("hello")
+ except NotImplementedError:
+ raise Exception
+ else:
+ self.assertEqual("opsdroid", database.table)
+ self.assertEqual({}, data)
+ self.assertEqual("Connection", type(database.client).__name__)
+
+
+class TestJSONEncoder(unittest.TestCase):
+ """A JSON Encoder test class.
+
+ Test the custom json encoder class.
+
+ """
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_datetime_to_dict(self):
+ """Test default of json encoder class.
+
+ This method will test the conversion of the datetime
+ object to dict.
+
+ """
+ type_cls = datetime.datetime
+ test_obj = datetime.datetime(2018, 10, 2, 0, 41, 17, 74644)
+ encoder = JSONEncoder()
+ obj = encoder.default(o=test_obj)
+ self.assertEqual({
+ "__class__": type_cls.__name__,
+ "year": 2018,
+ "month": 10,
+ "day": 2,
+ "hour": 0,
+ "minute": 41,
+ "second": 17,
+ "microsecond": 74644
+ }, obj)
+
+
+class TestJSONDecoder(unittest.TestCase):
+ """A JSON Decoder test class.
+
+ Test the custom json decoder class.
+
+ """
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_dict_to_datetime(self):
+ """Test call of json decoder class.
+
+ This method will test the conversion of the dict to
+ datetime object.
+
+ """
+ test_obj = {
+ "__class__": datetime.datetime.__name__,
+ "year": 2018,
+ "month": 10,
+ "day": 2,
+ "hour": 0,
+ "minute": 41,
+ "second": 17,
+ "microsecond": 74644
+ }
+ decoder = JSONDecoder()
+ obj = decoder(test_obj)
+ self.assertEqual(datetime.datetime(2018, 10, 2, 0, 41, 17, 74644), obj)
| Move the sqlite database module into core
This issue covers adding the [sqlite database module](https://github.com/opsdroid/database-sqlite) to core.
## Background
A while ago we began moving connectors and databases from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) connectors but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.database`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/database) and copy the database code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the database if necessary.
- Write tests for the database. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the database `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of databases](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#database-modules).
- Add a deprecation notice to the old database module. (See [the slack connector](https://github.com/opsdroid/connector-slack))
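For orientation, here is a rough usage sketch of the ported module, mirroring the tests above. The `file` option and the default `opsdroid` table come from that test code; the `opsdroid` argument stands for a running opsdroid core instance (the tests substitute a mock), and the stored value is only an example.

```python
from opsdroid.database.sqlite import DatabaseSqlite


async def demo(opsdroid):
    # "file" is the only option the tests above pass in; after connect()
    # the module reads and writes a table named "opsdroid" by default.
    database = DatabaseSqlite({"file": "sqlite.db"})
    await database.connect(opsdroid)
    await database.put("hello", {"greeted": True})
    return await database.get("hello")
```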
| Hi @jacobtomlinson, can I take this issue?
Hello @vshelke, please go ahead and work on this issue, and let us know if you encounter any problems or if you need any help. | 2018-10-01T16:59:19
opsdroid/opsdroid | 684 | opsdroid__opsdroid-684 | [
"668"
] | ca6685e9634ffa6ce5a483f3c159599ee78d5e34 | diff --git a/opsdroid/connector/facebook/__init__.py b/opsdroid/connector/facebook/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/facebook/__init__.py
@@ -0,0 +1,129 @@
+"""A connector for Facebook Messenger."""
+import json
+import logging
+
+import aiohttp
+
+from opsdroid.connector import Connector
+from opsdroid.message import Message
+
+
+_LOGGER = logging.getLogger(__name__)
+_FACEBOOK_SEND_URL = "https://graph.facebook.com/v2.6/me/messages" \
+ "?access_token={}"
+
+
+class ConnectorFacebook(Connector):
+ """A connector for Facebook Messenger.
+
+ It handles the incoming messages from facebook messenger and sends the user
+ messages. It also handles the authentication challenge by verifying the
+ token.
+
+ Attributes:
+ config: The config for this connector specified in the
+ `configuration.yaml` file.
+ name: String name of the connector.
+ opsdroid: opsdroid instance.
+ default_room: String name of default room for chat messages.
+ bot_name: String name for bot.
+
+ """
+
+ def __init__(self, config):
+ """Connector Setup."""
+ super().__init__(config)
+ _LOGGER.debug("Starting facebook connector")
+ self.config = config
+ self.name = self.config.get("name", "facebook")
+ self.opsdroid = None
+ self.default_room = None
+ self.bot_name = config.get("bot-name", 'opsdroid')
+
+ async def connect(self, opsdroid):
+ """Connect to the chat service."""
+ self.opsdroid = opsdroid
+
+ self.opsdroid.web_server.web_app.router.add_post(
+ "/connector/{}".format(self.name),
+ self.facebook_message_handler)
+
+ self.opsdroid.web_server.web_app.router.add_get(
+ "/connector/{}".format(self.name),
+ self.facebook_challenge_handler)
+
+ async def facebook_message_handler(self, request):
+ """Handle incoming message.
+
+ For each entry in request, it will check if the entry is a `messaging`
+ type. Then it will process all the incoming messages.
+
+ Return:
+ A 200 OK response. The Messenger Platform will resend the webhook
+ event every 20 seconds, until a 200 OK response is received.
+ Failing to return a 200 OK may cause your webhook to be
+ unsubscribed by the Messenger Platform.
+
+ """
+ req_data = await request.json()
+
+ if "object" in req_data and req_data["object"] == "page":
+ for entry in req_data["entry"]:
+ for fb_msg in entry["messaging"]:
+ _LOGGER.debug(fb_msg)
+ try:
+ message = Message(fb_msg["message"]["text"],
+ fb_msg["sender"]["id"],
+ fb_msg["sender"]["id"],
+ self)
+ await self.opsdroid.parse(message)
+ except KeyError as error:
+ _LOGGER.error(error)
+
+ return aiohttp.web.Response(
+ text=json.dumps("Received"), status=200)
+
+ async def facebook_challenge_handler(self, request):
+ """Handle auth challenge.
+
+ Return:
+ A response if challenge is a success or failure.
+
+ """
+ _LOGGER.debug(request.query)
+ if request.query["hub.verify_token"] == \
+ self.config.get('verify-token'):
+ return aiohttp.web.Response(
+ text=request.query["hub.challenge"], status=200)
+ return aiohttp.web.Response(
+ text=json.dumps("Bad verify token"), status=403)
+
+ async def listen(self, opsdroid):
+ """Listen for and parse new messages."""
+ pass # Listening is handled by the aiohttp web server
+
+ async def respond(self, message, room=None):
+ """Respond with a message."""
+ _LOGGER.debug("Responding to facebook")
+ url = _FACEBOOK_SEND_URL.format(self.config.get('page-access-token'))
+ headers = {'content-type': 'application/json'}
+ payload = {
+ "recipient": {
+ "id": message.room
+ },
+ "message": {
+ "text": message.text
+ }
+ }
+ async with aiohttp.ClientSession() as session:
+ resp = await session.post(
+ url,
+ data=json.dumps(payload),
+ headers=headers
+ )
+ if resp.status < 300:
+ _LOGGER.info("Responded with: %s", message.text)
+ else:
+ _LOGGER.debug(resp.status)
+ _LOGGER.debug(await resp.text())
+ _LOGGER.error("Unable to respond to facebook")
| diff --git a/tests/test_connector_facebook.py b/tests/test_connector_facebook.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector_facebook.py
@@ -0,0 +1,170 @@
+import unittest
+import asyncio
+
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.core import OpsDroid
+from opsdroid.connector.facebook import ConnectorFacebook
+from opsdroid.message import Message
+
+
+class TestConnectorFacebook(unittest.TestCase):
+ """Test the opsdroid Facebook connector class."""
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_init(self):
+ connector = ConnectorFacebook({})
+ self.assertEqual(None, connector.default_room)
+ self.assertEqual("facebook", connector.name)
+
+ def test_property(self):
+ connector = ConnectorFacebook({})
+ self.assertEqual("facebook", connector.name)
+
+
+class TestConnectorFacebookAsync(asynctest.TestCase):
+ """Test the async methods of the opsdroid Facebook connector class."""
+
+ async def test_connect(self):
+ """Test the connect method adds the handlers."""
+ connector = ConnectorFacebook({})
+ opsdroid = amock.CoroutineMock()
+ opsdroid.web_server = amock.CoroutineMock()
+ opsdroid.web_server.web_app = amock.CoroutineMock()
+ opsdroid.web_server.web_app.router = amock.CoroutineMock()
+ opsdroid.web_server.web_app.router.add_get = amock.CoroutineMock()
+ opsdroid.web_server.web_app.router.add_post = amock.CoroutineMock()
+
+ await connector.connect(opsdroid)
+
+ self.assertTrue(opsdroid.web_server.web_app.router.add_get.called)
+ self.assertTrue(opsdroid.web_server.web_app.router.add_post.called)
+
+ async def test_facebook_message_handler(self):
+ """Test the new facebook message handler."""
+ import aiohttp
+ connector = ConnectorFacebook({})
+ req_ob = {
+ "object": "page",
+ "entry": [{
+ "messaging": [{
+ "message": {"text": "Hello"},
+ "sender": {"id": '1234567890'}
+ }]
+ }]
+ }
+ mock_request = amock.CoroutineMock()
+ mock_request.json = amock.CoroutineMock()
+ mock_request.json.return_value = req_ob
+
+ with OpsDroid() as opsdroid:
+ connector.opsdroid = opsdroid
+ connector.opsdroid.parse = amock.CoroutineMock()
+
+ response = await connector.facebook_message_handler(mock_request)
+ self.assertTrue(connector.opsdroid.parse.called)
+ self.assertEqual(type(response), aiohttp.web.Response)
+ self.assertEqual(response.status, 200)
+
+ async def test_facebook_message_handler_invalid(self):
+ """Test the new facebook message handler for invalid message."""
+ import aiohttp
+ connector = ConnectorFacebook({})
+ req_ob = {
+ "object": "page",
+ "entry": [{
+ "messaging": [{
+ "message": {"text": "Hello"},
+ "sender": {}
+ }]
+ }]
+ }
+ mock_request = amock.CoroutineMock()
+ mock_request.json = amock.CoroutineMock()
+ mock_request.json.return_value = req_ob
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('opsdroid.connector.facebook._LOGGER.error') \
+ as logmock:
+ connector.opsdroid = opsdroid
+ connector.opsdroid.parse = amock.CoroutineMock()
+
+ response = await connector.facebook_message_handler(mock_request)
+ self.assertFalse(connector.opsdroid.parse.called)
+ self.assertTrue(logmock.called)
+ self.assertEqual(type(response), aiohttp.web.Response)
+ self.assertEqual(response.status, 200)
+
+ async def test_facebook_challenge_handler(self):
+ """Test the facebook challenge handler."""
+ import aiohttp
+ connector = ConnectorFacebook({'verify-token': 'token_123'})
+ mock_request = amock.Mock()
+ mock_request.query = {
+ "hub.verify_token": 'token_123',
+ 'hub.challenge': 'challenge_123'
+ }
+
+ response = await connector.facebook_challenge_handler(mock_request)
+ self.assertEqual(type(response), aiohttp.web.Response)
+ self.assertEqual(response.text, 'challenge_123')
+ self.assertEqual(response.status, 200)
+
+ mock_request.query = {
+ "hub.verify_token": 'token_abc',
+ 'hub.challenge': 'challenge_123'
+ }
+ response = await connector.facebook_challenge_handler(mock_request)
+ self.assertEqual(type(response), aiohttp.web.Response)
+ self.assertEqual(response.status, 403)
+
+ async def test_listen(self):
+ """Test that listen does nothing."""
+ connector = ConnectorFacebook({})
+ await connector.listen(None)
+
+ async def test_respond(self):
+ """Test that responding sends a message."""
+ post_response = amock.Mock()
+ post_response.status = 200
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post',
+ new=asynctest.CoroutineMock()) as patched_request:
+ self.assertTrue(opsdroid.__class__.instances)
+ connector = ConnectorFacebook({})
+ room = "a146f52c-548a-11e8-a7d1-28cfe949e12d"
+ test_message = Message(text="Hello world",
+ user="Alice",
+ room=room,
+ connector=connector)
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertTrue(patched_request.called)
+
+ async def test_respond_bad_response(self):
+ """Test that responding sends a message and get bad response."""
+ post_response = amock.Mock()
+ post_response.status = 401
+ post_response.text = amock.CoroutineMock()
+ post_response.text.return_value = "Error"
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post',
+ new=asynctest.CoroutineMock()) as patched_request:
+ self.assertTrue(opsdroid.__class__.instances)
+ connector = ConnectorFacebook({})
+ room = "a146f52c-548a-11e8-a7d1-28cfe949e12d"
+ test_message = Message(text="Hello world",
+ user="Alice",
+ room=room,
+ connector=connector)
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertTrue(patched_request.called)
+ self.assertTrue(post_response.text.called)
| Move Facebook Messenger connector into core
This issue covers adding the [Facebook Messenger connector](https://github.com/opsdroid/connector-facebook) to core.
## Background
A while ago we began moving connectors from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.connector`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/connector) and copy the connector code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the connector if necessary.
- Write tests for the connector. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the connector `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of connectors](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#connector-modules).
- Add a deprecation notice to the old connector. (See [the slack connector](https://github.com/opsdroid/connector-slack))
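As a rough sketch of how the ported connector gets wired up — the option names `verify-token`, `page-access-token` and `bot-name` are the ones read in the connector code above, while the values here are placeholders:

```python
from opsdroid.connector.facebook import ConnectorFacebook

# Placeholder credentials; real values come from your Facebook app settings.
connector = ConnectorFacebook({
    "name": "facebook",
    "verify-token": "my-verify-token",
    "page-access-token": "my-page-access-token",
    "bot-name": "opsdroid",
})
# connect() registers the GET (challenge) and POST (message) handlers on
# opsdroid's web server, so Facebook can reach them at /connector/facebook.
```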
| Happy to help here. | 2018-10-02T13:31:02
opsdroid/opsdroid | 685 | opsdroid__opsdroid-685 | [
"672"
] | a427866896b169a16df9a1a042628f5e9d1da4c4 | diff --git a/opsdroid/connector/rocketchat/__init__.py b/opsdroid/connector/rocketchat/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/rocketchat/__init__.py
@@ -0,0 +1,195 @@
+"""A connector for Rocket.Chat."""
+import asyncio
+import logging
+import datetime
+import aiohttp
+
+from opsdroid.connector import Connector
+from opsdroid.message import Message
+
+_LOGGER = logging.getLogger(__name__)
+API_PATH = '/api/v1/'
+
+
+class RocketChat(Connector):
+ """A connector for the chat service Rocket.Chat."""
+
+ def __init__(self, config):
+ """Create the connector.
+
+ Sets up logic for the Connector class, gets data from
+ the config.yaml or adds default values.
+
+ Args:
+ config (dict): configuration settings from the
+ file config.yaml.
+
+ """
+ super().__init__(config)
+ self.name = "rocket.chat"
+ self.config = config
+ self.default_room = config.get("default-room", "general")
+ self.group = config.get("group", None)
+ self.url = config.get("channel-url", "https://open.rocket.chat")
+ self.update_interval = config.get("update-interval", 1)
+ self.bot_name = config.get("bot-name", "opsdroid")
+ self.listening = True
+ self.latest_update = datetime.datetime.utcnow().isoformat()
+
+ try:
+ self.user_id = config['user-id']
+ self.token = config['token']
+ self.headers = {
+ 'X-User-Id': self.user_id,
+ "X-Auth-Token": self.token,
+ }
+ except (KeyError, AttributeError):
+ _LOGGER.error("Unable to login: Access token is missing. "
+ "Rocket.Chat connector will not be available.")
+
+ def build_url(self, method):
+ """Build the url to connect with api.
+
+ Helper function to build the url to interact with the
+ Rocket.Chat REST API. Uses the global variable API_PATH
+ that points to current api version. (example: /api/v1/)
+
+
+ Args:
+ method (string): Api call endpoint.
+
+ Return:
+ String that represents full API url.
+
+ """
+ return "{}{}{}".format(self.url, API_PATH, method)
+
+ async def connect(self, opsdroid):
+ """Connect to the chat service.
+
+ This method is used to test if the connection to the chat
+ service is successful. If the connection is successful
+ the response is a JSON format containing information
+ about the user. Other than the user username, the
+ information is not used.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+
+ """
+ _LOGGER.info("Connecting to Rocket.Chat")
+
+ async with aiohttp.ClientSession() as session:
+ resp = await session.get(self.build_url('me'),
+ headers=self.headers)
+ if resp.status != 200:
+ _LOGGER.error("Unable to connect.")
+ _LOGGER.error("Rocket.Chat error %s, %s",
+ resp.status, resp.text)
+ else:
+ json = await resp.json()
+ _LOGGER.debug("Connected to Rocket.Chat as %s",
+ json["username"])
+
+ async def _parse_message(self, opsdroid, response):
+ """Parse the message received.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+ response (dict): Response returned by aiohttp.Client.
+
+ """
+ if response['messages']:
+ message = Message(
+ response['messages'][0]['msg'],
+ response['messages'][0]['u']['username'],
+ response['messages'][0]['rid'],
+ self)
+ _LOGGER.debug("Received message from Rocket.Chat %s",
+ response['messages'][0]['msg'])
+
+ await opsdroid.parse(message)
+ self.latest_update = response['messages'][0]['ts']
+
+ async def _get_message(self, opsdroid):
+ """Connect to the API and get messages.
+
+ This method will only listen to either a channel or a
+ private room called groups by Rocket.Chat. If a group
+ is specified in the config then it takes priority
+ over a channel.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid.core.
+
+ """
+ if self.group:
+ url = self.build_url('groups.history?roomName={}'.format(
+ self.group))
+ self.default_room = self.group
+ else:
+ url = self.build_url('channels.history?roomName={}'.format(
+ self.default_room))
+
+ if self.latest_update:
+ url += '&oldest={}'.format(self.latest_update)
+
+ async with aiohttp.ClientSession() as session:
+ resp = await session.get(url,
+ headers=self.headers)
+
+ if resp.status != 200:
+ _LOGGER.error("Rocket.Chat error %s, %s",
+ resp.status, resp.text)
+ self.listening = False
+ else:
+ json = await resp.json()
+ await self._parse_message(opsdroid, json)
+
+ async def listen(self, opsdroid):
+ """Listen for and parse new messages.
+
+ The method will sleep asynchronously at the end of
+ every loop. The time can either be specified in the
+ config.yaml with the param update-interval - this
+ defaults to 1 second.
+
+ If the channel didn't get any new messages opsdroid
+ will still call the REST API, but won't do anything.
+
+ Args:
+ opsdroid (Opsdroid): An instance of opsdroid core.
+
+ """
+ while self.listening:
+ await self._get_message(opsdroid)
+ await asyncio.sleep(self.update_interval)
+
+ async def respond(self, message, room=None):
+ """Respond with a message.
+
+ The message argument carries both the text to reply with and
+ the room to reply to, depending on the roomId (rid) obtained
+ from the _parse_message method.
+
+ Args:
+ message (object): An instance of Message
+ room (string, optional): Name of the room to respond to.
+
+ """
+ _LOGGER.debug("Responding with: %s", message.text)
+ async with aiohttp.ClientSession() as session:
+ data = {}
+ data['channel'] = message.room
+ data['alias'] = self.bot_name
+ data['text'] = message.text
+ data['avatar'] = ''
+ resp = await session.post(
+ self.build_url('chat.postMessage'),
+ headers=self.headers,
+ data=data)
+
+ if resp.status == 200:
+ _LOGGER.debug('Successfully responded')
+ else:
+ _LOGGER.debug("Error - %s: Unable to respond", resp.status)
| diff --git a/tests/test_connector_rocketchat.py b/tests/test_connector_rocketchat.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector_rocketchat.py
@@ -0,0 +1,243 @@
+"""Tests for the RocketChat class."""
+import asyncio
+import unittest
+import unittest.mock as mock
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.core import OpsDroid
+from opsdroid.connector.rocketchat import RocketChat
+from opsdroid.message import Message
+
+
+class TestRocketChat(unittest.TestCase):
+ """Test the opsdroid Slack connector class."""
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_init(self):
+ """Test that the connector is initialised properly."""
+ connector = RocketChat({
+ 'name': 'rocket.chat',
+ 'access-token': 'test',
+ 'user-id': 'userID'
+ })
+ self.assertEqual("general", connector.default_room)
+ self.assertEqual("rocket.chat", connector.name)
+
+ def test_missing_token(self):
+ """Test that attempt to connect without info raises an error."""
+ with mock.patch('opsdroid.connector.rocketchat._LOGGER.error') \
+ as logmock:
+ RocketChat({})
+ self.assertTrue(logmock.called)
+
+
+class TestConnectorRocketChatAsync(asynctest.TestCase):
+ """Test the async methods of the opsdroid Slack connector class."""
+
+ def setUp(self):
+ self.connector = RocketChat({
+ 'name': 'rocket.chat',
+ 'token': 'test',
+ 'user-id': 'userID',
+ 'default_room': "test"
+ })
+ self.connector.latest_update = '2018-10-08T12:57:37.126Z'
+
+ async def test_connect(self):
+ connect_response = amock.Mock()
+ connect_response.status = 200
+ connect_response.json = amock.CoroutineMock()
+ connect_response.return_value = {
+ "_id": "3vABZrQgDzfcz7LZi",
+ "name": "Fรกbio Rosado",
+ "emails": [
+ {
+ "address": "[email protected]",
+ "verified": True
+ }
+ ],
+ "status": "online",
+ "statusConnection": "online",
+ "username": "FabioRosado",
+ "utcOffset": 1,
+ "active": True,
+ "roles": [
+ "user"
+ ],
+ "settings": {},
+ "email": "[email protected]",
+ "success": True
+ }
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request, \
+ amock.patch('opsdroid.connector.rocketchat._LOGGER.debug',) \
+ as logmock:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(connect_response)
+
+ await self.connector.connect(opsdroid)
+
+ self.assertTrue(logmock.called)
+ self.assertNotEqual(200, patched_request.status)
+ self.assertTrue(patched_request.called)
+
+ async def test_connect_failure(self):
+ result = amock.MagicMock()
+ result.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request, \
+ amock.patch('opsdroid.connector.rocketchat._LOGGER.error',) \
+ as logmock:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(result)
+
+ await self.connector.connect(opsdroid)
+ self.assertTrue(logmock.called)
+
+ async def test_get_message(self):
+ connector_group = RocketChat({
+ 'name': 'rocket.chat',
+ 'token': 'test',
+ 'user-id': 'userID',
+ 'group': "test"
+ })
+ response = amock.Mock()
+ response.status = 200
+ response.json = amock.CoroutineMock()
+ response.return_value = {
+ 'messages': [
+ {
+ "_id": "ZbhuIO764jOIu",
+ "rid": "Ipej45JSbfjt9",
+ "msg": "hows it going",
+ "ts": "2018-05-11T16:05:41.047Z",
+ "u": {
+ "_id": "ZbhuIO764jOIu",
+ "username": "FabioRosado",
+ "name": "Fรกbio Rosado"
+ },
+ "_updatedAt": "2018-05-11T16:05:41.489Z",
+ "editedBy": None,
+ "editedAt": None,
+ "emoji": None,
+ "avatar": None,
+ "alias": None,
+ "customFields": None,
+ "attachments": None,
+ "mentions": [],
+ "channels": []
+ }
+ ]}
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request, \
+ amock.patch.object(connector_group, '_parse_message') \
+ as mocked_parse_message:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(response)
+
+ await connector_group._get_message(opsdroid)
+
+ self.assertTrue(patched_request.called)
+ self.assertTrue(mocked_parse_message.called)
+
+ async def test_parse_message(self):
+ response = {
+ 'messages': [
+ {
+ "_id": "ZbhuIO764jOIu",
+ "rid": "Ipej45JSbfjt9",
+ "msg": "hows it going",
+ "ts": "2018-05-11T16:05:41.047Z",
+ "u": {
+ "_id": "ZbhuIO764jOIu",
+ "username": "FabioRosado",
+ "name": "Fรกbio Rosado"
+ },
+ "_updatedAt": "2018-05-11T16:05:41.489Z",
+ "editedBy": None,
+ "editedAt": None,
+ "emoji": None,
+ "avatar": None,
+ "alias": None,
+ "customFields": None,
+ "attachments": None,
+ "mentions": [],
+ "channels": []
+ }
+ ]}
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ await self.connector._parse_message(opsdroid, response)
+ self.assertLogs('_LOGGER', 'debug')
+ self.assertTrue(mocked_parse.called)
+ self.assertEqual("2018-05-11T16:05:41.047Z",
+ self.connector.latest_update)
+
+
+ async def test_listen(self):
+ self.connector.side_effect = Exception()
+ await self.connector.listen(amock.CoroutineMock())
+
+ async def test_get_message_failure(self):
+ listen_response = amock.Mock()
+ listen_response.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(listen_response)
+ await self.connector._get_message(opsdroid)
+ self.assertLogs('_LOGGER', 'error')
+ self.assertEqual(False, self.connector.listening)
+
+ async def test_respond(self):
+ post_response = amock.Mock()
+ post_response.status = 200
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post') as patched_request, \
+ amock.patch('opsdroid.connector.rocketchat._LOGGER.debug') \
+ as logmock:
+
+ self.assertTrue(opsdroid.__class__.instances)
+ test_message = Message(text="This is a test",
+ user="opsdroid",
+ room="test",
+ connector=self.connector)
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertTrue(patched_request.called)
+ self.assertTrue(logmock.called)
+
+ async def test_respond_failure(self):
+ post_response = amock.Mock()
+ post_response.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post') as patched_request, \
+ amock.patch('opsdroid.connector.rocketchat._LOGGER.debug') \
+ as logmock:
+
+ self.assertTrue(opsdroid.__class__.instances)
+ test_message = Message(text="This is a test",
+ user="opsdroid",
+ room="test",
+ connector=self.connector)
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertTrue(logmock.called)
| Create a Rocket.Chat connector module
Let's add a [Rocket.Chat connector](https://rocket.chat/) as proposed in #498.
## Steps
- Make a new submodule directory in [`opsdroid.connector`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/connector) called `rocketchat` and create an `__init__.py` file inside.
- Implement the connector according to the [documentation on writing connectors](https://opsdroid.readthedocs.io/en/stable/extending/connectors/).
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any new dependencies from the connector if necessary.
- Write tests for the connector. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Create a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of connectors](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#connector-modules).
### Resources to get you started
- [Rocket.Chat REST API](https://rocket.chat/docs/developer-guides/rest-api/)
- [Rocket.Chat Beta Realtime API](https://rocket.chat/docs/developer-guides/realtime-api/)
- [jadlog/rocketchat_API](https://github.com/jadolg/rocketchat_API) (bear in mind my comments in #498)
### Asyncio
Opsdroid uses [asyncio and an event loop](https://docs.python.org/3/library/asyncio.html), so here are some things to bear in mind:
- Functions should be coroutines which are defined with `async def myfunc(): ...`
- Coroutines are called with `await`. For example `await myfunc()`. This is how asyncio allows many functions to take it in turns as they swap over when the current thread calls `await`.
- **Don't block the thread!** If you need to make a call to a blocking function such as `time.sleep()` or any kind of IO request such as `requests.get()` then you should `await` an async variant. For example you can call `await asyncio.sleep(10)` to sleep without blocking the whole application and you can use `aiohttp` as an async url library.
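To illustrate that last bullet, a minimal non-blocking request with `aiohttp` might look like the sketch below; the URL is only a placeholder, and the point is that the coroutine yields to the event loop while waiting instead of blocking it the way `requests.get()` would:

```python
import asyncio

import aiohttp


async def fetch_json(url):
    async with aiohttp.ClientSession() as session:
        # Each await hands control back to the event loop, so other
        # connectors and skills keep running while we wait for the reply.
        resp = await session.get(url)
        return await resp.json()


asyncio.get_event_loop().run_until_complete(fetch_json("https://example.com/api"))
```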
| I have been wanting to tackle this for a while but it kept slipping my mind, so I'll give it a try. | 2018-10-02T15:02:18
opsdroid/opsdroid | 689 | opsdroid__opsdroid-689 | [
"669"
] | bc0baafc2497db93950f8a3ac491153b5f9cc8f7 | diff --git a/opsdroid/database/mongo/__init__.py b/opsdroid/database/mongo/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/database/mongo/__init__.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+"""A module for opsdroid to allow persist in mongo database."""
+import logging
+from motor.motor_asyncio import AsyncIOMotorClient
+
+from opsdroid.database import Database
+
+
+class DatabaseMongo(Database):
+ """A module for opsdroid to allow memory to persist in a mongo database.
+
+ Attributes:
+
+ """
+
+ def __init__(self, config):
+ """Create the connection.
+
+ Set some basic properties from the database config such as the name
+ of this database.
+
+ Args:
+ config (dict): The config for this database specified in the
+ `configuration.yaml` file.
+
+ """
+ super().__init__(config)
+ logging.debug("Loaded mongo database connector")
+ self.name = "mongo"
+ self.config = config
+ self.client = None
+ self.database = None
+
+ async def connect(self, opsdroid):
+ """Connect to the database.
+
+ Args:
+ opsdroid (object): the opsdroid instance
+ """
+ host = self.config["host"] if "host" in self.config else "localhost"
+ port = self.config["port"] if "port" in self.config else "27017"
+ database = self.config["database"] \
+ if "database" in self.config else "opsdroid"
+ path = "mongodb://" + host + ":" + port
+ self.client = AsyncIOMotorClient(path)
+ self.database = self.client[database]
+ logging.info("Connected to mongo")
+
+ async def put(self, key, data):
+ """Insert or replace an object into the database for a given key.
+
+ Args:
+ key (str): the key, used as the collection name
+ data (object): the data to be inserted or replaced
+ """
+ logging.debug("Putting %s into mongo", key)
+ if "_id" in data:
+ await self.database[key].update_one({"_id": data["_id"]},
+ {"$set": data})
+ else:
+ await self.database[key].insert_one(data)
+
+ async def get(self, key):
+ """Get a document from the database (key).
+
+ Args:
+ key (str): the key, used as the collection name.
+ """
+ logging.debug("Getting %s from mongo", key)
+ return await self.database[key].find_one(
+ {"$query": {}, "$orderby": {"$natural": -1}}
+ )
| diff --git a/tests/mockmodules/databases/mongo/mongo_database.py b/tests/mockmodules/databases/mongo/mongo_database.py
new file mode 100644
--- /dev/null
+++ b/tests/mockmodules/databases/mongo/mongo_database.py
@@ -0,0 +1,33 @@
+"""A mocked database module."""
+
+from opsdroid.database.mongo import DatabaseMongo
+
+
+class DatabaseMongoTest():
+ """The mocked database mongo class."""
+
+ def __init__(self, config):
+ """Start the class."""
+ self.config = config
+ self.dummy_db = {}
+
+ async def find_one(self, key):
+ """Mock method find_one.
+
+ Args: key(object) not considered for test
+ """
+ return await self.dummy_db
+
+ async def update_one(self, key, update):
+ """Mock method update_one.
+
+ Args: key(object) not considered for test
+ """
+ return await self.dummy_db
+
+ async def insert_one(self, key):
+ """Mock method insert_one.
+
+ Args: key(object) not considered for test
+ """
+ return self.dummy_db
diff --git a/tests/test_database_mongo.py b/tests/test_database_mongo.py
new file mode 100644
--- /dev/null
+++ b/tests/test_database_mongo.py
@@ -0,0 +1,71 @@
+"""Tests for the DatabaseMongo class. """
+
+import unittest
+import asynctest
+
+
+from opsdroid.database.mongo import DatabaseMongo
+from mockmodules.databases.mongo.mongo_database import DatabaseMongoTest
+
+class TestDatabaseMongoClass(unittest.TestCase):
+ """Test the opsdroid mongo database class."""
+
+ def test_init(self):
+ """Initialization fo mock database"""
+ config = {"example_item": "test"}
+ database = DatabaseMongo(config)
+ self.assertEqual("mongo", database.name)
+ self.assertEqual("test", database.config["example_item"])
+
+
+class TestDatabaseBaseMongoClassAsync(asynctest.TestCase):
+ """Test the opsdroid database base class."""
+
+ async def test_connect(self):
+ """test the method connect"""
+ database = DatabaseMongo({})
+ try:
+ await database.connect({})
+ except NotImplementedError:
+ raise Exception
+ else:
+ pass
+
+ async def test_get2(self):
+ """test of mocked method get"""
+ database = DatabaseMongo({})
+ database.database = {}
+ database.database['test'] = DatabaseMongoTest({})
+ try:
+ await database.get("test")
+ except TypeError:
+ pass
+ else:
+ raise Exception
+
+ async def test_put2(self):
+ """test of mocked method put"""
+ database = DatabaseMongo({})
+ try:
+ await database.put("test", {})
+ except TypeError:
+ pass
+ else:
+ raise Exception
+
+ async def test_put(self):
+ """test of mocked put"""
+ database = DatabaseMongo({})
+ database.database = {}
+ database.database['test'] = DatabaseMongoTest({})
+ try:
+ await database.put("test", {"_id":"0" , "key":"value"})
+ except TypeError:
+ try:
+ await database.put("test", {})
+ except NotImplementedError:
+ raise Exception
+ else:
+ pass
+ else:
+ raise Exception
| Move MongoDB database module into core
This issue covers adding the [MongoDB database module](https://github.com/opsdroid/database-mongo) to core.
## Background
A while ago we began moving connectors and databases from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) connectors but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.database`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/database) and copy the database code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the database if necessary.
- Write tests for the database. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the database `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of databases](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#database-modules).
- Add a deprecation notice to the old database module. (See [the slack connector](https://github.com/opsdroid/connector-slack))
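For reference, a rough sketch of how the ported module is configured and used from opsdroid; the `host`, `port` and `database` options are the ones read in `connect()` above (all optional, defaulting to `localhost`, `27017` and `opsdroid`), and the stored document is only an example:

```python
from opsdroid.database.mongo import DatabaseMongo

database = DatabaseMongo({
    "host": "localhost",
    "port": "27017",       # note: the module expects the port as a string
    "database": "opsdroid",
})

# Inside opsdroid these calls are awaited from the memory layer:
#     await database.connect(opsdroid)
#     await database.put("mykey", {"_id": "0", "value": 42})
#     document = await database.get("mykey")
```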
| Hi, may I work on this issue? I'm new to open source. | 2018-10-03T10:49:07 |
opsdroid/opsdroid | 691 | opsdroid__opsdroid-691 | [
"665"
] | f65a0e1c4347eddcd592baafa73822e805a5bb9d | diff --git a/opsdroid/connector/telegram/__init__.py b/opsdroid/connector/telegram/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/telegram/__init__.py
@@ -0,0 +1,190 @@
+"""A connector for Telegram."""
+import asyncio
+import logging
+import aiohttp
+
+
+from opsdroid.connector import Connector
+from opsdroid.message import Message
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class ConnectorTelegram(Connector):
+ """A connector the the char service Telegram."""
+
+ def __init__(self, config):
+ """Create the connector.
+
+ Args:
+ config (dict): configuration settings from the
+ file config.yaml.
+
+ """
+ _LOGGER.debug("Loaded telegram connector")
+ super().__init__(config)
+ self.name = "telegram"
+ self.latest_update = None
+ self.default_room = None
+ self.listening = True
+ self.default_user = config.get("default-user", None)
+ self.whitelisted_users = config.get("whitelisted-users", None)
+ self.update_interval = config.get("update_interval", 1)
+
+ try:
+ self.token = config["token"]
+ except (KeyError, AttributeError):
+ _LOGGER.error("Unable to login: Access token is missing. "
+ "Telegram connector will be unavailable.")
+
+ def build_url(self, method):
+ """Build the url to connect to the API.
+
+ Args:
+ method (string): API call endpoint.
+
+ Return:
+ String that represents the full API url.
+
+ """
+ return "https://api.telegram.org/bot{}/{}".format(self.token, method)
+
+ async def connect(self, opsdroid):
+ """Connect to Telegram.
+
+ This method is not an authorization call. It basically
+ checks if the API token was provided and makes an API
+ call to Telegram and evaluates the status of the call.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+
+ """
+ _LOGGER.debug("Connecting to telegram")
+ async with aiohttp.ClientSession() as session:
+ resp = await session.get(self.build_url("getMe"))
+
+ if resp.status != 200:
+ _LOGGER.error("Unable to connect")
+ _LOGGER.error("Telegram error %s, %s",
+ resp.status, resp.text)
+ else:
+ json = await resp.json()
+ _LOGGER.debug(json)
+ _LOGGER.debug("Connected to telegram as %s",
+ json["result"]["username"])
+
+ async def _parse_message(self, opsdroid, response):
+ """Handle logic to parse a received message.
+
+ Since everyone can send a private message to any user/bot
+ in Telegram, this method allows to set a list of whitelisted
+ users that can interact with the bot. If any other user tries
+ to interact with the bot the command is not parsed and instead
+ the bot will inform that user that he is not allowed to talk
+ with the bot.
+
+ We also set self.latest_update to +1 in order to get the next
+ available message (or an empty {} if no message has been received
+ yet) with the method self._get_messages().
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+ response (dict): Response returned by aiohttp.ClientSession.
+ """
+ for result in response["result"]:
+ _LOGGER.debug(result)
+ if result["message"]["text"]:
+ user = result["message"]["from"]["username"]
+
+ message = Message(
+ result["message"]["text"],
+ user,
+ result["message"]["chat"],
+ self)
+
+ if not self.whitelisted_users or \
+ user in self.whitelisted_users:
+ await opsdroid.parse(message)
+ else:
+ message.text = "Sorry, you're not allowed " \
+ "to speak with this bot."
+ await self.respond(message)
+ self.latest_update = result["update_id"] + 1
+
+ async def _get_messages(self, opsdroid):
+ """Connect to the Telegram API.
+
+ Uses an aiohttp ClientSession to connect to Telegram API
+ and get the latest messages from the chat service.
+
+ The data["offset"] is used to consume every new message, the API
+ returns an int - "update_id" value. In order to get the next
+ message this value needs to be increased by 1 the next time
+ the API is called. If no new messages exists the API will just
+ return an empty {}.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+
+ """
+ async with aiohttp.ClientSession() as session:
+ data = {}
+ if self.latest_update is not None:
+ data["offset"] = self.latest_update
+ resp = await session.get(self.build_url("getUpdates"),
+ params=data)
+ if resp.status != 200:
+ _LOGGER.error("Telegram error %s, %s",
+ resp.status, resp.text)
+ self.listening = False
+
+ else:
+ json = await resp.json()
+ # _LOGGER.debug(json)
+
+ await self._parse_message(opsdroid, json)
+
+ async def listen(self, opsdroid):
+ """Listen for and parse new messages.
+
+ The bot will always listen to all opened chat windows,
+ as long as opsdroid is running. Since anyone can start
+ a new chat with the bot, it is recommended that a list of
+ whitelisted users be provided in config.yaml.
+
+ The method will sleep asynchronously at the end of
+ every loop. The time can either be specified in the
+ config.yaml with the param update-interval - this
+ defaults to 1 second.
+
+ Args:
+ opsdroid (OpsDroid): An instance of opsdroid core.
+
+ """
+ while self.listening:
+ await self._get_messages(opsdroid)
+
+ await asyncio.sleep(self.update_interval)
+
+ async def respond(self, message, room=None):
+ """Respond with a message.
+
+ Args:
+ message (object): An instance of Message.
+ room (string, optional): Name of the room to respond to.
+
+ """
+ _LOGGER.debug("Responding with: %s", message.text)
+
+ async with aiohttp.ClientSession() as session:
+ data = {}
+ data["chat_id"] = message.room["id"]
+ data["text"] = message.text
+ resp = await session.post(self.build_url("sendMessage"),
+ data=data)
+ if resp.status == 200:
+ _LOGGER.debug("Successfully responded")
+ else:
+ _LOGGER.error("Unable to respond.")
| diff --git a/tests/test_connector_telegram.py b/tests/test_connector_telegram.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector_telegram.py
@@ -0,0 +1,245 @@
+"""Tests for the ConnectorTelegram class."""
+import asyncio
+import unittest
+import unittest.mock as mock
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.core import OpsDroid
+from opsdroid.connector.telegram import ConnectorTelegram
+from opsdroid.message import Message
+
+
+class TestConnectorTelegram(unittest.TestCase):
+ """Test the opsdroid Telegram connector class."""
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_init(self):
+ """Test that the connector is initialised properly."""
+ connector = ConnectorTelegram({
+ 'name': 'telegram',
+ 'token': 'test',
+ })
+ self.assertEqual(None, connector.default_room)
+ self.assertEqual("telegram", connector.name)
+
+ def test_missing_token(self):
+ """Test that attempt to connect without info raises an error."""
+ ConnectorTelegram({})
+ self.assertLogs('_LOGGER', 'error')
+
+
+class TestConnectorTelegramAsync(asynctest.TestCase):
+ """Test the async methods of the opsdroid Telegram connector class."""
+
+ def setUp(self):
+ self.connector = ConnectorTelegram({
+ 'name': 'telegram',
+ 'token': 'bot:765test',
+ 'whitelisted-users': ['user', 'test']
+ })
+
+ async def test_connect(self):
+ connect_response = amock.Mock()
+ connect_response.status = 200
+ connect_response.json = amock.CoroutineMock()
+ connect_response.return_value = {
+ "ok": True,
+ "result": {
+ "id": 635392558,
+ "is_bot": True,
+ "first_name": "opsdroid",
+ "username": "opsdroid_bot"
+ }
+ }
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(connect_response)
+ await self.connector.connect(opsdroid)
+ self.assertLogs('_LOGGER', 'debug')
+ self.assertNotEqual(200, patched_request.status)
+ self.assertTrue(patched_request.called)
+
+ async def test_connect_failure(self):
+ result = amock.MagicMock()
+ result.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(result)
+
+ await self.connector.connect(opsdroid)
+ self.assertLogs('_LOGGER', 'error')
+
+ async def test_parse_message(self):
+ response = { 'result': [{
+ "update_id": 427647860,
+ "message": {
+ "message_id": 12,
+ "from": {
+ "id": 649671308,
+ "is_bot": False,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "user",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 649671308,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "a_user",
+ "type": "private"
+ },
+ "date": 1538756863,
+ "text": "Hello"
+ }
+ }]}
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ await self.connector._parse_message(opsdroid, response)
+ self.assertTrue(mocked_parse.called)
+
+ async def test_parse_message_unauthorized(self):
+ self.connector.config['whitelisted-users'] = ['user', 'test']
+ response = { 'result': [{
+ "update_id": 427647860,
+ "message": {
+ "message_id": 12,
+ "from": {
+ "id": 649671308,
+ "is_bot": False,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "a_user",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 649671308,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "a_user",
+ "type": "private"
+ },
+ "date": 1538756863,
+ "text": "Hello"
+ }
+ }]}
+
+ self.assertEqual(
+ self.connector.config['whitelisted-users'], ['user', 'test'])
+
+ message_text = "Sorry, you're not allowed to speak with this bot."
+
+ with OpsDroid() as opsdroid, \
+ amock.patch.object(self.connector, 'respond') \
+ as mocked_respond:
+ await self.connector._parse_message(opsdroid, response)
+ self.assertTrue(mocked_respond.called)
+ self.assertTrue(mocked_respond.called_with(message_text))
+
+ async def test_get_messages(self):
+ listen_response = amock.Mock()
+ listen_response.status = 200
+ listen_response.json = amock.CoroutineMock()
+ listen_response.return_value = {"result": [
+ {
+ "update_id": 427647860,
+ "message": {
+ "message_id": 54,
+ "from": {
+ "id": 639889348,
+ "is_bot": False,
+ "first_name": "Fabio",
+ "last_name": "Rosado",
+ "username": "FabioRosado",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 639889348,
+ "first_name": "Fabio",
+ "last_name": "Rosado",
+ "username": "FabioRosado",
+ "type": "private"
+ },
+ "date": 1538756863,
+ "text": "Hello"
+ }
+ }
+ ]}
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request,\
+ amock.patch.object(self.connector, '_parse_message') \
+ as mocked_parse_message:
+
+ self.connector.latest_update = 54
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(listen_response)
+ await self.connector._get_messages(opsdroid)
+ self.assertTrue(patched_request.called)
+ self.assertLogs('_LOGGER', 'debug')
+ # self.assertTrue(mocked_parse_message.called)
+
+ async def test_get_messages_failure(self):
+ listen_response = amock.Mock()
+ listen_response.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(listen_response)
+ await self.connector._get_messages(opsdroid)
+ self.assertLogs('_LOGGER', 'error')
+
+ async def test_listen(self):
+ self.connector.listening = amock.CoroutineMock()
+ self.connector.listening.side_effect = Exception()
+ await self.connector.listen(amock.CoroutineMock())
+
+ async def test_respond(self):
+ post_response = amock.Mock()
+ post_response.status = 200
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post') as patched_request:
+
+ self.assertTrue(opsdroid.__class__.instances)
+ test_message = Message(text="This is a test",
+ user="opsdroid",
+ room={"id": 12404},
+ connector=self.connector)
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertTrue(patched_request.called)
+ self.assertLogs("_LOGGER", "debug")
+
+ async def test_respond_failure(self):
+ post_response = amock.Mock()
+ post_response.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.post') as patched_request:
+
+ self.assertTrue(opsdroid.__class__.instances)
+ test_message = Message(text="This is a test",
+ user="opsdroid",
+ room={"id": 12404},
+ connector=self.connector)
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(post_response)
+ await test_message.respond("Response")
+ self.assertLogs('_LOGGER', 'debug')
| Move Telegram connector into core
This issue covers adding the [Telegram connector](https://github.com/opsdroid/connector-telegram) to core.
## Background
A while ago we began moving connectors from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.connector`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/connector) and copy the connector code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the connector if necessary.
- Write tests for the connector. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the connector `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of connectors](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#connector-modules).
- Add a deprecation notice to the old connector. (See [the slack connector](https://github.com/opsdroid/connector-slack))
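A rough sketch of the configuration the ported connector expects — the option names (`token`, `whitelisted-users`, `update_interval`) are the ones read in the connector code above, and the values are placeholders:

```python
from opsdroid.connector.telegram import ConnectorTelegram

connector = ConnectorTelegram({
    "name": "telegram",
    "token": "123456789:placeholder-bot-token",  # bot token from Telegram
    "whitelisted-users": ["some_user"],          # everyone else is refused
    "update_interval": 1,                        # seconds between polls
})
```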
| I will be working on this issue. | 2018-10-04T17:38:46
opsdroid/opsdroid | 692 | opsdroid__opsdroid-692 | [
"458"
] | 1bce0cc370077c9d082268d5952a1a6625566ed8 | diff --git a/opsdroid/const.py b/opsdroid/const.py
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -21,7 +21,7 @@
LOCALE_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"configuration/example_configuration.yaml")
-REGEX_MAX_SCORE = 0.6
+REGEX_SCORE_FACTOR = 0.6
RASANLU_DEFAULT_URL = "http://localhost:5000"
RASANLU_DEFAULT_PROJECT = "opsdroid"
diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -2,6 +2,7 @@
import logging
+from opsdroid.const import REGEX_SCORE_FACTOR
from opsdroid.helper import get_opsdroid
from opsdroid.web import Web
@@ -9,7 +10,7 @@
_LOGGER = logging.getLogger(__name__)
-def match_regex(regex, case_sensitive=True):
+def match_regex(regex, case_sensitive=True, score_factor=None):
"""Return regex match decorator."""
def matcher(func):
"""Add decorated function to skills list for regex matching."""
@@ -18,7 +19,8 @@ def matcher(func):
config = opsdroid.loader.current_import_config
regex_setup = {
"expression": regex,
- "case_sensitive": case_sensitive
+ "case_sensitive": case_sensitive,
+ "score_factor": score_factor or REGEX_SCORE_FACTOR,
}
opsdroid.skills.append({"regex": regex_setup,
"skill": func,
diff --git a/opsdroid/parsers/regex.py b/opsdroid/parsers/regex.py
--- a/opsdroid/parsers/regex.py
+++ b/opsdroid/parsers/regex.py
@@ -3,17 +3,14 @@
import logging
import re
-from opsdroid.const import REGEX_MAX_SCORE
-
-
_LOGGER = logging.getLogger(__name__)
-async def calculate_score(regex):
+async def calculate_score(regex, score_factor):
"""Calculate the score of a regex."""
# The score asymptotically approaches the max score
# based on the length of the expression.
- return (1 - (1 / ((len(regex) + 1) ** 2))) * REGEX_MAX_SCORE
+ return (1 - (1 / ((len(regex) + 1) ** 2))) * score_factor
async def parse_regex(opsdroid, message):
@@ -21,17 +18,18 @@ async def parse_regex(opsdroid, message):
matched_skills = []
for skill in opsdroid.skills:
if "regex" in skill:
- if skill["regex"]["case_sensitive"]:
- regex = re.search(skill["regex"]["expression"],
+ opts = skill["regex"]
+ if opts["case_sensitive"]:
+ regex = re.search(opts["expression"],
message.text)
else:
- regex = re.search(skill["regex"]["expression"],
+ regex = re.search(opts["expression"],
message.text, re.IGNORECASE)
if regex:
message.regex = regex
matched_skills.append({
"score": await calculate_score(
- skill["regex"]["expression"]),
+ opts["expression"], opts["score_factor"]),
"skill": skill["skill"],
"config": skill["config"],
"message": message
| diff --git a/tests/test_parser_regex.py b/tests/test_parser_regex.py
--- a/tests/test_parser_regex.py
+++ b/tests/test_parser_regex.py
@@ -22,6 +22,22 @@ async def test_parse_regex(self):
skills = await parse_regex(opsdroid, message)
self.assertEqual(mock_skill, skills[0]["skill"])
+ async def test_parse_regex_priority(self):
+ with OpsDroid() as opsdroid:
+ regex = r"(.*)"
+
+ mock_skill_low = amock.CoroutineMock()
+ match_regex(regex, score_factor=0.6)(mock_skill_low)
+
+ mock_skill_high = amock.CoroutineMock()
+ match_regex(regex, score_factor=1)(mock_skill_high)
+
+ mock_connector = amock.CoroutineMock()
+ message = Message("Hello world", "user", "default", mock_connector)
+
+ skills = await opsdroid.get_ranked_skills(message)
+ self.assertEqual(mock_skill_high, skills[0]["skill"])
+
async def test_parse_regex_raises(self):
with OpsDroid() as opsdroid:
mock_skill = amock.CoroutineMock()
| Change the way opsdroid prioritizes regex / NLU skills
# Description
Currently opsdroid uses a score system to decide which skill has to run when more than one skill matches a message.
This score system is designed to prioritize NLU engines over regex (see #311).
The problem comes when you're working at the same time with some NLU engine and some regex skills. If the regex skill shares some words with an NLU intent, it will never be executed, even if you write the message exactly as in the regex.
## Steps to Reproduce
For example, you add a greetings intent to some NLU training with various sentences like 'hello', 'hello opsdroid', 'hi', 'hello there', 'whats up', 'good morning'...
And you have the min-score at 0.6.
Then you have a regex matcher with something like 'hello from (?P\w+)!'.
If a user writes "hello from Barcelona!" opsdroid will prioritize the NLU response, because it's almost certain that the NLU platform will detect the intent in the sentence.
## Expected Functionality
If a developer has the regex 'hello from (?P\w+)!' and it matches, it has to be prioritized over the NLU intent, because the regex is more specific.
## Proposed solution
My proposal is to redesign the priority system by specificity, so I would prioritize skills in this order:
_parse (#450) > regex > NLU engines_
And, to decide which skill to execute if more than one matches at the same level:
- In parse and regex, use the expression length. A longer expression is normally more specific.
- In NLU engines, use the engine score (like now)
Any thoughts or discussion will be welcome :)
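To make this concrete, here is a rough sketch of how a skill could opt into a higher regex priority using a score factor like the one added in the patch above (the skill name and the `place` group name are made up for illustration):

```python
from opsdroid.matchers import match_regex


# With the patched scoring, a score_factor close to 1 lets this fairly
# specific regex out-rank an NLU intent that also matched the sentence.
@match_regex(r'hello from (?P<place>\w+)!', score_factor=1)
async def greeting_from(opsdroid, config, message):
    place = message.regex.group('place')
    await message.respond('Welcome, visitor from {}!'.format(place))
```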
| I agree that regex skills should be able to score higher than NLU skills if it truly is a better match.
When writing this I was worried about simple regex's winning over advanced NLU, for example if an NLU engine is trained with sentences like "will it rain in {place} at {time}" and there is a regex that matches "will it rain" then I want the NLU to win.
But your examples demonstrate that the regex scores should definitely be able to beat the NLU score.
I disagree that parse and regex should have higher priority than NLU; however, I do agree that we should think about how to properly score them so things work better.
Perhaps a good approach would be a combination of message length, regex/parse length and the difference between the two.
> When writing this I was worried about simple regex's winning over advanced NLU, for example if an NLU engine is trained with sentences like "will it rain in {place} at {time}" and there is a regex that matches "will it rain" then I want the NLU to win.
And what about making any regex with anchors (`^$`) win? So if instead of "will it rain" the skill matches "^will it rain$", then you want this specific sentence and it must beat the NLU engines.
I have another idea. Add an optional parameter to the regex matcher, so the programmer can change the priority to be above or below the NLU skills. Then also create a configuration option that sets the default regex priority, so the operator can decide if in general if they want regex to have priority, or not.
I like @go8ose's idea because it gives the user the power to choose NLU or regex priority.
One thing that I would like to mention is that in some cases the NLU will give a 100% score when detecting the intent of a sentence, so we shouldn't use the score to specify which parser should have priority. Perhaps using a sort of flag that points either to regex or the NLU?
@go8ose I like this idea of changing the weighting of regex in the configuration.
@FabioRosado I'm not sure I understand what you mean. If something has a 100% score then I think that should always win. Or do you mean if two parsers both return 100%?
Sorry I was still half asleep when I wrote that haha
I mean we should use something other than the score to decide which parser has priority because in some cases the NLU will return a 100% score on the sentence and that would make the regex be skipped again
But if the NLU returns 100% then surely it is 100% confident of the match? The whole point of the scores is to tell opsdroid which one to run.
Ideally I want to treat regex like any other parser. It matches a string, it calculates a score to say how confident it is on the match and then opsdroid uses the scores to decide which parser to use.
@jacobtomlinson I didn't think regexes did partial matches. They either match, or they don't. I mean, in a regex with optional elements you might have some groups that have nothing in them, but the regex would still have matched or not.
If an NLU is 100% sure it's a match for the intent, but a regex is also 100% sure, how do you break the tie? If I had an existing opsdroid bot, and I added a skill to it which has a regex, and then it didn't fire, I'm going to be confused. But if I have a bot with multiple skills using NLU, the chance of a tie seems much lower, and you'd probably understand that you might need to "train" your NLU more, if that is what you do with NLUs (1).
As an example, consider where an operator wants to make available a single command line program to the bot. They might have something like:
```python
@match_regex(r'ping ([0-9]{1,3}\.){3}[0-9]{1,3}$')
async def ping(opsdroid, config, message):
mac = message.regex.group(1)
# Fire off the command "ping -c 4 {mac}"
# TODO: work out how to do this in async world, i.e. https://docs.python.org/3/library/asyncio-subprocess.html
# Send the output
await message.respond(text)
```
In my mind, this is a perfectly normal thing to expect to be able to do in the chatops world. If an NLU thinks that "ping 127.0.0.1" should mean to reply with "there is no place like home", then I'm going to be confused (2). Maybe my focus is on the chatops world, but Jacob is thinking of other areas where you don't want your bot doing low level systems tasks.
So my proposal is
a) NLU return confidence scores of 0 to 100
b) regex matchers have an optional parameter so the programmer can express their confidence, in the range of 0 to 150
c) in configuration you have an option to set the default regex confidence if none is expressed, in the range of 0 to 150. The default for this configuration is 50 (3)
(1) It should be pretty clear by now I have almost no experience with NLU systems.
(2) See (1), I don't know why it would do that.
(3) I don't know what the numbers should be. However with the numbers I've given I'm trying to demonstrate that I think 50 meets with what Jacob is thinking. In any bot I deploy I'd probably set it to 150.
@go8ose Thanks for that, really useful stuff!
To break ties the parsers have an order of priority in the code (due to the nature of the code, not intentionally); I guess what @FabioRosado was suggesting is the ability to reorder them in the config. Regex is currently first, if that makes a difference.
In my head a partial regex match in your example above would be `please could you ping 8.8.8.8`, which I feel should get a lower score than `ping 8.8.8.8`. A failed example would be `please ping the ip 8.8.8.8`, as it wouldn't match.
If I trained an NLU model with examples like `please ping {ip}`, `ping {ip}`, `ping the ip {ip}`, etc then I would expect it to be more flexible than the regex and therefore be able to score higher.
I see your point that an NLU parser with some kind of smalltalk skill enabled could end up hijacking the conversation a lot of the time, and so the regex skills should get high scores.
My initial reaction is to give regex skills a score based on how well it matched (using length, empty capture groups, etc.) but distribute that score normally. The bounds of that distribution could be configurable (scores are currently probabilities and so can't go over 1). To ensure regex always wins you could configure the mean to 1 and the variance to 0. Or for situations where you want NLU to have a chance you could set the mean to 0.75 and the variance to 0.25, resulting in scores between 0.5 and 1 for regex.
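A simple linear version of that idea (illustrative only, not an actual opsdroid API, and ignoring the normal distribution detail) could squash the raw match quality into a configurable band:

```python
def scale_regex_score(match_quality, mean=0.75, variance=0.25):
    """Map a raw match quality in [0, 1] into [mean - variance, mean + variance]."""
    score = (mean - variance) + (2 * variance * match_quality)
    return max(0.0, min(1.0, score))


# With the defaults a perfect regex match scores 1.0 and a poor one 0.5;
# with mean=1 and variance=0 regex always scores 1.
```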
I'm also totally open to the fact that I'm over thinking this. I'm more interested in the views and use cases of the community, which so far are compelling me to just default the score of a regex to 1 and allow configuration to another value if you want.
Also this might be useful
```python
from asyncio.subprocess import PIPE
import asyncio
@match_regex(r"ping (?P<ip>([0-9]{1,3}\.){3}[0-9]{1,3})$")
async def ping(opsdroid, config, message):
ip = message.regex.group('ip')
proc = await asyncio.create_subprocess_shell("ping -c 4 {ip}".format(ip=ip), stdin=None, stderr=None, stdout=PIPE)
output = await proc.stdout.read()
await message.respond(output.decode('utf-8'))
```
@jacobtomlinson Yeah that was what I was trying to say, but you had it covered so its all good haha
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
| 2018-10-05T15:18:03 |
opsdroid/opsdroid | 693 | opsdroid__opsdroid-693 | [
"532"
] | 88405bae8de93050cbdaa541eb390f466700bf99 | diff --git a/opsdroid/memory.py b/opsdroid/memory.py
--- a/opsdroid/memory.py
+++ b/opsdroid/memory.py
@@ -7,15 +7,33 @@
class Memory:
- """An object to store and persist data outside of opsdroid."""
+ """A Memory object.
+
+ An object to obtain, store and persist data outside of opsdroid.
+
+ Attributes:
+ databases (:obj:`list` of :obj:`Database`): List of database objects.
+ memory (:obj:`dict`): In-memory dictionary to store data.
+
+ """
def __init__(self):
- """Create memory dictionary."""
+ """Create object with minimum properties."""
self.memory = {}
self.databases = []
async def get(self, key):
- """Get data object for a given key."""
+ """Get data object for a given key.
+
+ Gets the key value found in-memory or from the database(s).
+
+ Args:
+ key (str): Key to retrieve data.
+
+ Returns:
+ A data object for the given key, otherwise `None`.
+
+ """
_LOGGER.debug(_("Getting %s from memory."), key)
database_result = await self._get_from_database(key)
if database_result is not None:
@@ -26,24 +44,53 @@ async def get(self, key):
return None
async def put(self, key, data):
- """Put a data object to a given key."""
+ """Put a data object to a given key.
+
+ Stores the key and value in memory and the database(s).
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
_LOGGER.debug(_("Putting %s to memory"), key)
self.memory[key] = data
await self._put_to_database(key, self.memory[key])
async def _get_from_database(self, key):
- """Get updates from databases for a given key."""
+ """Get updates from databases for a given key.
+
+ Gets the first key value found from the database(s).
+
+ Args:
+ key (str): Key to retrieve data from a database.
+
+ Returns:
+ The first key value (data object) found from the database(s).
+ Or `None` when no database is defined or no value is found.
+
+ Todo:
+ * Handle multiple databases
+
+ """
if not self.databases:
return None
results = []
for database in self.databases:
results.append(await database.get(key))
- # TODO: Handle multiple databases
return results[0]
async def _put_to_database(self, key, data):
- """Put updates into databases for a given key."""
+ """Put updates into databases for a given key.
+
+ Stores the key and value on each database defined.
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
if self.databases:
for database in self.databases:
await database.put(key, data)
| Add Google Style Docstrings
We should add Google Style Docstrings to every function, method, and class in opsdroid. This style will support existing documentation and will help in the future by letting us generate documentation automatically.
This requires a fair bit of effort, so this issue can be worked on by more than one contributor; just make sure that everyone knows what you are working on, to avoid other contributors spending time on something that you are already working on.
If you are unfamiliar with the Google Style Docstrings I'd recommend that you check these resources:
- [Sphinx 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
Docstrings that need to be updated:
- main.py
- [x] configure_lang
- [ ] configure_log
- [ ] get_logging_level
- [ ] check_dependencies
- [ ] print_version
- [ ] print_example_config
- [ ] edit_files
- [x] welcome_message
- ~~helper.py~~
- [x] get_opsdroid
- [x] del_rw
- [x] move_config_to_appdir
- memory.py
- [x] Memory
- [x] get
- [x] put
- [x] _get_from_database
- [x] _put_to_database
- message.py
- [x] Message
- [x] __init__
- [x] _thinking_delay
- [x] _typing delay
- [x] respond
- [x] react
- web.py
- [ ] Web
- [x] get_port
- [x] get_host
- [x] get_ssl_context
- [ ] start
- [ ] build_response
- [ ] web_index_handler
- [ ] web_stats_handler
- matchers.py
- [ ] match_regex
- [ ] match_apiai_action
- [ ] match_apiai_intent
- [ ] match_dialogflow_action
- [ ] match_dialogflow_intent
- [ ] match_luisai_intent
- [ ] match_rasanlu
- [ ] match_recastai
- [ ] match_witai
- [ ] match_crontab
- [ ] match_webhook
- [ ] match_always
- core.py
- [ ] OpsDroid
- [ ] default_connector
- [ ] exit
- [ ] critical
- [ ] call_stop
- [ ] disconnect
- [ ] stop
- [ ] load
- [ ] start_loop
- [x] setup_skills
- [ ] train_parsers
- [ ] start_connector_tasks
- [ ] start_database
- [ ] run_skill
- [ ] get_ranked_skills
- [ ] parse
- loader.py
- [ ] Loader
- [x] import_module_from_spec
- [x] import_module
- [x] check_cache
- [x] build_module_import_path
- [x] build_module_install_path
- [x] git_clone
- [x] git_pull
- [x] pip_install_deps
- [x] create_default_config
- [x] load_config_file
- [ ] envvar_constructor
- [ ] include_constructor
- [x] setup_modules_directory
- [x] load_modules_from_config
- [x] _load_modules
- [x] _install_module
- [x] _update_module
- [ ] _install_git_module
- [x] _install_local_module
---- ORIGINAL POST ----
I've been wondering about this for a while now and I would like to know if we should replace/update all the docstrings in opsdroid with the Google Style doc strings.
I think this could help new and old contributors to contribute and commit to opsdroid, since the Google Style docstrings give more information about every method/function and specify clearly what sort of input the function/method expects, what it will return and what will be raised (if applicable).
The downside of this style is that the length of every .py file will increase due to the docstrings, but since most IDEs allow you to hide those fields it shouldn't be too bad.
Here is a good example of Google Style Doc strings: [Sphinx 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)
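For anyone unfamiliar with the format, a minimal made-up example looks like this:

```python
_store = {}


def fetch(key, default=None):
    """Fetch a stored value for a given key.

    Args:
        key (str): Key to look up.
        default (obj, optional): Value to return when the key is missing.

    Returns:
        The stored value, or ``default`` if the key is not found.

    """
    return _store.get(key, default)
```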
I would like to know what you all think about this idea and if it's worth spending time on it.
| Yes we should definitely do this!
It can also be really useful for automatically generating reference documentation.
Awesome I'll wait a few days to see if anyone is opposed to this idea or if they would like to give some advice/comment on the issue.
If in a few days no one says anything I'll edit this issue just to explain more in depth what we expect of the comments and how to do it. I'd recommend dividing opsdroid per .py file so different people can contribute to the issue.
I like the idea. Is it possible to add a test that inspects the doc strings, and fails if they don't match the format? If so, would Jacob be happy with test coverage "reducing" in the short term as this test was added, but before all the doc strings complied?
Yes I'm happy with that approach.
I've been thinking about it and I think there are two reasons why anyone would want to do this. The first is for autogenerating documentation, the second is making it easier for people to contribute.
As you said you are intending this to help people contribute, and I definitely agree. I just want to be clear on why we are doing this beyond it just being a thing that some projects do.
We currently run `pydocstyle` as part of the lint suite. I wonder if there is a way of telling it to enforce this docstring style?
I'm not sure if it's possible to enforce Google doc style in the lint, but I know that you can run tests on the docstrings like @go8ose suggested. Sphinx has a command for this (it uses the doctest module), but these tests might cause some issues and headaches.
The doctests use the string representation present in the docstring to run the tests; if the result is not consistent, for example a function that deals with dates and uses date.now(), the test will always fail.
Another example would be running doctests with dictionaries; these tests will mostly fail due to the unsorted nature of dicts, and the only way to make them pass would be to sort the dict all the time.
One way to work around it would be to just test some docstrings and not others. In Sphinx you can just add the command:
```
..doctest::
>>> foo()
bar
```
Finally, I believe that all the tests that we have at the moment do a very good job of testing every single piece of code in opsdroid, so perhaps adding the doctests would be extra effort for no real gain - these would test what is being tested already.
--EDIT--
I've updated my first post with all the functions, classes and methods that need to be updated; let me know if you need some added or removed.
Hi Fabio,
I'm not suggesting we add more tests in the form of doctests. That would indeed be a waste of effort. I'm suggesting we check conformance with the google style doc strings.
Jacob suggested seeing if this can be checked in the linting run. That is a good idea, linting is what I should have suggested initially.
Cheers,
Geoff
This tool looks like it tests the docstrings against the google convention. We should explore it more.
https://github.com/terrencepreilly/darglint
Hi Fabio,
I like your idea. I'm new to this project and I can try to do something and make a pull request.
For my part I'm going to begin with **helper.py**, is that OK for you?
Thanks, best regards
Heya @sims34 yeah that would be much appreciated, let me know in gitter if you need any help with this.
Hi Fabio,
I am new here on this project. I was hoping I could help out with main.py
Regards
Hey @purvaudai, please go ahead!
Hello @purvaudai, did you manage to work on `main.py`? If you are stuck with something let us know, we would be happy to help you get started.
Hi guys, is anyone working on this issue?
@mraza007 not currently. Please go ahead.
Sure I will start working on this issue. Is there a way that you can assign this issue to me and on what files do I have to add google style doc string functions
Sorry for the lack of replies from my side. I tried solving this issue but
got confused, so I decided to look through the docs again, and I got
carried away learning. I am sorry for the lack of professionalism from my
side.
@purvaudai Hey, are you working on this issue? Do you want to continue?
Hi guys I'd love to contribute too.
@mraza007 @purvaudai I know you guys are working on this, but if you need my help with any of the file, I'll be more than happy to contribute.
@mraza007 @purvaudai @NikhilRaverkar Thanks for all wanting to contribute! There is a lot to be done on this issue so I strongly recommend picking a file and starting work on it.
Don't worry too much about duplicates, it's unlikely to happen given the number of methods that need updating. I would also be happy for you to submit lots of small PRs. Just pick a few methods, update the docstrings and raise a PR.
Yup I think that would be a great idea
@purvaudai Don't worry my friend, sometimes these things happen; if you want to contribute to opsdroid in the future we will be glad to help you.
@mraza007 @NikhilRaverkar If you guys need any help, let us know. You can work on different files if you want and if something is not clear feel free to hit us up either in here or our [gitter channel](https://gitter.im/opsdroid/developers)
Sure I just joined the channel I will start working on this over the weekend
I'll start working on this over the weekend.
Thanks a lot,
Nikhil
I would like to contribute to this. For starters I was thinking about taking the web.py file. :smile:
Hello, please go ahead and let us know if you need any help
@FabioRosado Can I grab message.py? This would be my first issue!
@archime please go ahead! Let me know if you need any help with this issue. Also, welcome to the project.
Hey, I'd like to add a couple of docstrings, however I've got a question first.
The Google style guide in the description seems to be deprecated.
Should I reference this one https://github.com/google/styleguide/blob/gh-pages/pyguide.md instead?
Hello @kritokrator Thanks for showing interest in this matter.
The sphinx documentation has an up to date style. We are not using typehints in our codebase yet so we will be specifying the type as:
```
Args:
arg1 (String): This arg does things
arg2 (Boolean, optional): This arg can be true, its optional
```
This is a very brief explanation but should get you started, I also recommend you check the [Facebook Connector](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/facebook/__init__.py) as an example of how we would like the doc strings ๐ | 2018-10-05T16:42:15 |
|
opsdroid/opsdroid | 700 | opsdroid__opsdroid-700 | [
"341"
] | 864a103a7b562e229e1626f77781f7c202b006d6 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -147,10 +147,18 @@ def start_loop(self):
self.eventloop.close()
def setup_skills(self, skills):
- """Call the setup function on the passed in skills."""
+ """Call the setup function on the loaded skills.
+
+ Iterates through all the skills which have been loaded and runs
+ any setup functions which have been defined in the skill.
+
+ Args:
+ skills (list): A list of all the loaded skills.
+
+ """
with contextlib.suppress(AttributeError):
for skill in skills:
- skill["module"].setup(self)
+ skill["module"].setup(self, self.config)
def train_parsers(self, skills):
"""Train the parsers."""
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -6,6 +6,7 @@
import asynctest.mock as amock
import importlib
+from opsdroid.__main__ import configure_lang
from opsdroid.core import OpsDroid
from opsdroid.message import Message
from opsdroid.connector import Connector
@@ -19,6 +20,7 @@ class TestCore(unittest.TestCase):
def setUp(self):
self.previous_loop = asyncio.get_event_loop()
+ configure_lang({})
def tearDown(self):
self.previous_loop.close()
@@ -148,6 +150,9 @@ def test_setup_modules(self):
example_modules.append({"module": {"name": "test"}})
opsdroid.setup_skills(example_modules)
self.assertEqual(len(example_modules[0]["module"].mock_calls), 1)
+ self.assertEqual(example_modules[0]['module'].method_calls[0][0], 'setup')
+ self.assertEqual(len(example_modules[0]['module'].method_calls[0][1]), 2)
+ self.assertEqual(example_modules[0]['module'].method_calls[0][1][1], {})
def test_default_connector(self):
with OpsDroid() as opsdroid:
| Pass config to skill setup method
# Description
When running the `setup` method of a skill, only a pointer to opsdroid is passed; the config for that skill should be passed too. See the [call here](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/core.py#L153).
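For skill authors that would mean a module-level `setup` which accepts the configuration as a second argument, roughly like this (the option name is made up for illustration):

```python
def setup(opsdroid, config):
    """Run once when the skill is loaded.

    `config` is the configuration dict opsdroid would pass in alongside
    itself, so the skill no longer has to dig options out of opsdroid.
    """
    # 'example-option' is a hypothetical key used purely for illustration.
    return config.get('example-option', 'some-default')
```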
| Hi, I'm a first-time contributor, and would like to work on this. Would that be alright?
Hello @RonitRay, that would be amazing if you could work on this issue. Please let us know if you need any help.
Great!
So, from what I understand, the call is
`skill["module"].setup(self)`
and you require it to be
`skill["module"].setup(self, self.config)`
is that right or am I approaching this the wrong way?
Yep that looks right to me. If you raise a Pull Request we can always review it there. | 2018-10-22T14:58:45 |
opsdroid/opsdroid | 729 | opsdroid__opsdroid-729 | [
"723"
] | ea1ed8a637f6bcbed90665699e3da76d1e43ee98 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -185,6 +185,13 @@ async def unload(self, future=None):
self.cron_task = None
_LOGGER.info(_("Stopped cron"))
+ _LOGGER.info(_("Stopping pending tasks..."))
+ tasks = asyncio.Task.all_tasks()
+ for task in list(tasks):
+ if not task.done() and task is not asyncio.Task.current_task():
+ task.cancel()
+ _LOGGER.info(_("Stopped pending tasks"))
+
async def reload(self):
"""Reload opsdroid."""
await self.unload()
| diff --git a/tests/test_core.py b/tests/test_core.py
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -247,8 +247,14 @@ async def test_unload(self):
opsdroid.cron_task.cancel = amock.CoroutineMock()
mock_cron_task = opsdroid.cron_task
+ async def task():
+ await asyncio.sleep(.5)
+
+ t = asyncio.Task(task(), loop=self.loop)
+
await opsdroid.unload()
+ self.assertTrue(t.cancel())
self.assertTrue(mock_connector.disconnect.called)
self.assertTrue(mock_database.disconnect.called)
self.assertTrue(mock_web_server.stop.called)
| Application hangs on kill if connector doesn't disconnect
If a connector doesn't honour the disconnect method, opsdroid can hang on shutdown. We should force-kill connectors 30 seconds after disconnect is called.
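One possible way to implement the cut-off (a sketch, not tied to the exact connector API) is to wrap the disconnect call in `asyncio.wait_for`:

```python
import asyncio


async def disconnect_with_timeout(disconnect_coro, timeout=30):
    """Wait for a connector's disconnect coroutine, giving up after `timeout` seconds."""
    try:
        await asyncio.wait_for(disconnect_coro, timeout=timeout)
    except asyncio.TimeoutError:
        pass  # the connector never returned from disconnect; carry on shutting down
```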
| 2018-11-07T16:50:16 |
|
opsdroid/opsdroid | 731 | opsdroid__opsdroid-731 | [
"662"
] | c19a4c9f31052b53c2e04e1637a7580fd4e916ef | diff --git a/opsdroid/connector/matrix/__init__.py b/opsdroid/connector/matrix/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/matrix/__init__.py
@@ -0,0 +1,3 @@
+"""Imports the matrix connector."""
+
+from .connector import ConnectorMatrix # noqa: F401
diff --git a/opsdroid/connector/matrix/connector.py b/opsdroid/connector/matrix/connector.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/matrix/connector.py
@@ -0,0 +1,233 @@
+"""Connector for Matrix (https://matrix.org)."""
+
+import re
+import logging
+
+import aiohttp
+
+from matrix_api_async.api_asyncio import AsyncHTTPAPI
+from matrix_client.errors import MatrixRequestError
+
+from opsdroid.connector import Connector
+from opsdroid.message import Message
+
+from .html_cleaner import clean
+
+
+_LOGGER = logging.getLogger(__name__)
+
+__all__ = ['ConnectorMatrix']
+
+
+class ConnectorMatrix(Connector):
+ """Connector for Matrix (https://matrix.org)."""
+
+ def __init__(self, config, opsdroid=None): # noqa: D107
+ """Init the config for the connector."""
+ super().__init__(config, opsdroid=opsdroid)
+
+ self.name = "ConnectorMatrix" # The name of your connector
+ self.config = config # The config dictionary to be accessed later
+ self.rooms = config.get('rooms', None)
+ if not self.rooms:
+ self.rooms = {'main': config['room']}
+ self.room_ids = {}
+ self.default_room = self.rooms['main']
+ self.mxid = config['mxid']
+ self.nick = config.get('nick', None)
+ self.homeserver = config.get('homeserver', "https://matrix.org")
+ self.password = config['password']
+ self.room_specific_nicks = config.get("room_specific_nicks", False)
+ self.session = None
+ self.filter_id = None
+ self.connection = None
+
+ @property
+ def filter_json(self):
+ """Define JSON filter to apply to incoming events."""
+ return {
+ "event_format": "client",
+ "account_data": {
+ "limit": 0,
+ "types": []
+ },
+ "presence": {
+ "limit": 0,
+ "types": []
+ },
+ "room": {
+ "rooms": [],
+ "account_data": {
+ "types": []
+ },
+ "timeline": {
+ "types": ["m.room.message"]
+ },
+ "ephemeral": {
+ "types": []
+ },
+ "state": {
+ "types": []
+ }
+ }
+ }
+
+ async def make_filter(self, api, room_ids):
+ """Make a filter on the server for future syncs."""
+ fjson = self.filter_json
+ for room_id in room_ids:
+ fjson['room']['rooms'].append(room_id)
+
+ resp = await api.create_filter(
+ user_id=self.mxid, filter_params=fjson)
+
+ return resp['filter_id']
+
+ async def connect(self):
+ """Create connection object with chat library."""
+ session = aiohttp.ClientSession()
+ mapi = AsyncHTTPAPI(self.homeserver, session)
+
+ self.session = session
+ login_response = await mapi.login(
+ "m.login.password", user=self.mxid, password=self.password)
+ mapi.token = login_response['access_token']
+ mapi.sync_token = None
+
+ for roomname, room in self.rooms.items():
+ response = await mapi.join_room(room)
+ self.room_ids[roomname] = response['room_id']
+ self.connection = mapi
+
+ # Create a filter now, saves time on each later sync
+ self.filter_id = await self.make_filter(mapi, self.room_ids.values())
+
+ # Do initial sync so we don't get old messages later.
+ response = await self.connection.sync(
+ timeout_ms=3000,
+ filter='{ "room": { "timeline" : { "limit" : 1 } } }',
+ set_presence="online")
+ self.connection.sync_token = response["next_batch"]
+
+ if self.nick:
+ display_name = await self.connection.get_display_name(self.mxid)
+ if display_name != self.nick:
+ await self.connection.set_display_name(self.mxid, self.nick)
+
+ async def _parse_sync_response(self, response):
+ self.connection.sync_token = response["next_batch"]
+ for roomid in self.room_ids.values():
+ room = response['rooms']['join'].get(roomid, None)
+ if room and 'timeline' in room:
+ for event in room['timeline']['events']:
+ if event['content']['msgtype'] == 'm.text':
+ if event['sender'] != self.mxid:
+ return Message(event['content']['body'],
+ await self._get_nick(
+ roomid,
+ event['sender']),
+ roomid, self,
+ raw_message=event)
+
+ async def listen(self): # pragma: no cover
+ """Listen for new messages from the chat service."""
+ while True: # pylint: disable=R1702
+ try:
+ response = await self.connection.sync(
+ self.connection.sync_token,
+ timeout_ms=int(6 * 60 * 60 * 1e3), # 6h in ms
+ filter=self.filter_id)
+ _LOGGER.debug("matrix sync request returned")
+ message = await self._parse_sync_response(response)
+ await self.opsdroid.parse(message)
+
+ except Exception: # pylint: disable=W0703
+ _LOGGER.exception('Matrix Sync Error')
+
+ async def _get_nick(self, roomid, mxid):
+ """
+ Get nickname from user ID.
+
+ Get the nickname of a sender depending on the room specific config
+ setting.
+ """
+ if self.room_specific_nicks:
+ try:
+ return await self.connection.get_room_displayname(roomid, mxid)
+ except Exception: # pylint: disable=W0703
+ # Fallback to the non-room specific one
+ logging.exception(
+ "Failed to lookup room specific nick for %s", mxid)
+
+ try:
+ return await self.connection.get_display_name(mxid)
+ except MatrixRequestError as mre:
+ # Log the error if it's not the 404 from the user not having a nick
+ if mre.code != 404:
+ logging.exception("Failed to lookup nick for %s", mxid)
+ return mxid
+
+ @staticmethod
+ def _get_formatted_message_body(message, body=None, msgtype="m.text"):
+ """
+ Get HTML from a message.
+
+ Return the json representation of the message in
+ "org.matrix.custom.html" format.
+ """
+ # Markdown leaves a <p></p> around standard messages that we want to
+ # strip:
+ if message.startswith('<p>'):
+ message = message[3:]
+ if message.endswith('</p>'):
+ message = message[:-4]
+
+ clean_html = clean(message)
+
+ return {
+ # Strip out any tags from the markdown to make the body
+ "body": body if body else re.sub('<[^<]+?>', '', clean_html),
+ "msgtype": msgtype,
+ "format": "org.matrix.custom.html",
+ "formatted_body": clean_html
+ }
+
+ async def respond(self, message, room=None):
+ """Send `message.text` back to the chat service."""
+ if not room:
+ # Connector responds in the same room it received the original
+ # message
+ room_id = message.room
+ else:
+ room_id = self.rooms[room]
+
+ # Ensure we have a room id not alias
+ if not room_id.startswith('!'):
+ room_id = await self.connection.get_room_id(room_id)
+ else:
+ room_id = room_id
+
+ try:
+ await self.connection.send_message_event(
+ room_id,
+ "m.room.message",
+ self._get_formatted_message_body(message.text))
+ except aiohttp.client_exceptions.ServerDisconnectedError:
+ _LOGGER.debug("Server had disconnected, retrying send.")
+ await self.connection.send_message_event(
+ room_id,
+ "m.room.message",
+ self._get_formatted_message_body(message.text))
+
+ async def disconnect(self):
+ """Close the matrix session."""
+ await self.session.close()
+
+ def get_roomname(self, room):
+ """Get the name of a room from alias or room ID."""
+ if room.startswith(('#', '!')):
+ for connroom in self.rooms:
+ if room in (connroom, self.room_ids[connroom]):
+ return connroom
+
+ return room
diff --git a/opsdroid/connector/matrix/html_cleaner.py b/opsdroid/connector/matrix/html_cleaner.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/matrix/html_cleaner.py
@@ -0,0 +1,76 @@
+"""Helpers for sanitising HTML input to the bot."""
+
+import bleach
+
+__all__ = ['clean']
+
+
+"""
+Take the list of allowed tags and attributes from Riot for consistency:
+https://github.com/matrix-org/matrix-react-sdk/blob/master/src/HtmlUtils.js#L180-L195
+"""
+
+ALLOWED_TAGS = [
+ 'font', # custom to matrix for IRC-style font coloring
+ 'del', # for markdown
+ 'h1',
+ 'h2',
+ 'h3',
+ 'h4',
+ 'h5',
+ 'h6',
+ 'blockquote',
+ 'p',
+ 'a',
+ 'ul',
+ 'ol',
+ 'sup',
+ 'sub',
+ 'nl',
+ 'li',
+ 'b',
+ 'i',
+ 'u',
+ 'strong',
+ 'em',
+ 'strike',
+ 'code',
+ 'hr',
+ 'br',
+ 'div',
+ 'table',
+ 'thead',
+ 'caption',
+ 'tbody',
+ 'tr',
+ 'th',
+ 'td',
+ 'pre',
+ 'span',
+ 'img',
+]
+
+ALLOWED_ATTRIBUTES = {
+ 'font': ['color', 'data-mx-bg-color', 'data-mx-color', 'style'],
+ 'span': ['data-mx-bg-color', 'data-mx-color', 'style'],
+ 'a': ['href', 'name', 'target', 'rel'],
+ 'img': ['src', 'width', 'height', 'alt', 'title'],
+ 'ol': ['start'],
+}
+
+
+def clean(html, **kwargs):
+ """
+ Sanitise HTML fragments.
+
+ A version of `bleach.clean` but with Riot's allowed tags and ``strip=True``
+ by default.
+ """
+ defaults = {
+ 'strip': True,
+ 'tags': ALLOWED_TAGS,
+ 'attributes': ALLOWED_ATTRIBUTES
+ }
+ defaults.update(kwargs)
+
+ return bleach.clean(html, **defaults)
| diff --git a/tests/test_connector_matrix.py b/tests/test_connector_matrix.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector_matrix.py
@@ -0,0 +1,290 @@
+"""Tests for the ConnectorMatrix class."""
+import asyncio
+from unittest import mock
+
+import aiohttp
+import asynctest
+import asynctest.mock as amock
+from matrix_api_async import AsyncHTTPAPI
+from matrix_client.errors import MatrixRequestError
+
+from opsdroid.core import OpsDroid
+from opsdroid.connector.matrix import ConnectorMatrix
+from opsdroid.__main__ import configure_lang # noqa
+
+api_string = 'matrix_api_async.AsyncHTTPAPI.{}'
+
+
+def setup_connector():
+ """Initiate a basic connector setup for testing on"""
+ connector = ConnectorMatrix(
+ {"room": "#test:localhost",
+ "mxid": "@opsdroid:localhost",
+ "password": "hello",
+ "homeserver": "http://localhost:8008"}
+ )
+ return connector
+
+
+class TestConnectorMatrixAsync(asynctest.TestCase):
+ """Test the async methods of the opsdroid Matrix connector class."""
+ @property
+ def sync_return(self):
+ """Define some mock json to return from the sync method"""
+ return {
+ "account_data": {
+ "events": []
+ },
+ "device_lists": {
+ "changed": [],
+ "left": []
+ },
+ "device_one_time_keys_count": {
+ "signed_curve25519": 50
+ },
+ "groups": {
+ "invite": {},
+ "join": {},
+ "leave": {}
+ },
+ "next_batch": "s801873745",
+ "presence": {
+ "events": []
+ },
+ "rooms": {
+ "invite": {},
+ "join": {
+ "!aroomid:localhost": {
+ "account_data": {
+ "events": []
+ },
+ "ephemeral": {
+ "events": []
+ },
+ "state": {
+ "events": []
+ },
+ "summary": {},
+ "timeline": {
+ "events": [
+ {
+ "content": {
+ "body": "LOUD NOISES",
+ "msgtype": "m.text"
+ },
+ "event_id": "$eventid:localhost",
+ "origin_server_ts": 1547124373956,
+ "sender": "@cadair:cadair.com",
+ "type": "m.room.message",
+ "unsigned": {
+ "age": 3498
+ }
+ }
+ ],
+ "limited": False,
+ "prev_batch": "s801873709"
+ },
+ "unread_notifications": {
+ "highlight_count": 0,
+ "notification_count": 0
+ }
+ }
+ },
+ "leave": {}
+ },
+ "to_device": {
+ "events": []
+ }
+ }
+
+ def setUp(self):
+ """Basic setting up for tests"""
+ self.connector = setup_connector()
+ self.api = AsyncHTTPAPI('https://notaurl.com', None)
+ self.connector.connection = self.api
+
+ async def test_make_filter(self):
+ with amock.patch(api_string.format('create_filter')) as patched_filter:
+ patched_filter.return_value = asyncio.Future()
+ patched_filter.return_value.set_result({'filter_id': 'arbitrary string'})
+ test_rooms = ['!notaroom:matrix.org', '!notanotherroom:matrix.org']
+ filter_id = await self.connector.make_filter(self.api, test_rooms)
+ assert filter_id == 'arbitrary string'
+
+ assert patched_filter.called
+ assert patched_filter.call_args[1]['user_id'] == '@opsdroid:localhost'
+ assert patched_filter.call_args[1]['filter_params']['room']['rooms'] == test_rooms
+
+ async def test_connect(self):
+ with amock.patch(api_string.format('login')) as patched_login, \
+ amock.patch(api_string.format('join_room')) as patched_join_room, \
+ amock.patch(api_string.format('create_filter')) as patched_filter, \
+ amock.patch(api_string.format('sync')) as patched_sync, \
+ amock.patch(api_string.format('get_display_name')) as patched_get_nick, \
+ amock.patch(api_string.format('set_display_name')) as patch_set_nick, \
+ amock.patch('aiohttp.ClientSession') as patch_cs, \
+ OpsDroid() as opsdroid:
+
+ # Skip actually creating a client session
+ patch_cs.return_value = amock.MagicMock()
+
+ patched_login.return_value = asyncio.Future()
+ patched_login.return_value.set_result({'access_token': 'arbitrary string1'})
+
+ patched_join_room.return_value = asyncio.Future()
+ patched_join_room.return_value.set_result({'room_id': '!aroomid:localhost'})
+
+ patched_filter.return_value = asyncio.Future()
+ patched_filter.return_value.set_result({'filter_id': 'arbitrary string'})
+
+ patched_sync.return_value = asyncio.Future()
+ patched_sync.return_value.set_result({'next_batch': 'arbitrary string2'})
+
+ await self.connector.connect()
+
+ assert '!aroomid:localhost' in self.connector.room_ids.values()
+
+ assert self.connector.connection.token == 'arbitrary string1'
+
+ assert self.connector.filter_id == 'arbitrary string'
+
+ assert self.connector.connection.sync_token == 'arbitrary string2'
+
+ self.connector.nick = "Rabbit Hole"
+
+ patched_get_nick.return_value = asyncio.Future()
+ patched_get_nick.return_value.set_result("Rabbit Hole")
+
+ await self.connector.connect()
+
+ assert patched_get_nick.called
+ assert not patch_set_nick.called
+
+ patched_get_nick.return_value = asyncio.Future()
+ patched_get_nick.return_value.set_result("Neo")
+
+ self.connector.mxid = "@morpheus:matrix.org"
+
+ await self.connector.connect()
+
+ assert patched_get_nick.called
+ assert patch_set_nick.called_once_with("@morpheus:matrix.org", "Rabbit Hole")
+
+ async def test_parse_sync_response(self):
+ self.connector.room_ids = {'main': '!aroomid:localhost'}
+ self.connector.filter_id = 'arbitrary string'
+
+ with amock.patch(api_string.format('get_display_name')) as patched_name:
+ patched_name.return_value = asyncio.Future()
+ patched_name.return_value.set_result('SomeUsersName')
+
+ returned_message = await self.connector._parse_sync_response(self.sync_return)
+
+ assert returned_message.text == 'LOUD NOISES'
+ assert returned_message.user == 'SomeUsersName'
+ assert returned_message.room == '!aroomid:localhost'
+ assert returned_message.connector == self.connector
+ raw_message = self.sync_return['rooms']['join']['!aroomid:localhost']['timeline']['events'][0]
+ assert returned_message.raw_message == raw_message
+
+ async def test_get_nick(self):
+ self.connector.room_specific_nicks = True
+
+ with amock.patch(api_string.format('get_room_displayname')) as patched_roomname, \
+ amock.patch(api_string.format('get_display_name')) as patched_globname:
+ patched_roomname.return_value = asyncio.Future()
+ patched_roomname.return_value.set_result('')
+
+ mxid = '@notaperson:matrix.org'
+ assert await self.connector._get_nick('#notaroom:localhost', mxid) == ''
+ # Test if a room displayname couldn't be found
+ patched_roomname.side_effect = Exception()
+
+ # Test if that leads to a global displayname being returned
+ patched_globname.return_value = asyncio.Future()
+ patched_globname.return_value.set_result('@notaperson')
+ assert await self.connector._get_nick('#notaroom:localhost', mxid) == '@notaperson'
+
+ # Test that failed nickname lookup returns the mxid
+ patched_globname.side_effect = MatrixRequestError()
+ assert await self.connector._get_nick('#notaroom:localhost', mxid) == mxid
+
+ async def test_get_formatted_message_body(self):
+ original_html = "<p><h3><no>Hello World</no></h3></p>"
+ original_body = "### Hello World"
+ message = self.connector._get_formatted_message_body(original_html)
+ assert message['formatted_body'] == "<h3>Hello World</h3>"
+ assert message['body'] == "Hello World"
+
+ message = self.connector._get_formatted_message_body(original_html,
+ original_body)
+ assert message['formatted_body'] == "<h3>Hello World</h3>"
+ assert message['body'] == "### Hello World"
+
+ async def _get_message(self):
+ self.connector.room_ids = {'main': '!aroomid:localhost'}
+ self.connector.filter_id = 'arbitrary string'
+ m = 'opsdroid.connector.matrix.ConnectorMatrix._get_nick'
+
+ with amock.patch(m) as patched_nick:
+ patched_nick.return_value = asyncio.Future()
+ patched_nick.return_value.set_result("Neo")
+
+ return await self.connector._parse_sync_response(self.sync_return)
+
+ async def test_respond_retry(self):
+ message = await self._get_message()
+ with amock.patch(api_string.format("send_message_event")) as patched_send:
+ patched_send.return_value = asyncio.Future()
+ patched_send.return_value.set_result(None)
+ await self.connector.respond(message)
+
+ message_obj = self.connector._get_formatted_message_body(message.text)
+ assert patched_send.called_once_with(message.room,
+ "m.room.message",
+ message_obj)
+
+ patched_send.side_effect = [aiohttp.client_exceptions.ServerDisconnectedError(),
+ patched_send.return_value]
+
+ await self.connector.respond(message)
+
+ message_obj = self.connector._get_formatted_message_body(message.text)
+ assert patched_send.called_once_with(message.room,
+ "m.room.message",
+ message_obj)
+
+ async def test_respond_room(self):
+ message = await self._get_message()
+ with amock.patch(api_string.format("send_message_event")) as patched_send, \
+ amock.patch(api_string.format("get_room_id")) as patched_room_id:
+
+ patched_send.return_value = asyncio.Future()
+ patched_send.return_value.set_result(None)
+
+ patched_room_id.return_value = asyncio.Future()
+ patched_room_id.return_value.set_result(message.room)
+
+ await self.connector.respond(message, room="main")
+
+ message_obj = self.connector._get_formatted_message_body(message.text)
+ assert patched_send.called_once_with(message.room,
+ "m.room.message",
+ message_obj)
+
+ async def test_disconnect(self):
+ self.connector.session = amock.MagicMock()
+ self.connector.session.close = amock.CoroutineMock()
+ await self.connector.disconnect()
+ assert self.connector.session.close.called
+
+ def test_get_roomname(self):
+ self.connector.rooms = ['#notthisroom:localhost',
+ '#thisroom:localhost']
+ self.connector.room_ids = dict(zip(self.connector.rooms,
+ ['!aroomid:localhost',
+ '!anotherroomid:localhost']))
+
+ assert self.connector.get_roomname('#thisroom:localhost') == '#thisroom:localhost'
+ assert self.connector.get_roomname('!anotherroomid:localhost') == '#thisroom:localhost'
+ assert self.connector.get_roomname('someroom') == 'someroom'
| Move Matrix connector into core
This issue covers adding the [Matrix connector](https://github.com/opsdroid/connector-matrix) to core.
## Background
A while ago we began moving connectors from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.connector`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/connector) and copy the connector code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the connector if necessary.
- Write tests for the connector. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the connector `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of connectors](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#connector-modules).
- Add a deprecation notice to the old connector. (See [the slack connector](https://github.com/opsdroid/connector-slack))
| I recommend that @SolarDrew or @Cadair take this if you have bandwidth. However if someone else wants to take it then please go ahead!
Can you override the core connector with a plugin connector? i.e. for development or similar?
My instinct is that if you specify a path in the config then that should override the core connector. I need to check that this is actually true though.
You can always configure a connector with a different name (ie `matrixdev`) and set the path on that to be sure. | 2018-11-08T15:54:55 |
opsdroid/opsdroid | 737 | opsdroid__opsdroid-737 | [
"732"
] | 7df9dc03b47bbb5b124ad6b7f74d02671058c60f | diff --git a/opsdroid/database/__init__.py b/opsdroid/database/__init__.py
--- a/opsdroid/database/__init__.py
+++ b/opsdroid/database/__init__.py
@@ -40,7 +40,7 @@ async def connect(self, opsdroid):
"""
raise NotImplementedError
- async def disconnect(self):
+ async def disconnect(self, opsdroid):
"""Disconnect from the database.
This method should disconnect from the given database using a native
| diff --git a/tests/test_database.py b/tests/test_database.py
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -26,7 +26,7 @@ async def test_connect(self):
async def test_disconnect(self):
database = Database({})
try:
- await database.disconnect()
+ await database.disconnect(None)
except NotImplementedError:
self.fail("disconnect() raised NotImplementedError unexpectedly!")
| Cannot disconnect from SQLite
# Description
SQLite database connector can't disconnect because of a wrong method signature.
## Steps to Reproduce
Enable the SQLite database module, then try to shut down the bot.
## Expected Functionality
The bot should shut down.
## Experienced Functionality
This error message on the console, and the bot remains running (but with the connectors already disconnected).
```
ERROR opsdroid.core: {'message': 'Task exception was never retrieved', 'exception': TypeError('disconnect() takes 1 positional argument but 2 were given',), 'future': <Task finished coro=<OpsDroid.handle_signal() done, defined at /home/polesz/.local/lib/python3.6/site-packages/opsdroid/core.py:121> exception=TypeError('disconnect() takes 1 positional argument but 2 were given',)>}
```
## Versions
- **Opsdroid version:** 0.13.0
- **Python version:** 3.6.6 (bundled with Fedora 28)
- **OS/Docker version:** Fedora 28, no Docker involved
## Additional information
It seems the method signature of `Database.disconnect()` is wrong (it should be `async def disconnect(self, opsdroid)`) or the caller (`OpsDroid.unload()`) should not pass the `opsdroid` instance to `database.disconnect()` (personally I'd vote for the former).
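If the former is chosen, database modules that override `disconnect()` would also need the matching signature, along these lines (illustrative subclass only):

```python
from opsdroid.database import Database


class ExampleDatabase(Database):
    """Illustrative only; a real module also implements connect/get/put."""

    async def disconnect(self, opsdroid):
        # Matches the corrected base-class signature so OpsDroid.unload()
        # can call disconnect(opsdroid) without a TypeError.
        pass
```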
| Thank you for raising this, as soon as #728 gets merged into opsdroid, this issue will be fixed.
Looking deeper into this it seems to affect mongo and redis, too, as neither of them override the default `disconnect()` method. | 2018-11-13T13:23:39 |
opsdroid/opsdroid | 741 | opsdroid__opsdroid-741 | [
"664"
] | 36ef692c2bd8cf2e2385508fe98aa83ace38726c | diff --git a/opsdroid/connector/github/__init__.py b/opsdroid/connector/github/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/connector/github/__init__.py
@@ -0,0 +1,114 @@
+"""A connector for GitHub."""
+import json
+import logging
+
+import aiohttp
+
+from opsdroid.connector import Connector
+from opsdroid.message import Message
+
+
+_LOGGER = logging.getLogger(__name__)
+GITHUB_API_URL = "https://api.github.com"
+
+
+class ConnectorGitHub(Connector):
+ """A connector for GitHub."""
+
+ def __init__(self, config):
+ """Create the connector."""
+ super().__init__(config)
+ logging.debug("Loaded GitHub connector")
+ self.config = config
+ try:
+ self.github_token = config["token"]
+ except KeyError:
+ _LOGGER.error("Missing auth token!"
+ "You must set 'token' in your config")
+ self.name = self.config.get("name", "github")
+ self.default_room = None
+ self.opsdroid = None
+ self.github_username = None
+
+ async def connect(self, opsdroid):
+ """Connect to GitHub."""
+ self.opsdroid = opsdroid
+ url = '{}/user?access_token={}'.format(
+ GITHUB_API_URL, self.github_token)
+ async with aiohttp.ClientSession() as session:
+ response = await session.get(url)
+ if response.status >= 300:
+ _LOGGER.error("Error connecting to github: %s",
+ response.text())
+ return False
+ _LOGGER.debug("Reading bot information...")
+ bot_data = await response.json()
+ _LOGGER.debug("Done.")
+ self.github_username = bot_data["login"]
+
+ self.opsdroid.web_server.web_app.router.add_post(
+ "/connector/{}".format(self.name),
+ self.github_message_handler)
+
+ async def disconnect(self, opsdroid):
+ """Disconnect from GitHub."""
+ pass
+
+ async def listen(self, opsdroid):
+ """Listen for new message."""
+ pass # Listening is handled by the aiohttp web server
+
+ async def github_message_handler(self, request):
+ """Handle event from GitHub."""
+ req = await request.post()
+ payload = json.loads(req["payload"])
+ try:
+ if payload["action"] == "created" and "comment" in payload:
+ issue_number = payload["issue"]["number"]
+ body = payload["comment"]["body"]
+ elif payload["action"] == "opened" and "issue" in payload:
+ issue_number = payload["issue"]["number"]
+ body = payload["issue"]["body"]
+ elif payload["action"] == "opened" and "pull_request" in payload:
+ issue_number = payload["pull_request"]["number"]
+ body = payload["pull_request"]["body"]
+ else:
+ _LOGGER.debug("No message to respond to.")
+ _LOGGER.debug(payload)
+ return aiohttp.web.Response(
+ text=json.dumps("No message to respond to."),
+ status=200)
+
+ issue = "{}/{}#{}".format(payload["repository"]["owner"]["login"],
+ payload["repository"]["name"],
+ issue_number)
+ message = Message(body,
+ payload["sender"]["login"],
+ issue,
+ self)
+ await self.opsdroid.parse(message)
+ except KeyError as error:
+ _LOGGER.error("Key %s not found in payload", error)
+ _LOGGER.debug(payload)
+ return aiohttp.web.Response(
+ text=json.dumps("Received"), status=201)
+
+ async def respond(self, message, room=None):
+ """Respond with a message."""
+ # stop immediately if the message is from the bot itself.
+ if message.user == self.github_username:
+ return True
+ _LOGGER.debug("Responding via GitHub")
+ repo, issue = message.room.split('#')
+ url = "{}/repos/{}/issues/{}/comments".format(
+ GITHUB_API_URL, repo, issue)
+ headers = {'Authorization': ' token {}'.format(self.github_token)}
+ async with aiohttp.ClientSession() as session:
+ resp = await session.post(url,
+ json={"body": message.text},
+ headers=headers)
+ if resp.status == 201:
+ _LOGGER.info("Message sent.")
+ return True
+ _LOGGER.error(await resp.json())
+ return False
| diff --git a/tests/responses/github_comment_payload.json b/tests/responses/github_comment_payload.json
new file mode 100644
--- /dev/null
+++ b/tests/responses/github_comment_payload.json
@@ -0,0 +1,215 @@
+{
+ "action": "created",
+ "issue": {
+ "url": "https://api.github.com/repos/opsdroid/opsdroid/issues/237",
+ "repository_url": "https://api.github.com/repos/opsdroid/opsdroid",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/237/labels{/name}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/237/comments",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/237/events",
+ "html_url": "https://github.com/opsdroid/opsdroid/issues/237",
+ "id": 261768248,
+ "node_id": "MDU6SXNzdWUyNjE3NjgyNDg=",
+ "number": 237,
+ "title": "test issue, please ignore",
+ "user": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "labels": [
+
+ ],
+ "state": "closed",
+ "locked": false,
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "milestone": null,
+ "comments": 2,
+ "created_at": "2017-09-29T20:55:49Z",
+ "updated_at": "2018-11-16T08:29:51Z",
+ "closed_at": "2017-09-30T19:37:25Z",
+ "author_association": "MEMBER",
+ "body": "This issue is for testing some opsdroid skills. Please ignore."
+ },
+ "comment": {
+ "url": "https://api.github.com/repos/opsdroid/opsdroid/issues/comments/439318644",
+ "html_url": "https://github.com/opsdroid/opsdroid/issues/237#issuecomment-439318644",
+ "issue_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/237",
+ "id": 439318644,
+ "node_id": "MDEyOklzc3VlQ29tbWVudDQzOTMxODY0NA==",
+ "user": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "created_at": "2018-11-16T08:29:51Z",
+ "updated_at": "2018-11-16T08:29:51Z",
+ "author_association": "MEMBER",
+ "body": "hello"
+ },
+ "repository": {
+ "id": 64034523,
+ "node_id": "MDEwOlJlcG9zaXRvcnk2NDAzNDUyMw==",
+ "name": "opsdroid",
+ "full_name": "opsdroid/opsdroid",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid",
+ "description": "๐ค An open source chat-ops bot framework",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid/deployments",
+ "created_at": "2016-07-23T20:18:56Z",
+ "updated_at": "2018-11-15T08:44:25Z",
+ "pushed_at": "2018-11-15T08:44:24Z",
+ "git_url": "git://github.com/opsdroid/opsdroid.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid",
+ "homepage": "https://opsdroid.github.io",
+ "size": 676,
+ "stargazers_count": 199,
+ "watchers_count": 199,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": false,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 100,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 54,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 100,
+ "open_issues": 54,
+ "watchers": 199,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "url": "https://api.github.com/orgs/opsdroid",
+ "repos_url": "https://api.github.com/orgs/opsdroid/repos",
+ "events_url": "https://api.github.com/orgs/opsdroid/events",
+ "hooks_url": "https://api.github.com/orgs/opsdroid/hooks",
+ "issues_url": "https://api.github.com/orgs/opsdroid/issues",
+ "members_url": "https://api.github.com/orgs/opsdroid/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/opsdroid/public_members{/member}",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "description": "An open source python chat-ops bot framework"
+ },
+ "sender": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/tests/responses/github_issue_payload.json b/tests/responses/github_issue_payload.json
new file mode 100644
--- /dev/null
+++ b/tests/responses/github_issue_payload.json
@@ -0,0 +1,184 @@
+{
+ "action": "opened",
+ "issue": {
+ "url": "https://api.github.com/repos/opsdroid/opsdroid/issues/740",
+ "repository_url": "https://api.github.com/repos/opsdroid/opsdroid",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/740/labels{/name}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/740/comments",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/740/events",
+ "html_url": "https://github.com/opsdroid/opsdroid/issues/740",
+ "id": 381840536,
+ "node_id": "MDU6SXNzdWUzODE4NDA1MzY=",
+ "number": 740,
+ "title": "A test please ignore",
+ "user": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "labels": [
+
+ ],
+ "state": "open",
+ "locked": false,
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "milestone": null,
+ "comments": 0,
+ "created_at": "2018-11-17T08:08:25Z",
+ "updated_at": "2018-11-17T08:08:25Z",
+ "closed_at": null,
+ "author_association": "MEMBER",
+ "body": "test"
+ },
+ "repository": {
+ "id": 64034523,
+ "node_id": "MDEwOlJlcG9zaXRvcnk2NDAzNDUyMw==",
+ "name": "opsdroid",
+ "full_name": "opsdroid/opsdroid",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid",
+ "description": "๐ค An open source chat-ops bot framework",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid/deployments",
+ "created_at": "2016-07-23T20:18:56Z",
+ "updated_at": "2018-11-15T08:44:25Z",
+ "pushed_at": "2018-11-16T12:16:50Z",
+ "git_url": "git://github.com/opsdroid/opsdroid.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid",
+ "homepage": "https://opsdroid.github.io",
+ "size": 677,
+ "stargazers_count": 199,
+ "watchers_count": 199,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": false,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 100,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 56,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 100,
+ "open_issues": 56,
+ "watchers": 199,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "url": "https://api.github.com/orgs/opsdroid",
+ "repos_url": "https://api.github.com/orgs/opsdroid/repos",
+ "events_url": "https://api.github.com/orgs/opsdroid/events",
+ "hooks_url": "https://api.github.com/orgs/opsdroid/hooks",
+ "issues_url": "https://api.github.com/orgs/opsdroid/issues",
+ "members_url": "https://api.github.com/orgs/opsdroid/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/opsdroid/public_members{/member}",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "description": "An open source python chat-ops bot framework"
+ },
+ "sender": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/tests/responses/github_label_payload.json b/tests/responses/github_label_payload.json
new file mode 100644
--- /dev/null
+++ b/tests/responses/github_label_payload.json
@@ -0,0 +1,144 @@
+{
+ "action": "created",
+ "label": {
+ "id": 1130762664,
+ "node_id": "MDU6TGFiZWwxMTMwNzYyNjY0",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid/labels/test",
+ "name": "test",
+ "color": "bfdadc",
+ "default": false
+ },
+ "repository": {
+ "id": 64034523,
+ "node_id": "MDEwOlJlcG9zaXRvcnk2NDAzNDUyMw==",
+ "name": "opsdroid",
+ "full_name": "opsdroid/opsdroid",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid",
+ "description": "๐ค An open source chat-ops bot framework",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid/deployments",
+ "created_at": "2016-07-23T20:18:56Z",
+ "updated_at": "2018-11-15T08:44:25Z",
+ "pushed_at": "2018-11-16T12:16:50Z",
+ "git_url": "git://github.com/opsdroid/opsdroid.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid",
+ "homepage": "https://opsdroid.github.io",
+ "size": 677,
+ "stargazers_count": 199,
+ "watchers_count": 199,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": false,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 100,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 55,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 100,
+ "open_issues": 55,
+ "watchers": 199,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "url": "https://api.github.com/orgs/opsdroid",
+ "repos_url": "https://api.github.com/orgs/opsdroid/repos",
+ "events_url": "https://api.github.com/orgs/opsdroid/events",
+ "hooks_url": "https://api.github.com/orgs/opsdroid/hooks",
+ "issues_url": "https://api.github.com/orgs/opsdroid/issues",
+ "members_url": "https://api.github.com/orgs/opsdroid/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/opsdroid/public_members{/member}",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "description": "An open source python chat-ops bot framework"
+ },
+ "sender": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/tests/responses/github_pr_payload.json b/tests/responses/github_pr_payload.json
new file mode 100644
--- /dev/null
+++ b/tests/responses/github_pr_payload.json
@@ -0,0 +1,482 @@
+{
+ "action": "opened",
+ "number": 175,
+ "pull_request": {
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175",
+ "id": 231512167,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MjMxNTEyMTY3",
+ "html_url": "https://github.com/opsdroid/opsdroid-audio/pull/175",
+ "diff_url": "https://github.com/opsdroid/opsdroid-audio/pull/175.diff",
+ "patch_url": "https://github.com/opsdroid/opsdroid-audio/pull/175.patch",
+ "issue_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/175",
+ "number": 175,
+ "state": "open",
+ "locked": false,
+ "title": "Update pytest-timeout to 1.3.3",
+ "user": {
+ "login": "pyup-bot",
+ "id": 16239342,
+ "node_id": "MDQ6VXNlcjE2MjM5MzQy",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/16239342?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/pyup-bot",
+ "html_url": "https://github.com/pyup-bot",
+ "followers_url": "https://api.github.com/users/pyup-bot/followers",
+ "following_url": "https://api.github.com/users/pyup-bot/following{/other_user}",
+ "gists_url": "https://api.github.com/users/pyup-bot/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/pyup-bot/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/pyup-bot/subscriptions",
+ "organizations_url": "https://api.github.com/users/pyup-bot/orgs",
+ "repos_url": "https://api.github.com/users/pyup-bot/repos",
+ "events_url": "https://api.github.com/users/pyup-bot/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/pyup-bot/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "hello world",
+ "created_at": "2018-11-16T12:17:49Z",
+ "updated_at": "2018-11-16T12:17:49Z",
+ "closed_at": null,
+ "merged_at": null,
+ "merge_commit_sha": null,
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "requested_reviewers": [
+
+ ],
+ "requested_teams": [
+
+ ],
+ "labels": [
+
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175/commits",
+ "review_comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175/comments",
+ "review_comment_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/175/comments",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/e9e14486799f07449f1e0c6dd1c7601f74ea529d",
+ "head": {
+ "label": "opsdroid:pyup-update-pytest-timeout-1.2.1-to-1.3.3",
+ "ref": "pyup-update-pytest-timeout-1.2.1-to-1.3.3",
+ "sha": "e9e14486799f07449f1e0c6dd1c7601f74ea529d",
+ "user": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 83927663,
+ "node_id": "MDEwOlJlcG9zaXRvcnk4MzkyNzY2Mw==",
+ "name": "opsdroid-audio",
+ "full_name": "opsdroid/opsdroid-audio",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid-audio",
+ "description": "๐ฃ A companion application for opsdroid which adds hotwords, speech recognition and audio responses.",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/deployments",
+ "created_at": "2017-03-04T22:04:15Z",
+ "updated_at": "2018-05-21T17:47:36Z",
+ "pushed_at": "2018-11-16T12:17:48Z",
+ "git_url": "git://github.com/opsdroid/opsdroid-audio.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid-audio.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid-audio.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid-audio",
+ "homepage": null,
+ "size": 2453,
+ "stargazers_count": 3,
+ "watchers_count": 3,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": false,
+ "forks_count": 2,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 28,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 2,
+ "open_issues": 28,
+ "watchers": 3,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "opsdroid:master",
+ "ref": "master",
+ "sha": "b32a39d57d34422362c06346220069b196e00f62",
+ "user": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 83927663,
+ "node_id": "MDEwOlJlcG9zaXRvcnk4MzkyNzY2Mw==",
+ "name": "opsdroid-audio",
+ "full_name": "opsdroid/opsdroid-audio",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid-audio",
+ "description": "๐ฃ A companion application for opsdroid which adds hotwords, speech recognition and audio responses.",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/deployments",
+ "created_at": "2017-03-04T22:04:15Z",
+ "updated_at": "2018-05-21T17:47:36Z",
+ "pushed_at": "2018-11-16T12:17:48Z",
+ "git_url": "git://github.com/opsdroid/opsdroid-audio.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid-audio.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid-audio.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid-audio",
+ "homepage": null,
+ "size": 2453,
+ "stargazers_count": 3,
+ "watchers_count": 3,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": false,
+ "forks_count": 2,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 28,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 2,
+ "open_issues": 28,
+ "watchers": 3,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175"
+ },
+ "html": {
+ "href": "https://github.com/opsdroid/opsdroid-audio/pull/175"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/175"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/175/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls/175/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/e9e14486799f07449f1e0c6dd1c7601f74ea529d"
+ }
+ },
+ "author_association": "CONTRIBUTOR",
+ "merged": false,
+ "mergeable": null,
+ "rebaseable": null,
+ "mergeable_state": "unknown",
+ "merged_by": null,
+ "comments": 0,
+ "review_comments": 0,
+ "maintainer_can_modify": false,
+ "commits": 1,
+ "additions": 1,
+ "deletions": 1,
+ "changed_files": 1
+ },
+ "repository": {
+ "id": 83927663,
+ "node_id": "MDEwOlJlcG9zaXRvcnk4MzkyNzY2Mw==",
+ "name": "opsdroid-audio",
+ "full_name": "opsdroid/opsdroid-audio",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid-audio",
+ "description": "๐ฃ A companion application for opsdroid which adds hotwords, speech recognition and audio responses.",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/deployments",
+ "created_at": "2017-03-04T22:04:15Z",
+ "updated_at": "2018-05-21T17:47:36Z",
+ "pushed_at": "2018-11-16T12:17:48Z",
+ "git_url": "git://github.com/opsdroid/opsdroid-audio.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid-audio.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid-audio.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid-audio",
+ "homepage": null,
+ "size": 2453,
+ "stargazers_count": 3,
+ "watchers_count": 3,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": false,
+ "forks_count": 2,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 28,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 2,
+ "open_issues": 28,
+ "watchers": 3,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "url": "https://api.github.com/orgs/opsdroid",
+ "repos_url": "https://api.github.com/orgs/opsdroid/repos",
+ "events_url": "https://api.github.com/orgs/opsdroid/events",
+ "hooks_url": "https://api.github.com/orgs/opsdroid/hooks",
+ "issues_url": "https://api.github.com/orgs/opsdroid/issues",
+ "members_url": "https://api.github.com/orgs/opsdroid/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/opsdroid/public_members{/member}",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "description": "An open source python chat-ops bot framework"
+ },
+ "sender": {
+ "login": "pyup-bot",
+ "id": 16239342,
+ "node_id": "MDQ6VXNlcjE2MjM5MzQy",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/16239342?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/pyup-bot",
+ "html_url": "https://github.com/pyup-bot",
+ "followers_url": "https://api.github.com/users/pyup-bot/followers",
+ "following_url": "https://api.github.com/users/pyup-bot/following{/other_user}",
+ "gists_url": "https://api.github.com/users/pyup-bot/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/pyup-bot/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/pyup-bot/subscriptions",
+ "organizations_url": "https://api.github.com/users/pyup-bot/orgs",
+ "repos_url": "https://api.github.com/users/pyup-bot/repos",
+ "events_url": "https://api.github.com/users/pyup-bot/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/pyup-bot/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/tests/responses/github_status_payload.json b/tests/responses/github_status_payload.json
new file mode 100644
--- /dev/null
+++ b/tests/responses/github_status_payload.json
@@ -0,0 +1,295 @@
+{
+ "id": 5815600009,
+ "sha": "b32a39d57d34422362c06346220069b196e00f62",
+ "name": "opsdroid/opsdroid-audio",
+ "target_url": "https://travis-ci.org/opsdroid/opsdroid-audio/builds/455948125?utm_source=github_status&utm_medium=notification",
+ "context": "continuous-integration/travis-ci/push",
+ "description": "The Travis CI build is in progress",
+ "state": "pending",
+ "commit": {
+ "sha": "b32a39d57d34422362c06346220069b196e00f62",
+ "node_id": "MDY6Q29tbWl0ODM5Mjc2NjM6YjMyYTM5ZDU3ZDM0NDIyMzYyYzA2MzQ2MjIwMDY5YjE5NmUwMGY2Mg==",
+ "commit": {
+ "author": {
+ "name": "pyup.io bot",
+ "email": "[email protected]",
+ "date": "2018-04-25T14:55:42Z"
+ },
+ "committer": {
+ "name": "Jacob Tomlinson",
+ "email": "[email protected]",
+ "date": "2018-04-25T14:55:42Z"
+ },
+ "message": "Update pytest from 3.5.0 to 3.5.1 (#109)",
+ "tree": {
+ "sha": "b28528bd5d8c9660a23ef969486303a75b5100c5",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/trees/b28528bd5d8c9660a23ef969486303a75b5100c5"
+ },
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/commits/b32a39d57d34422362c06346220069b196e00f62",
+ "comment_count": 0,
+ "verification": {
+ "verified": false,
+ "reason": "unsigned",
+ "signature": null,
+ "payload": null
+ }
+ },
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/b32a39d57d34422362c06346220069b196e00f62",
+ "html_url": "https://github.com/opsdroid/opsdroid-audio/commit/b32a39d57d34422362c06346220069b196e00f62",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/b32a39d57d34422362c06346220069b196e00f62/comments",
+ "author": {
+ "login": "pyup-bot",
+ "id": 16239342,
+ "node_id": "MDQ6VXNlcjE2MjM5MzQy",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/16239342?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/pyup-bot",
+ "html_url": "https://github.com/pyup-bot",
+ "followers_url": "https://api.github.com/users/pyup-bot/followers",
+ "following_url": "https://api.github.com/users/pyup-bot/following{/other_user}",
+ "gists_url": "https://api.github.com/users/pyup-bot/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/pyup-bot/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/pyup-bot/subscriptions",
+ "organizations_url": "https://api.github.com/users/pyup-bot/orgs",
+ "repos_url": "https://api.github.com/users/pyup-bot/repos",
+ "events_url": "https://api.github.com/users/pyup-bot/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/pyup-bot/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "6c79a4a8e0f1b65a39941a7ef736804b08c92a2c",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/6c79a4a8e0f1b65a39941a7ef736804b08c92a2c",
+ "html_url": "https://github.com/opsdroid/opsdroid-audio/commit/6c79a4a8e0f1b65a39941a7ef736804b08c92a2c"
+ }
+ ]
+ },
+ "branches": [
+ {
+ "name": "master",
+ "commit": {
+ "sha": "b32a39d57d34422362c06346220069b196e00f62",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/b32a39d57d34422362c06346220069b196e00f62"
+ }
+ },
+ {
+ "name": "pyup-update-astroid-1.5.3-to-2.0.4",
+ "commit": {
+ "sha": "d1842000c15200910e30806d3d90cd7597377a18",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/d1842000c15200910e30806d3d90cd7597377a18"
+ }
+ },
+ {
+ "name": "pyup-update-coveralls-1.3.0-to-1.5.1",
+ "commit": {
+ "sha": "362867f61a0e1d49310731d72a6116daf1100ef8",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/362867f61a0e1d49310731d72a6116daf1100ef8"
+ }
+ },
+ {
+ "name": "pyup-update-flake8-3.5.0-to-3.6.0",
+ "commit": {
+ "sha": "77d305afa6e7da4a1c6da1b96ceeea52eb439a9d",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/77d305afa6e7da4a1c6da1b96ceeea52eb439a9d"
+ }
+ },
+ {
+ "name": "pyup-update-google-api-python-client-1.6.6-to-1.7.4",
+ "commit": {
+ "sha": "955d578ce3ba1c6d4548c34d64fd16682b36258e",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/955d578ce3ba1c6d4548c34d64fd16682b36258e"
+ }
+ },
+ {
+ "name": "pyup-update-gtts-1.2.2-to-2.0.1",
+ "commit": {
+ "sha": "408df3a2cb290e2815a04d78a1dff878910fc25e",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/408df3a2cb290e2815a04d78a1dff878910fc25e"
+ }
+ },
+ {
+ "name": "pyup-update-pydocstyle-2.1.1-to-3.0.0",
+ "commit": {
+ "sha": "d1e4b25ad3c34222b1e54d43d541348a3b49b0f7",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/d1e4b25ad3c34222b1e54d43d541348a3b49b0f7"
+ }
+ },
+ {
+ "name": "pyup-update-pylint-1.7.5-to-2.1.1",
+ "commit": {
+ "sha": "226e0db99dc353f6dc7b7e63700bbedb13af5751",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/226e0db99dc353f6dc7b7e63700bbedb13af5751"
+ }
+ },
+ {
+ "name": "pyup-update-pytest-3.5.1-to-4.0.0",
+ "commit": {
+ "sha": "56efcde3bb35c4c9e488146d1964e3e78e45b673",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/56efcde3bb35c4c9e488146d1964e3e78e45b673"
+ }
+ },
+ {
+ "name": "pyup-update-pytest-cov-2.5.1-to-2.6.0",
+ "commit": {
+ "sha": "d7dbd635df656c868452678644d5e9b3b9ef7033",
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits/d7dbd635df656c868452678644d5e9b3b9ef7033"
+ }
+ }
+ ],
+ "created_at": "2018-11-16T12:17:49+00:00",
+ "updated_at": "2018-11-16T12:17:49+00:00",
+ "repository": {
+ "id": 83927663,
+ "node_id": "MDEwOlJlcG9zaXRvcnk4MzkyNzY2Mw==",
+ "name": "opsdroid-audio",
+ "full_name": "opsdroid/opsdroid-audio",
+ "private": false,
+ "owner": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/opsdroid",
+ "html_url": "https://github.com/opsdroid",
+ "followers_url": "https://api.github.com/users/opsdroid/followers",
+ "following_url": "https://api.github.com/users/opsdroid/following{/other_user}",
+ "gists_url": "https://api.github.com/users/opsdroid/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/opsdroid/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/opsdroid/subscriptions",
+ "organizations_url": "https://api.github.com/users/opsdroid/orgs",
+ "repos_url": "https://api.github.com/users/opsdroid/repos",
+ "events_url": "https://api.github.com/users/opsdroid/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/opsdroid/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/opsdroid/opsdroid-audio",
+ "description": "๐ฃ A companion application for opsdroid which adds hotwords, speech recognition and audio responses.",
+ "fork": false,
+ "url": "https://api.github.com/repos/opsdroid/opsdroid-audio",
+ "forks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/forks",
+ "keys_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/teams",
+ "hooks_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/hooks",
+ "issue_events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/events",
+ "assignees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/tags",
+ "blobs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/languages",
+ "stargazers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/stargazers",
+ "contributors_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contributors",
+ "subscribers_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscribers",
+ "subscription_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/subscription",
+ "commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/merges",
+ "archive_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/downloads",
+ "issues_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/labels{/name}",
+ "releases_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/opsdroid/opsdroid-audio/deployments",
+ "created_at": "2017-03-04T22:04:15Z",
+ "updated_at": "2018-05-21T17:47:36Z",
+ "pushed_at": "2018-11-16T12:17:48Z",
+ "git_url": "git://github.com/opsdroid/opsdroid-audio.git",
+ "ssh_url": "[email protected]:opsdroid/opsdroid-audio.git",
+ "clone_url": "https://github.com/opsdroid/opsdroid-audio.git",
+ "svn_url": "https://github.com/opsdroid/opsdroid-audio",
+ "homepage": null,
+ "size": 2453,
+ "stargazers_count": 3,
+ "watchers_count": 3,
+ "language": "Python",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": false,
+ "forks_count": 2,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 27,
+ "license": {
+ "key": "apache-2.0",
+ "name": "Apache License 2.0",
+ "spdx_id": "Apache-2.0",
+ "url": "https://api.github.com/licenses/apache-2.0",
+ "node_id": "MDc6TGljZW5zZTI="
+ },
+ "forks": 2,
+ "open_issues": 27,
+ "watchers": 3,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "opsdroid",
+ "id": 20677702,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjIwNjc3NzAy",
+ "url": "https://api.github.com/orgs/opsdroid",
+ "repos_url": "https://api.github.com/orgs/opsdroid/repos",
+ "events_url": "https://api.github.com/orgs/opsdroid/events",
+ "hooks_url": "https://api.github.com/orgs/opsdroid/hooks",
+ "issues_url": "https://api.github.com/orgs/opsdroid/issues",
+ "members_url": "https://api.github.com/orgs/opsdroid/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/opsdroid/public_members{/member}",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/20677702?v=4",
+ "description": "An open source python chat-ops bot framework"
+ },
+ "sender": {
+ "login": "jacobtomlinson",
+ "id": 1610850,
+ "node_id": "MDQ6VXNlcjE2MTA4NTA=",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/1610850?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jacobtomlinson",
+ "html_url": "https://github.com/jacobtomlinson",
+ "followers_url": "https://api.github.com/users/jacobtomlinson/followers",
+ "following_url": "https://api.github.com/users/jacobtomlinson/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jacobtomlinson/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jacobtomlinson/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jacobtomlinson/subscriptions",
+ "organizations_url": "https://api.github.com/users/jacobtomlinson/orgs",
+ "repos_url": "https://api.github.com/users/jacobtomlinson/repos",
+ "events_url": "https://api.github.com/users/jacobtomlinson/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jacobtomlinson/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/tests/test_connector_github.py b/tests/test_connector_github.py
new file mode 100644
--- /dev/null
+++ b/tests/test_connector_github.py
@@ -0,0 +1,212 @@
+"""Tests for the RocketChat class."""
+
+import os.path
+
+import asyncio
+import unittest
+import unittest.mock as mock
+import asynctest
+import asynctest.mock as amock
+
+from opsdroid.__main__ import configure_lang
+from opsdroid.core import OpsDroid
+from opsdroid.connector.github import ConnectorGitHub
+from opsdroid.message import Message
+
+
+class TestConnectorGitHub(unittest.TestCase):
+ """Test the opsdroid github connector class."""
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+
+ def test_init(self):
+ """Test that the connector is initialised properly."""
+ connector = ConnectorGitHub({
+ 'name': 'github',
+ 'token': 'test'
+ })
+ self.assertEqual(None, connector.default_room)
+ self.assertEqual("github", connector.name)
+
+ def test_missing_token(self):
+ """Test that attempt to connect without info raises an error."""
+ with mock.patch('opsdroid.connector.github._LOGGER.error') \
+ as logmock:
+ ConnectorGitHub({})
+ self.assertTrue(logmock.called)
+
+
+class TestConnectorGitHubAsync(asynctest.TestCase):
+ """Test the async methods of the opsdroid github connector class."""
+
+ def setUp(self):
+ configure_lang({})
+ self.connector = ConnectorGitHub({
+ 'name': 'github',
+ 'token': 'test'
+ })
+
+ async def test_connect(self):
+ opsdroid_mock = amock.CoroutineMock()
+ with amock.patch('aiohttp.ClientSession.get') as patched_request:
+ mockresponse = amock.CoroutineMock()
+ mockresponse.status = 200
+ mockresponse.json = amock.CoroutineMock(return_value={
+ "login": 'opsdroid'
+ })
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(mockresponse)
+ await self.connector.connect(opsdroid_mock)
+ self.assertEqual(self.connector.github_username, "opsdroid")
+ self.assertTrue(opsdroid_mock.web_server.web_app.router.add_post.called)
+
+ async def test_connect_failure(self):
+ result = amock.MagicMock()
+ result.status = 401
+
+ with OpsDroid() as opsdroid, \
+ amock.patch('aiohttp.ClientSession.get') as patched_request, \
+ amock.patch('opsdroid.connector.github._LOGGER.error',) \
+ as logmock:
+
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(result)
+
+ await self.connector.connect(opsdroid)
+ self.assertTrue(logmock.called)
+
+ async def test_disconnect(self):
+ self.assertEqual(await self.connector.disconnect(None), None)
+
+ async def test_get_comment(self):
+ """Test a comment create event creates a message and parses it."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'responses',
+ 'github_comment_payload.json'), 'r') as f:
+ mock_request = amock.CoroutineMock()
+ mock_request.post = amock.CoroutineMock(return_value={
+ 'payload': f.read()
+ })
+ self.connector.opsdroid = amock.CoroutineMock()
+ self.connector.opsdroid.parse = amock.CoroutineMock()
+ await self.connector.github_message_handler(mock_request)
+ message = self.connector.opsdroid.parse.call_args[0][0]
+ self.assertEqual(message.connector.name, 'github')
+ self.assertEqual(message.text, 'hello')
+ self.assertEqual(message.room, 'opsdroid/opsdroid#237')
+ self.assertTrue(self.connector.opsdroid.parse.called)
+
+ async def test_get_pr(self):
+ """Test a PR create event creates a message and parses it."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'responses',
+ 'github_pr_payload.json'), 'r') as f:
+ mock_request = amock.CoroutineMock()
+ mock_request.post = amock.CoroutineMock(return_value={
+ 'payload': f.read()
+ })
+ self.connector.opsdroid = amock.CoroutineMock()
+ self.connector.opsdroid.parse = amock.CoroutineMock()
+ await self.connector.github_message_handler(mock_request)
+ message = self.connector.opsdroid.parse.call_args[0][0]
+ self.assertEqual(message.connector.name, 'github')
+ self.assertEqual(message.text, 'hello world')
+ self.assertEqual(message.room, 'opsdroid/opsdroid-audio#175')
+ self.assertTrue(self.connector.opsdroid.parse.called)
+
+ async def test_get_issue(self):
+ """Test an issue create event creates a message and parses it."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'responses',
+ 'github_issue_payload.json'), 'r') as f:
+ mock_request = amock.CoroutineMock()
+ mock_request.post = amock.CoroutineMock(return_value={
+ 'payload': f.read()
+ })
+ self.connector.opsdroid = amock.CoroutineMock()
+ self.connector.opsdroid.parse = amock.CoroutineMock()
+ await self.connector.github_message_handler(mock_request)
+ message = self.connector.opsdroid.parse.call_args[0][0]
+ self.assertEqual(message.connector.name, 'github')
+ self.assertEqual(message.text, 'test')
+ self.assertEqual(message.room, 'opsdroid/opsdroid#740')
+ self.assertTrue(self.connector.opsdroid.parse.called)
+
+ async def test_get_label(self):
+ """Test a label create event doesn't create a message and parse it."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'responses',
+ 'github_label_payload.json'), 'r') as f:
+ mock_request = amock.CoroutineMock()
+ mock_request.post = amock.CoroutineMock(return_value={
+ 'payload': f.read()
+ })
+ self.connector.opsdroid = amock.CoroutineMock()
+ self.connector.opsdroid.parse = amock.CoroutineMock()
+ await self.connector.github_message_handler(mock_request)
+ self.assertFalse(self.connector.opsdroid.parse.called)
+
+ async def test_get_no_action(self):
+ """Test a status event doesn't create a message and parse it."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'responses',
+ 'github_status_payload.json'), 'r') as f:
+ mock_request = amock.CoroutineMock()
+ mock_request.post = amock.CoroutineMock(return_value={
+ 'payload': f.read()
+ })
+ self.connector.opsdroid = amock.CoroutineMock()
+ self.connector.opsdroid.parse = amock.CoroutineMock()
+ await self.connector.github_message_handler(mock_request)
+ self.assertFalse(self.connector.opsdroid.parse.called)
+
+ async def test_listen(self):
+ """Test the listen method.
+
+ The GitHub connector listens using an API endoint and so the listen
+ method should just pass and do nothing. We just need to test that it
+ does not block.
+
+ """
+ self.assertEqual(await self.connector.listen(None), None)
+
+ async def test_respond(self):
+ with amock.patch('aiohttp.ClientSession.post') as patched_request:
+ mockresponse = amock.CoroutineMock()
+ mockresponse.status = 201
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(mockresponse)
+ resp = await self.connector.respond(
+ Message('test', 'jacobtomlinson',
+ 'opsdroid/opsdroid#1', self.connector))
+ self.assertTrue(patched_request.called)
+ self.assertTrue(resp)
+
+ async def test_respond_bot_short(self):
+ with amock.patch('aiohttp.ClientSession.post') as patched_request:
+ mockresponse = amock.CoroutineMock()
+ mockresponse.status = 201
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(mockresponse)
+ self.connector.github_username = 'opsdroid-bot'
+ resp = await self.connector.respond(
+ Message('test', 'opsdroid-bot',
+ 'opsdroid/opsdroid#1', self.connector))
+ self.assertFalse(patched_request.called)
+ self.assertTrue(resp)
+
+ async def test_respond_failure(self):
+ with amock.patch('aiohttp.ClientSession.post') as patched_request:
+ mockresponse = amock.CoroutineMock()
+ mockresponse.status = 400
+ mockresponse.json = amock.CoroutineMock(return_value={
+ "error": 'some error'
+ })
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(mockresponse)
+ resp = await self.connector.respond(
+ Message('test', 'opsdroid-bot',
+ 'opsdroid/opsdroid#1', self.connector))
+ self.assertTrue(patched_request.called)
+ self.assertFalse(resp)
| Move GitHub connector into core
This issue covers adding the [GitHub connector](https://github.com/opsdroid/connector-github) to core.
## Background
A while ago we began moving connectors from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.connector`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/connector) and copy the connector code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the connector if necessary.
- Write tests for the connector. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the connector `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of connectors](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#connector-modules).
- Add a deprecation notice to the old connector. (See [the slack connector](https://github.com/opsdroid/connector-slack))
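For orientation, the first couple of steps above end up producing a submodule shaped roughly like the sketch below. The class layout and method names follow the connector patches elsewhere in this document, but the exact base-class signatures differ between opsdroid versions, so treat this as an illustrative skeleton rather than the real GitHub connector code.
```python
"""Illustrative connector skeleton only - not the actual GitHub connector."""
import logging

from opsdroid.connector import Connector  # import path assumed from the core connectors

_LOGGER = logging.getLogger(__name__)


class ConnectorExample(Connector):
    """Minimal shape of a connector living under opsdroid.connector.<name>."""

    def __init__(self, config, opsdroid=None):
        # signature assumed; some opsdroid versions only pass `config` here
        super().__init__(config, opsdroid=opsdroid)
        self.name = config.get("name", "example")
        self.token = config.get("token")

    async def connect(self):
        """Authenticate against the remote service."""
        _LOGGER.debug("Connecting %s", self.name)

    async def listen(self):
        """Consume new events until opsdroid shuts down."""

    async def respond(self, message, room=None):
        """Send a reply back to the service."""
        _LOGGER.debug("Responding with: %s", message.text)
```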
| @jacobtomlinson I am interested in taking a stab at this.
Hello @souravsingh please go ahead and work on this issue, let us know if you need any help ๐
Hello @souravsingh how are you doing with this issue, do you need any help with it?
Hey @souravsingh I'm guessing you've not had time to work on this. If anyone else wants to take this up then please go ahead.
I have been getting busy with work. Let me create a WIP pull request.
@souravsingh That's no problem! I look forward to reviewing your PR. | 2018-11-17T08:43:29 |
opsdroid/opsdroid | 755 | opsdroid__opsdroid-755 | [
"661"
] | d1bb5cb89030c7988042f09273a19b8f5af9c9ee | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -11,7 +11,7 @@
import sys
import tempfile
import urllib.request
-from collections import Mapping
+from collections.abc import Mapping
import yaml
@@ -47,20 +47,26 @@ def import_module_from_spec(module_spec):
@staticmethod
def import_module(config):
"""Import module namespace as variable and return it."""
- # Check if the module can be imported and proceed with import
-
- # Proceed only if config.name is specified
- # and parent module can be imported
- if config["name"] and importlib.util.find_spec(config["module_path"]):
- module_spec = importlib.util.find_spec(config["module_path"] +
- "." + config["name"])
- if module_spec:
- module = Loader.import_module_from_spec(module_spec)
- _LOGGER.debug(_("Loaded %s: %s"), config["type"],
- config["module_path"])
- return module
-
- module_spec = importlib.util.find_spec(config["module_path"])
+ # Try to import the module from various locations, return the first
+ # successful import, or None if they all failed
+ #
+ # 1. try to import the module directly off PYTHONPATH
+ # 2. try to import a module with the given name in the module_path
+ # 3. try to import the module_path itself
+ module_spec = None
+ namespaces = [
+ config["module"],
+ config["module_path"] + '.' + config["name"],
+ config["module_path"],
+ ]
+ for namespace in namespaces:
+ try:
+ module_spec = importlib.util.find_spec(namespace)
+ if module_spec:
+ break
+ except (ImportError, AttributeError):
+ continue
+
if module_spec:
module = Loader.import_module_from_spec(module_spec)
_LOGGER.debug(_("Loaded %s: %s"),
@@ -69,7 +75,6 @@ def import_module(config):
_LOGGER.error(_("Failed to load %s: %s"),
config["type"], config["module_path"])
-
return None
@staticmethod
@@ -310,8 +315,10 @@ def _load_modules(self, modules_type, modules):
if not isinstance(config, Mapping):
config = {}
config["name"] = module
+ config["module"] = ''
else:
config["name"] = module['name']
+ config["module"] = module.get("module", '')
config["type"] = modules_type
config["is_builtin"] = self.is_builtin_module(config)
config["module_path"] = self.build_module_import_path(config)
@@ -319,7 +326,9 @@ def _load_modules(self, modules_type, modules):
if "branch" not in config:
config["branch"] = DEFAULT_MODULE_BRANCH
- if not config["is_builtin"]:
+ # If the module isn't builtin, or isn't already on the
+ # python path, install it
+ if not (config["is_builtin"] or config["module"]):
# Remove module for reinstall if no-cache set
self.check_cache(config)
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -241,6 +241,7 @@ def test_import_module(self):
config["module_path"] = "os"
config["name"] = "path"
config["type"] = "system"
+ config["module"] = ""
module = ld.Loader.import_module(config)
self.assertIsInstance(module, ModuleType)
@@ -250,6 +251,7 @@ def test_import_module_new(self):
config["module_path"] = "os"
config["name"] = ""
config["type"] = "system"
+ config["module"] = ""
module = ld.Loader.import_module(config)
self.assertIsInstance(module, ModuleType)
@@ -259,10 +261,22 @@ def test_import_module_failure(self):
config["module_path"] = "nonexistant"
config["name"] = "module"
config["type"] = "broken"
+ config["module"] = ""
module = ld.Loader.import_module(config)
self.assertEqual(module, None)
+ def test_import_module_from_path(self):
+ config = {}
+ config["module_path"] = ""
+ config["name"] = "module"
+ config["type"] = ""
+ config["module"] = "os.path"
+
+ module = ld.Loader.import_module(config)
+ self.assertIsInstance(module, ModuleType)
+
+
def test_load_config(self):
opsdroid, loader = self.setup()
loader._load_modules = mock.MagicMock()
| Specify modules by Python module name
Currently to configure modules for opsdroid (connectors, databases and skills) you specify a `path` or `repo` config option which tells opsdroid where it can find that specific module. Then at run time opsdroid will pull all of those modules together into an internal location, append that to the Python path and import all the modules.
It would be sensible to also include support for using modules from the regular Python path. These could be specified with a `module` key and take the form of `module.submodule` (i.e. the same thing you would `import`). These would bypass the custom loader entirely and directly import the package.
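A minimal sketch of the lookup order this implies, mirroring the loader patch earlier in this record (the `module`, `module_path` and `name` keys are the ones that patch reads; this standalone helper is illustrative only):
```python
import importlib.util


def find_module_spec(config):
    """Return the first import spec found, preferring the plain Python path."""
    namespaces = [
        config.get("module", ""),                      # e.g. "mypackage.myskill" off PYTHONPATH
        config["module_path"] + "." + config["name"],  # module installed by opsdroid
        config["module_path"],                         # the module_path itself
    ]
    for namespace in namespaces:
        try:
            spec = importlib.util.find_spec(namespace)
        except (ImportError, AttributeError, ValueError):
            continue  # ValueError covers the empty "module" default
        if spec:
            return spec
    return None
```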
| 2018-11-24T05:14:52 |
|
opsdroid/opsdroid | 785 | opsdroid__opsdroid-785 | [
"670"
] | ac4940120a5ec37338efc525c602f63a8ea8ea38 | diff --git a/opsdroid/database/redis/__init__.py b/opsdroid/database/redis/__init__.py
new file mode 100644
--- /dev/null
+++ b/opsdroid/database/redis/__init__.py
@@ -0,0 +1,125 @@
+"""Module for storing data within Redis."""
+from datetime import date, datetime
+import json
+import time
+
+import asyncio_redis
+
+from opsdroid.database import Database
+
+
+class RedisDatabase(Database):
+ """Database class for storing data within a Redis instance."""
+
+ def __init__(self, config, opsdroid=None):
+ """Initialise the sqlite database.
+
+ Set basic properties of the database. Initialise properties like
+ name, connection arguments, database file, table name and config.
+
+ Args:
+ config (dict): The configuration of the database which consists
+ of `file` and `table` name of the sqlite database
+ specified in `configuration.yaml` file.
+
+ """
+ super().__init__(config, opsdroid=opsdroid)
+ self.config = config
+ self.client = None
+ self.host = self.config.get("host", "localhost")
+ self.port = self.config.get("port", 6379)
+ self.database = self.config.get("database", 0)
+ self.password = self.config.get("password", None)
+ self.reconnect = self.config.get("reconnect", False)
+
+ async def connect(self):
+ """Connect to the database.
+
+ This method will connect to a Redis database. By default it will
+ connect to Redis on localhost on port 6379
+
+ """
+ self.client = await asyncio_redis.Connection.create(
+ host=self.host,
+ port=self.port,
+ db=self.database,
+ auto_reconnect=self.reconnect,
+ password=self.password,
+ )
+
+ async def put(self, key, data):
+ """Store the data object in Redis against the key.
+
+ Args:
+ key (string): The key to store the data object under.
+ data (object): The data object to store.
+
+ """
+ data = self.convert_object_to_timestamp(data)
+ await self.client.set(key, json.dumps(data))
+
+ async def get(self, key):
+ """Get data from Redis for a given key.
+
+ Args:
+ key (string): The key to lookup in the database.
+
+ Returns:
+ object or None: The data object stored for that key, or None if no
+ object found for that key.
+
+ """
+ data = await self.client.get(key)
+
+ if data:
+ return self.convert_timestamp_to_object(json.loads(data))
+
+ return None
+
+ async def disconnect(self):
+ """Disconnect from the database."""
+ self.client.close()
+
+ @staticmethod
+ def convert_object_to_timestamp(data):
+ """
+ Serialize dict before storing into Redis.
+
+ Args:
+ dict: Dict to serialize
+
+ Returns:
+ dict: Dict from redis to unserialize
+
+ """
+ for k, value in data.items():
+ if isinstance(value, (datetime, date)):
+ value = '::'.join([
+ type(value).__name__,
+ '%d' % time.mktime(value.timetuple())
+ ])
+ data[k] = value
+ return data
+
+ @staticmethod
+ def convert_timestamp_to_object(data):
+ """
+ Unserialize data from Redis.
+
+ Args:
+ dict: Dict from redis to unserialize
+
+ Returns:
+ dict: Dict to serialize
+
+ """
+ for k, value in data.items():
+ value_type = value.split('::', 1)[0]
+ if value_type == 'datetime':
+ timestamp = int(value.split('::', 1)[1])
+ value = datetime.fromtimestamp(timestamp)
+ elif value_type == 'date':
+ timestamp = int(value.split('::', 1)[1])
+ value = date.fromtimestamp(timestamp)
+ data[k] = value
+ return data
| diff --git a/tests/test_database_redis.py b/tests/test_database_redis.py
new file mode 100644
--- /dev/null
+++ b/tests/test_database_redis.py
@@ -0,0 +1,117 @@
+import asyncio
+import datetime
+import unittest
+
+import asynctest
+import asynctest.mock as amock
+
+from contextlib import suppress
+from opsdroid.database.redis import RedisDatabase
+from opsdroid.__main__ import configure_lang
+
+
+class MockRedisClient:
+ execute = None
+
+
+class TestRedisDatabase(unittest.TestCase):
+ """Test the opsdroid Redis database class."""
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ configure_lang({})
+
+ def test_init(self):
+ """Test initialisation of database class.
+
+ This method will test the initialisation of the database
+ class. It will assert if the database class properties are
+ declared and equated to None.
+
+ """
+ database = RedisDatabase({})
+ self.assertEqual(None, database.client)
+ self.assertEqual(0, database.database)
+ self.assertEqual("localhost", database.host)
+ self.assertEqual(6379, database.port)
+ self.assertEqual(None, database.password)
+
+ def test_other(self):
+ unserialized_data = {
+ "example_string": "test",
+ "example_datetime": datetime.datetime.utcfromtimestamp(1538389815),
+ "example_date": datetime.date.fromtimestamp(1538366400),
+ }
+
+ serialized_data = RedisDatabase.convert_object_to_timestamp(unserialized_data)
+
+ self.assertEqual(serialized_data["example_string"], "test")
+ # Typically I would do assertDictEqual on the result, but as datetime are parsed based on the
+ # timezone of the computer it makes the unittest fragile depending on the timezone of the user.
+ self.assertEqual(serialized_data["example_datetime"][0:10], "datetime::")
+ self.assertEqual(serialized_data["example_date"][0:6], "date::")
+
+ def test_convert_timestamp_to_object(self):
+ serialized_data = {
+ "example_date": "date::1538366400",
+ "example_datetime": "datetime::1538389815",
+ "example_string": "test"
+ }
+
+ unserialized_data = RedisDatabase.convert_timestamp_to_object(serialized_data)
+
+ self.assertEqual(unserialized_data["example_string"], "test")
+ # Typically I would do assertDictEqual on the result, but as datetime are parsed based on the
+ # timezone of the computer it makes the unittest fragile depending on the timezone of the user.
+ self.assertIsInstance(unserialized_data["example_datetime"], datetime.datetime)
+ self.assertIsInstance(unserialized_data["example_date"], datetime.date)
+
+
+class TestRedisDatabaseAsync(asynctest.TestCase):
+ """Test the opsdroid Redis Database class."""
+
+ async def test_connect(self):
+ opsdroid = amock.CoroutineMock()
+ database = RedisDatabase({}, opsdroid=opsdroid)
+ import asyncio_redis
+ with amock.patch.object(asyncio_redis.Connection, 'create') as mocked_connection:
+ mocked_connection.side_effect = NotImplementedError
+
+ with suppress(NotImplementedError):
+ await database.connect()
+ self.assertTrue(mocked_connection.called)
+
+ async def test_get(self):
+ db = RedisDatabase({})
+ db.client = MockRedisClient()
+ db.client.get = amock.CoroutineMock(return_value='{"key":"value"}')
+
+ result = await db.get("string")
+
+ self.assertDictEqual(result, dict(key="value"))
+
+ async def test_get_return_None(self):
+ db = RedisDatabase({})
+ db.client = MockRedisClient()
+ db.client.get = amock.CoroutineMock(return_value=None)
+
+ result = await db.get("string")
+
+ self.assertEqual(result, None)
+
+ async def test_put(self):
+ db = RedisDatabase({})
+ db.client = MockRedisClient()
+ db.client.set = amock.CoroutineMock(return_value='{"key":"value"}')
+
+ result = await db.put("string", dict(key="value"))
+
+ async def test_disconnect(self):
+ db = RedisDatabase({})
+ db.client = MockRedisClient()
+ db.client.close = amock.CoroutineMock()
+
+ result = await db.disconnect()
+
+ self.assertTrue(db.client.close.called)
+
| Move redis database module into core
This issue covers adding the [Redis database module](https://github.com/opsdroid/database-redis) to core.
## Background
A while ago we began moving connectors and databases from external plugins into the core of the project (see #185 for more context). We started with [slack](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/slack/__init__.py) and [websockets](https://github.com/opsdroid/opsdroid/blob/master/opsdroid/connector/websocket/__init__.py) connectors but need to go through all the other existing plugins and move them into the core.
## Steps
- Make a new submodule directory in [`opsdroid.database`](https://github.com/opsdroid/opsdroid/tree/master/opsdroid/database) and copy the database code over.
- Update the [`requirements.txt`](https://github.com/opsdroid/opsdroid/blob/master/requirements.txt) with any dependencies from the database if necessary.
- Write tests for the database. (See the [Slack connector tests](https://github.com/jacobtomlinson/opsdroid/blob/master/tests/test_connector_slack.py) for inspiration).
- Copy the relevant information from the database `README.md` into a [new documentation page](https://github.com/opsdroid/opsdroid/tree/master/docs/connectors).
- Add the new page to the [mkdocs.yml](https://github.com/opsdroid/opsdroid/blob/master/mkdocs.yml).
- Add to the [list of databases](https://github.com/opsdroid/opsdroid/blob/master/docs/configuration-reference.md#database-modules).
- Add a deprecation notice to the old database module. (See [the slack connector](https://github.com/opsdroid/connector-slack))
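Because Redis only stores strings, the module in this record's patch serialises `datetime` and `date` values into tagged timestamp strings before handing the dict to `json.dumps`. A small usage sketch of that round-trip (the values are made up; it assumes the module from the patch above is installed):
```python
from datetime import datetime

from opsdroid.database.redis import RedisDatabase

memory = {"user": "jacob", "last_seen": datetime(2018, 12, 18, 18, 13)}

# datetime/date values become tagged strings like "datetime::<epoch seconds>"
serialised = RedisDatabase.convert_object_to_timestamp(memory)

# ...and are rebuilt into datetime/date objects when read back out of Redis
restored = RedisDatabase.convert_timestamp_to_object(serialised)
assert isinstance(restored["last_seen"], datetime)
```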
| I will go ahead and tackle this issue. | 2018-12-18T18:13:51 |
opsdroid/opsdroid | 841 | opsdroid__opsdroid-841 | [
"839"
] | b4db89dab4aaed8b0403e0e98d942df683503162 | diff --git a/opsdroid/connector/telegram/__init__.py b/opsdroid/connector/telegram/__init__.py
--- a/opsdroid/connector/telegram/__init__.py
+++ b/opsdroid/connector/telegram/__init__.py
@@ -32,6 +32,9 @@ def __init__(self, config, opsdroid=None):
self.default_user = config.get("default-user", None)
self.whitelisted_users = config.get("whitelisted-users", None)
self.update_interval = config.get("update_interval", 1)
+ self.session = None
+ self._closing = asyncio.Event()
+ self.loop = asyncio.get_event_loop()
try:
self.token = config["token"]
@@ -39,6 +42,45 @@ def __init__(self, config, opsdroid=None):
_LOGGER.error("Unable to login: Access token is missing. "
"Telegram connector will be unavailable.")
+ @staticmethod
+ def get_user(response):
+ """Get user from response.
+
+ The API response is different depending on how
+ the bot is set up and where the message is coming
+ from. This method was created to keep if/else
+ statements to a minium on _parse_message.
+
+ Args:
+ response (dict): Response returned by aiohttp.ClientSession.
+
+ """
+ user = None
+ if "username" in response["message"]["from"]:
+ user = response["message"]["from"]["username"]
+
+ elif "first_name" in response["message"]["from"]:
+ user = response["message"]["from"]["first_name"]
+
+ return user
+
+ def handle_user_permission(self, response, user):
+ """Handle user permissions.
+
+ This will check if the user that tried to talk with
+ the bot is allowed to do so. It will also work with
+ userid to improve security.
+
+ """
+ user_id = response["message"]["from"]["id"]
+
+ if not self.whitelisted_users or \
+ user in self.whitelisted_users or \
+ user_id in self.whitelisted_users:
+ return True
+
+ return False
+
def build_url(self, method):
"""Build the url to connect to the API.
@@ -51,6 +93,23 @@ def build_url(self, method):
"""
return "https://api.telegram.org/bot{}/{}".format(self.token, method)
+ async def delete_webhook(self):
+ """Delete Telegram webhook.
+
+ The Telegram api will thrown an 409 error when an webhook is
+ active and a call to getUpdates is made. This method will
+ try to request the deletion of the webhook to make the getUpdate
+ request possible.
+
+ """
+ _LOGGER.debug("Sending deleteWebhook request to Telegram...")
+ resp = await self.session.get(self.build_url("deleteWebhook"))
+
+ if resp.status == 200:
+ _LOGGER.debug("Telegram webhook deleted successfully.")
+ else:
+ _LOGGER.debug("Unable to delete webhook.")
+
async def connect(self):
"""Connect to Telegram.
@@ -60,18 +119,18 @@ async def connect(self):
"""
_LOGGER.debug("Connecting to telegram")
- async with aiohttp.ClientSession() as session:
- resp = await session.get(self.build_url("getMe"))
-
- if resp.status != 200:
- _LOGGER.error("Unable to connect")
- _LOGGER.error("Telegram error %s, %s",
- resp.status, resp.text)
- else:
- json = await resp.json()
- _LOGGER.debug(json)
- _LOGGER.debug("Connected to telegram as %s",
- json["result"]["username"])
+ self.session = aiohttp.ClientSession()
+ resp = await self.session.get(self.build_url("getMe"))
+
+ if resp.status != 200:
+ _LOGGER.error("Unable to connect")
+ _LOGGER.error("Telegram error %s, %s",
+ resp.status, resp.text)
+ else:
+ json = await resp.json()
+ _LOGGER.debug(json)
+ _LOGGER.debug("Connected to telegram as %s",
+ json["result"]["username"])
async def _parse_message(self, response):
"""Handle logic to parse a received message.
@@ -93,23 +152,26 @@ async def _parse_message(self, response):
"""
for result in response["result"]:
_LOGGER.debug(result)
- if result["message"]["text"]:
- user = result["message"]["from"]["username"]
-
+ if "channel" in result["message"]["chat"]["type"]:
+ _LOGGER.debug("Channel message parsing not supported "
+ "- Ignoring message")
+ elif "message" in result and "text" in result["message"]:
+ user = self.get_user(result)
message = Message(
user,
result["message"]["chat"],
self,
result["message"]["text"])
- if not self.whitelisted_users or \
- user in self.whitelisted_users:
+ if self.handle_user_permission(result, user):
await self.opsdroid.parse(message)
else:
message.text = "Sorry, you're not allowed " \
"to speak with this bot."
await self.respond(message)
self.latest_update = result["update_id"] + 1
+ else:
+ _LOGGER.error("Unable to parse the message.")
async def _get_messages(self):
"""Connect to the Telegram API.
@@ -124,23 +186,30 @@ async def _get_messages(self):
return an empty {}.
"""
- async with aiohttp.ClientSession() as session:
- data = {}
- if self.latest_update is not None:
- data["offset"] = self.latest_update
- resp = await session.get(self.build_url("getUpdates"),
- params=data)
- if resp.status != 200:
- _LOGGER.error("Telegram error %s, %s",
- resp.status, resp.text)
- self.listening = False
-
- else:
- json = await resp.json()
-
- await self._parse_message(json)
-
- async def listen(self):
+ data = {}
+ if self.latest_update is not None:
+ data["offset"] = self.latest_update
+
+ await asyncio.sleep(self.update_interval)
+ resp = await self.session.get(self.build_url("getUpdates"),
+ params=data)
+
+ if resp.status == 409:
+ _LOGGER.info("Can't get updates because previous "
+ "webhook is still active. Will try to "
+ "delete webhook.")
+ await self.delete_webhook()
+
+ if resp.status != 200:
+ _LOGGER.error("Telegram error %s, %s",
+ resp.status, resp.text)
+ self.listening = False
+ else:
+ json = await resp.json()
+
+ await self._parse_message(json)
+
+ async def get_messages_loop(self):
"""Listen for and parse new messages.
The bot will always listen to all opened chat windows,
@@ -153,14 +222,22 @@ async def listen(self):
config.yaml with the param update-interval - this
defaults to 1 second.
- Args:
- opsdroid (OpsDroid): An instance of opsdroid core.
-
"""
while self.listening:
await self._get_messages()
- await asyncio.sleep(self.update_interval)
+ async def listen(self):
+ """Listen method of the connector.
+
+ Every connector has to implement the listen method. When an
+ infinite loop is running, it becomes hard to cancel this task.
+ So we are creating a task and set it on a variable so we can
+ cancel the task.
+
+ """
+ message_getter = self.loop.create_task(self.get_messages_loop())
+ await self._closing.wait()
+ message_getter.cancel()
async def respond(self, message, room=None):
"""Respond with a message.
@@ -172,13 +249,23 @@ async def respond(self, message, room=None):
"""
_LOGGER.debug("Responding with: %s", message.text)
- async with aiohttp.ClientSession() as session:
- data = {}
- data["chat_id"] = message.room["id"]
- data["text"] = message.text
- resp = await session.post(self.build_url("sendMessage"),
- data=data)
- if resp.status == 200:
- _LOGGER.debug("Successfully responded")
- else:
- _LOGGER.error("Unable to respond.")
+ data = dict()
+ data["chat_id"] = message.room["id"]
+ data["text"] = message.text
+ resp = await self.session.post(self.build_url("sendMessage"),
+ data=data)
+ if resp.status == 200:
+ _LOGGER.debug("Successfully responded")
+ else:
+ _LOGGER.error("Unable to respond.")
+
+ async def disconnect(self):
+ """Disconnect from Telegram.
+
+ Stops the infinite loop found in self._listen(), closes
+ aiohttp session.
+
+ """
+ self.listening = False
+ self._closing.set()
+ await self.session.close()
| diff --git a/tests/test_connector_telegram.py b/tests/test_connector_telegram.py
--- a/tests/test_connector_telegram.py
+++ b/tests/test_connector_telegram.py
@@ -1,11 +1,10 @@
"""Tests for the ConnectorTelegram class."""
import asyncio
+import contextlib
import unittest
-import unittest.mock as mock
import asynctest
import asynctest.mock as amock
-from opsdroid.__main__ import configure_lang
from opsdroid.core import OpsDroid
from opsdroid.connector.telegram import ConnectorTelegram
from opsdroid.events import Message
@@ -42,8 +41,10 @@ def setUp(self):
self.connector = ConnectorTelegram({
'name': 'telegram',
'token': 'bot:765test',
- 'whitelisted-users': ['user', 'test']
+ 'whitelisted-users': ['user', 'test', 'AnUser']
}, opsdroid=OpsDroid())
+ with amock.patch('aiohttp.ClientSession') as mocked_session:
+ self.connector.session = mocked_session
async def test_connect(self):
connect_response = amock.Mock()
@@ -59,8 +60,8 @@ async def test_connect(self):
}
}
- with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.get') as patched_request:
+ with amock.patch('aiohttp.ClientSession.get')\
+ as patched_request:
patched_request.return_value = asyncio.Future()
patched_request.return_value.set_result(connect_response)
@@ -73,8 +74,8 @@ async def test_connect_failure(self):
result = amock.MagicMock()
result.status = 401
- with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.get') as patched_request:
+ with amock.patch('aiohttp.ClientSession.get')\
+ as patched_request:
patched_request.return_value = asyncio.Future()
patched_request.return_value.set_result(result)
@@ -82,8 +83,8 @@ async def test_connect_failure(self):
await self.connector.connect()
self.assertLogs('_LOGGER', 'error')
- async def test_parse_message(self):
- response = { 'result': [{
+ async def test_parse_message_username(self):
+ response = {'result': [{
"update_id": 427647860,
"message": {
"message_id": 12,
@@ -99,7 +100,7 @@ async def test_parse_message(self):
"id": 649671308,
"first_name": "A",
"last_name": "User",
- "username": "a_user",
+ "username": "user",
"type": "private"
},
"date": 1538756863,
@@ -107,14 +108,91 @@ async def test_parse_message(self):
}
}]}
- with OpsDroid() as opsdroid, \
- amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ await self.connector._parse_message(response)
+ self.assertTrue(mocked_parse.called)
+
+ async def test_parse_message_channel(self):
+ response = {'result': [{
+ "update_id": 427647860,
+ "message": {
+ "message_id": 12,
+ "from": {
+ "id": 649671308,
+ "is_bot": False,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "user",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 649671308,
+ "first_name": "A",
+ "last_name": "User",
+ "username": "user",
+ "type": "channel"
+ },
+ "date": 1538756863,
+ "text": "Hello"
+ }
+ }]}
+
+ with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ await self.connector._parse_message(response)
+ self.assertLogs('_LOGGER', 'debug')
+
+ async def test_parse_message_first_name(self):
+ response = { 'result': [{
+ "update_id": 427647860,
+ "message": {
+ "message_id": 12,
+ "from": {
+ "id": 649671308,
+ "is_bot": False,
+ "first_name": "AnUser",
+ "type": "private",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 649671308,
+ "first_name": "AnUser",
+ "type": "private"
+ },
+ "date": 1538756863,
+ "text": "Hello"
+ }
+ }]}
+
+ with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
await self.connector._parse_message(response)
self.assertTrue(mocked_parse.called)
+ async def test_parse_message_bad_result(self):
+ response = {'result': [{
+ "update_id": 427647860,
+ "message": {
+ "message_id": 12,
+ "from": {
+ "id": 649671308,
+ "is_bot": False,
+ "first_name": "test",
+ "language_code": "en-GB"
+ },
+ "chat": {
+ "id": 649671308,
+ "first_name": "test",
+ "type": "private"
+ },
+ "date": 1538756863,
+ }
+ }]}
+
+ await self.connector._parse_message(response)
+ self.assertLogs('error', '_LOGGER')
+
async def test_parse_message_unauthorized(self):
self.connector.config['whitelisted-users'] = ['user', 'test']
- response = { 'result': [{
+ response = {'result': [{
"update_id": 427647860,
"message": {
"message_id": 12,
@@ -143,8 +221,7 @@ async def test_parse_message_unauthorized(self):
message_text = "Sorry, you're not allowed to speak with this bot."
- with OpsDroid() as opsdroid, \
- amock.patch.object(self.connector, 'respond') \
+ with amock.patch.object(self.connector, 'respond') \
as mocked_respond:
await self.connector._parse_message(response)
self.assertTrue(mocked_respond.called)
@@ -180,9 +257,9 @@ async def test_get_messages(self):
}
]}
- with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.get') as patched_request,\
- amock.patch.object(self.connector, '_parse_message') \
+ with amock.patch.object(self.connector.session, 'get') \
+ as patched_request,\
+ amock.patch.object(self.connector, '_parse_message') \
as mocked_parse_message:
self.connector.latest_update = 54
@@ -194,29 +271,71 @@ async def test_get_messages(self):
self.assertLogs('_LOGGER', 'debug')
self.assertTrue(mocked_parse_message.called)
+ async def test_delete_webhook(self):
+ response = amock.Mock()
+ response.status = 200
+
+ with amock.patch.object(self.connector.session, 'get') \
+ as mock_request:
+ mock_request.return_value = asyncio.Future()
+ mock_request.return_value.set_result(response)
+
+ await self.connector.delete_webhook()
+ self.assertLogs('_LOGGER', 'debug')
+
+ async def test_get_message_webhook(self):
+ response = amock.Mock()
+ response.status = 409
+
+ with amock.patch.object(self.connector.session, 'get') \
+ as mock_request, \
+ amock.patch.object(self.connector, 'delete_webhook') \
+ as mock_method:
+ mock_request.return_value = asyncio.Future()
+ mock_request.return_value.set_result(response)
+
+ await self.connector._get_messages()
+ self.assertLogs('_LOGGER', 'info')
+ self.assertTrue(mock_method.called)
+
+
+ async def test_delete_webhook_failure(self):
+ response = amock.Mock()
+ response.status = 401
+
+ with amock.patch.object(self.connector.session, 'get') \
+ as mock_request:
+ mock_request.return_value = asyncio.Future()
+ mock_request.return_value.set_result(response)
+
+ await self.connector.delete_webhook()
+ self.assertLogs('_LOGGER', 'debug')
+
async def test_get_messages_failure(self):
listen_response = amock.Mock()
listen_response.status = 401
- with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.get') as patched_request:
+ with amock.patch.object(self.connector.session, 'get') \
+ as patched_request:
patched_request.return_value = asyncio.Future()
patched_request.return_value.set_result(listen_response)
await self.connector._get_messages()
self.assertLogs('_LOGGER', 'error')
- async def test_listen(self):
- self.connector.listening = amock.CoroutineMock()
- self.connector.listening.side_effect = Exception()
- await self.connector.listen()
+ async def test_get_messages_loop(self):
+ self.connector._get_messages = amock.CoroutineMock()
+ self.connector._get_messages.side_effect = Exception()
+ with contextlib.suppress(Exception):
+ await self.connector.get_messages_loop()
async def test_respond(self):
post_response = amock.Mock()
post_response.status = 200
with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.post') as patched_request:
+ amock.patch.object(self.connector.session, 'post')\
+ as patched_request:
self.assertTrue(opsdroid.__class__.instances)
test_message = Message(text="This is a test",
@@ -235,7 +354,8 @@ async def test_respond_failure(self):
post_response.status = 401
with OpsDroid() as opsdroid, \
- amock.patch('aiohttp.ClientSession.post') as patched_request:
+ amock.patch.object(self.connector.session, 'post')\
+ as patched_request:
self.assertTrue(opsdroid.__class__.instances)
test_message = Message(text="This is a test",
@@ -247,3 +367,26 @@ async def test_respond_failure(self):
patched_request.return_value.set_result(post_response)
await test_message.respond("Response")
self.assertLogs('_LOGGER', 'debug')
+
+ async def test_listen(self):
+ with amock.patch.object(self.connector.loop, 'create_task') \
+ as mocked_task, \
+ amock.patch.object(self.connector._closing, 'wait') as\
+ mocked_event:
+ mocked_event.return_value = asyncio.Future()
+ mocked_event.return_value.set_result(True)
+ mocked_task.return_value = asyncio.Future()
+ await self.connector.listen()
+
+ self.assertTrue(mocked_event.called)
+ self.assertTrue(mocked_task.called)
+
+ async def test_disconnect(self):
+ with amock.patch.object(self.connector.session, 'close') as mocked_close:
+ mocked_close.return_value = asyncio.Future()
+ mocked_close.return_value.set_result(True)
+
+ await self.connector.disconnect()
+ self.assertFalse(self.connector.listening)
+ self.assertTrue(self.connector.session.closed())
+ self.assertEqual(self.connector._closing.set(), None)
\ No newline at end of file
| Telegram connector crashes when receiving a message from a channel
Hi,
I have found an issue that makes the telegram connector crash when receiving a message from a channel. The problem is that the json does not contain a **message** entry.
```
DEBUG opsdroid.core: Parsing input: hi
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid.connector.telegram: {'update_id': 539026669, 'channel_post': {'message_id': 10, 'chat': {'id': -1001448219498, 'title': 'test-channel', 'type': 'channel'}, 'date': 1550093042, 'text': 'hi'}}
DEBUG opsdroid.connector.telegram: Responding with: Hi IObreaker
DEBUG asyncio: Using selector: EpollSelector
Traceback (most recent call last):
File "/usr/bin/opsdroid", line 11, in <module>
load_entry_point('opsdroid==0.14.1', 'console_scripts', 'opsdroid')()
File "/usr/lib64/python3.6/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/lib64/python3.6/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/lib64/python3.6/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/lib64/python3.6/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/lib/python3.6/site-packages/opsdroid/__main__.py", line 197, in main
opsdroid.run()
File "/usr/lib/python3.6/site-packages/opsdroid/core.py", line 135, in run
self.eventloop.run_until_complete(asyncio.gather(*pending))
File "/usr/lib64/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/usr/lib/python3.6/site-packages/opsdroid/connector/telegram/__init__.py", line 164, in listen
await self._get_messages()
File "/usr/lib/python3.6/site-packages/opsdroid/connector/telegram/__init__.py", line 144, in _get_messages
await self._parse_message(json)
File "/usr/lib/python3.6/site-packages/opsdroid/connector/telegram/__init__.py", line 96, in _parse_message
if result["message"]["text"]:
KeyError: 'message'
```
Testing if "message" is in the result before parsing should correct this issue in the function **async def _parse_message(self, response):**
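A minimal sketch of that kind of guard, written as a standalone helper purely for illustration (the patch earlier in this record applies the same idea directly inside `_parse_message`):
```python
import logging

_LOGGER = logging.getLogger(__name__)


def extract_text_messages(response):
    """Yield only the Telegram updates that actually carry a text message."""
    for result in response.get("result", []):
        message = result.get("message")
        if not message or "text" not in message:
            _LOGGER.debug("Skipping update without a text message: %s", result)
            continue
        yield message
```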
"Telegram changes many entries name, testing if they exists is mandatory before processing to avoid exception"
opsdroid v0.14.1
Python 3.6.7
Opsdroid rocks :-)
Regards
| 2019-02-14T10:04:10 |
|
opsdroid/opsdroid | 844 | opsdroid__opsdroid-844 | [
"764"
] | 63609123fd9ee15f87032c72f133f3f0cba3b69d | diff --git a/opsdroid/connector/slack/__init__.py b/opsdroid/connector/slack/__init__.py
--- a/opsdroid/connector/slack/__init__.py
+++ b/opsdroid/connector/slack/__init__.py
@@ -58,6 +58,9 @@ async def connect(self):
_LOGGER.error(error)
_LOGGER.error("Failed to connect to Slack, retrying in 10")
await self.reconnect(10)
+ except slacker.Error as error:
+ _LOGGER.error("Unable to connect to Slack due to %s - "
+ "The Slack Connector will not be available.", error)
except Exception:
await self.disconnect()
raise
@@ -79,7 +82,10 @@ async def disconnect(self):
async def listen(self):
"""Listen for and parse new messages."""
while self.listening:
- await self.receive_from_websocket()
+ try:
+ await self.receive_from_websocket()
+ except AttributeError:
+ break
async def receive_from_websocket(self):
"""Get the next message from the websocket."""
| diff --git a/tests/test_connector_slack.py b/tests/test_connector_slack.py
--- a/tests/test_connector_slack.py
+++ b/tests/test_connector_slack.py
@@ -5,6 +5,7 @@
import unittest.mock as mock
import asynctest
import asynctest.mock as amock
+import slacker
from opsdroid.core import OpsDroid
from opsdroid.connector.slack import ConnectorSlack
@@ -72,6 +73,16 @@ async def test_connect(self):
self.assertTrue(mocked_websocket_connect.called)
self.assertTrue(connector.keepalive_websocket.called)
+ async def test_connect_auth_fail(self):
+ connector = ConnectorSlack({"api-token": "abc123"}, opsdroid=OpsDroid())
+ opsdroid = amock.CoroutineMock()
+ opsdroid.eventloop = self.loop
+ connector.slacker.rtm.start = amock.CoroutineMock()
+ connector.slacker.rtm.start.side_effect = slacker.Error()
+
+ await connector.connect()
+ self.assertLogs('_LOGGER', 'error')
+
async def test_reconnect_on_error(self):
import aiohttp
connector = ConnectorSlack({"api-token": "abc123"}, opsdroid=OpsDroid())
@@ -101,6 +112,14 @@ async def test_listen_loop(self):
await connector.listen()
self.assertTrue(connector.receive_from_websocket.called)
+ async def test_listen_break_loop(self):
+ """Test that listening consumes from the socket."""
+ connector = ConnectorSlack({"api-token": "abc123"}, opsdroid=OpsDroid())
+ connector.receive_from_websocket = amock.CoroutineMock()
+ connector.receive_from_websocket.side_effect = AttributeError
+ await connector.listen()
+ self.assertTrue(connector.receive_from_websocket.called)
+
async def test_receive_from_websocket(self):
"""Test receive_from_websocket receives and reconnects."""
import websockets
| Handle slacker connector invalid token exception
<!-- Before you post an issue or if you are unsure about something join our gitter channel https://gitter.im/opsdroid/ and ask away! We are more than happy to help you. -->
# Description
This issue was found by @petri with #763
When we use an invalid token with the slacker connector opsdroid crashes badly. This is probably due to the fact that we changed dependencies and the code doesn't handle the exception raised by the aioslacker library.
We should probably refactor the connect method to check if the exception `slacker.Error: invalid_auth` was raised, if so an error message should be logged and opsdroid should still be able to run.
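Roughly the shape of that fix — the patch earlier in this record catches the error inside the connector's own `connect` method, so this hypothetical standalone wrapper is only meant to illustrate the idea:
```python
import logging

import slacker

_LOGGER = logging.getLogger(__name__)


async def connect_safely(connector):
    """Try to connect and log, rather than crash, on an invalid token."""
    try:
        await connector.connect()
    except slacker.Error as error:
        _LOGGER.error("Unable to connect to Slack due to %s - "
                      "the Slack connector will not be available.", error)
        return False
    return True
```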
## Steps to Reproduce
- Set a slack connector with a bad token on config.yaml
- run opsdroid
## Expected Functionality
Opsdroid should still run but a message should be logged that the connector won't be active due to bad token.
## Experienced Functionality
Opsdroid crashes with a traceback
```python
Traceback (most recent call last):
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/__main__.py", line 206, in <module>
init()
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/__main__.py", line 203, in init
main()
File "/Users/fabiorosado/.local/share/virtualenvs/opsdroid-13bLHlYD/lib/python3.6/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/Users/fabiorosado/.local/share/virtualenvs/opsdroid-13bLHlYD/lib/python3.6/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/Users/fabiorosado/.local/share/virtualenvs/opsdroid-13bLHlYD/lib/python3.6/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/fabiorosado/.local/share/virtualenvs/opsdroid-13bLHlYD/lib/python3.6/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/__main__.py", line 196, in main
opsdroid.load()
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/core.py", line 153, in load
self.start_connectors(self.modules["connectors"])
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/core.py", line 248, in start_connectors
self.eventloop.run_until_complete(connector.connect(self))
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py", line 466, in run_until_complete
return future.result()
File "/Users/fabiorosado/Documents/GitHub/opsdroid/opsdroid/connector/slack/__init__.py", line 50, in connect
connection = await self.slacker.rtm.start()
File "/Users/fabiorosado/.local/share/virtualenvs/opsdroid-13bLHlYD/lib/python3.6/site-packages/aioslacker/__init__.py", line 97, in __request
raise Error(response.error)
slacker.Error: invalid_auth
Exception ignored in: <bound method BaseEventLoop.__del__ of <_UnixSelectorEventLoop running=False closed=True debug=False>>
Traceback (most recent call last):
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/base_events.py", line 511, in __del__
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/unix_events.py", line 65, in close
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/unix_events.py", line 146, in remove_signal_handler
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/signal.py", line 47, in signal
TypeError: signal handler must be signal.SIG_IGN, signal.SIG_DFL, or a callable object
```
## Versions
- **Opsdroid version:** latest
- **Python version:** 3.6
- **OS/Docker version:** MacOs Mojave
## Configuration File
Please include your version of the configuration file below.
```yaml
connectors:
- name: slack
api-token: "jdit-ksd12okr"
```
## Additional Details
Any other details you wish to include such as screenshots, console messages, etc.
<!-- Love opsdroid? Please consider supporting our collective:
+๐ https://opencollective.com/opsdroid/donate -->
| We should check whether the connectors actually connect [here](https://github.com/opsdroid/opsdroid/blob/44df8f609656ab5bafef1ff79c14ab8cc2cbc647/opsdroid/core.py#L248). If they don't we should probably gracefully exit opsdroid.
The connector itself should return `True` or `False` on connect and catch all errors. Then opsdroid core should handle when a connector returns `False`.
Okay, so we need to change all connectors to return either true or false and then handle that return value in the core. This might add a few more nested if/else blocks in the core, though, so however we implement it we should try to avoid that.
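A hypothetical sketch of the core-side handling being discussed (the function and attribute names here are made up for illustration and are not opsdroid's actual code):
```python
import logging

_LOGGER = logging.getLogger(__name__)


async def start_connectors(connectors):
    """Start every configured connector and keep only the ones that connected."""
    started = []
    for connector in connectors:
        if await connector.connect():
            started.append(connector)
        else:
            _LOGGER.error("Connector %s failed to connect - it will be unavailable.",
                          connector.name)
    return started
```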
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
| 2019-02-14T16:59:34 |
opsdroid/opsdroid | 866 | opsdroid__opsdroid-866 | [
"862"
] | 285d6dfe9b7ba4e279b5d6c972c096eaffe0f09d | diff --git a/opsdroid/connector/telegram/__init__.py b/opsdroid/connector/telegram/__init__.py
--- a/opsdroid/connector/telegram/__init__.py
+++ b/opsdroid/connector/telegram/__init__.py
@@ -152,6 +152,8 @@ async def _parse_message(self, response):
"""
for result in response["result"]:
_LOGGER.debug(result)
+ if result.get('edited_message', None):
+ result['message'] = result.pop('edited_message')
if "channel" in result["message"]["chat"]["type"]:
_LOGGER.debug("Channel message parsing not supported "
"- Ignoring message")
| diff --git a/tests/test_connector_telegram.py b/tests/test_connector_telegram.py
--- a/tests/test_connector_telegram.py
+++ b/tests/test_connector_telegram.py
@@ -112,6 +112,35 @@ async def test_parse_message_username(self):
await self.connector._parse_message(response)
self.assertTrue(mocked_parse.called)
+ async def test_parse_edited_message(self):
+ response = {'result': [{
+ 'update_id': 246644499,
+ 'edited_message': {
+ 'message_id': 150,
+ 'from': {
+ 'id': 245245245,
+ 'is_bot': False,
+ 'first_name': 'IOBreaker',
+ 'language_code': 'en'},
+ 'chat': {
+ 'id': 245245245,
+ 'first_name': 'IOBreaker',
+ 'type': 'private'},
+ 'date': 1551797346,
+ 'edit_date': 1551797365,
+ 'text': 'hello2'}}]}
+ response_copy = list(response)
+ mocked_status = amock.CoroutineMock()
+ mocked_status.status = 200
+ with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse, \
+ amock.patch.object(self.connector.session, 'post') \
+ as patched_request:
+ patched_request.return_value = asyncio.Future()
+ patched_request.return_value.set_result(mocked_status)
+ self.assertTrue(response['result'][0].get('edited_message'))
+ await self.connector._parse_message(response)
+
+
async def test_parse_message_channel(self):
response = {'result': [{
"update_id": 427647860,
@@ -137,12 +166,12 @@ async def test_parse_message_channel(self):
}
}]}
- with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
+ with amock.patch('opsdroid.core.OpsDroid.parse'):
await self.connector._parse_message(response)
self.assertLogs('_LOGGER', 'debug')
async def test_parse_message_first_name(self):
- response = { 'result': [{
+ response = {'result': [{
"update_id": 427647860,
"message": {
"message_id": 12,
@@ -162,7 +191,6 @@ async def test_parse_message_first_name(self):
"text": "Hello"
}
}]}
-
with amock.patch('opsdroid.core.OpsDroid.parse') as mocked_parse:
await self.connector._parse_message(response)
self.assertTrue(mocked_parse.called)
| Telegram Connector : Parsing edited message cause process to freeze
# Description
Hi,
I am experiencing some strange behaviour: when opsdroid is not active and someone sends several messages, recalls them for modification and sends them back, a parsing error occurs when opsdroid starts receiving and parsing those messages.
The problem is that the function **async def _parse_message(self, response):** tries to parse the JSON payload looking for a **message** key in its tests, but in our case the key name becomes **edited_message**.
An example of an edited message :
```
{'update_id': 987978987, 'edited_message': {'message_id': 142, 'from': {'id': 65464848, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 9849489499, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551697059, 'edit_date': 1551697076, 'text': 'test message'}}
```
## Steps to Reproduce
- Stop opsdroid
- Send messages
- edit the message by clicking on up arrow
- send edited message back
- doing this 4 times :-)
- activate opsdroid
## Expected Functionality
detect if the message is an edited one or an "original" one
in case of an edited one, use the edited_message entry instead of message entry
## Experienced Functionality
Here are some logs to help understand the freeze, which happens when the function tries to test
> if "channel" in result["message"]["chat"]["type"]
or any other access to `message` instead of `edited_message`
```
{'ok': True, 'result': [{'update_id': 684648646, 'edited_message': {'message_id': 142, 'from': {'id': 987456897, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 987456897, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551697059, 'edit_date': 1551697076, 'text': 'please launch a vmware instance with those paramaters --os=windows --template=windows2016'}}, {'update_id': 246644493, 'message': {'message_id': 145, 'from': {'id': 987456897, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 987456897, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551697087, 'text': 'please launch a vmware server with those paramaters --os=windows --template=windows2016 --ip=92.0.34.0.7 --hostname=helios --note="it\'s my test machine ".'}}, {'update_id': 246644494, 'message': {'message_id': 146, 'from': {'id': 987456897, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 987456897, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551697540, 'text': 'hi'}}, {'update_id': 246644495, 'message': {'message_id': 147, 'from': {'id': 987456897, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 987456897, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551731191, 'text': 'hi'}}]}
```
Where the freeze occurs :
```
DEBUG opsdroid.connector.websocket: Starting Websocket connector
DEBUG opsdroid.connector.telegram: Loaded telegram connector
DEBUG opsdroid.connector.telegram: Connecting to telegram
DEBUG opsdroid.connector.telegram: {'ok': True, 'result': {'id': 731324986, 'is_bot': True, 'first_name': 'ChamixLocalBot', 'username': 'ChamixLocalBot'}}
DEBUG opsdroid.connector.telegram: Connected to telegram as ChamixLocalBot
INFO opsdroid.core: Opsdroid is now running, press ctrl+c to exit.
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
DEBUG opsdroid.connector.telegram: =======> Testing if "channel" in result["message"]["chat"]["type"]
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
```
> this is only my debug line :-) : DEBUG opsdroid.connector.telegram: =======> Testing if "channel" in result["message"]["chat"]["type"]
## Versions
- **Opsdroid version: v0.14.1+19.g6360912.dirty**
- **Python version: 3.7.0**
Regards
 | Thanks for raising this @iobreaker, just to clarify: in your log you have an original message and then an edited message? It seems you tried to run the vm command twice. I'm trying to figure out if Telegram sends every edited message with the JSON response or if it replaces it.
If the Telegram API returns all the edited messages (let's say you edit the same message 3 times) we will have to add some logic to just use the last version of the edited message
Hi @FabioRosado
I confirm: when you edit and send the message back, the error occurs.
Below is an example I just produced that reproduces the error:
> IOBReaker - Initial message- : hello
>
```
DEBUG opsdroid.connector.telegram: =======> <2> : {'update_id': 246644498, 'message': {'message_id': 150, 'from': {'id': 245245245, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 245245245, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551797346, 'text': 'hello'}}
DEBUG opsdroid.connector.telegram: =======> <4> : {'update_id': 246644498, 'message': {'message_id': 150, 'from': {'id': 245245245, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 245245245, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551797346, 'text': 'hello'}}
```
> IOBReaker -recall and edit the same message- : hello2
```
DEBUG opsdroid.connector.telegram: =======> <2> : {'update_id': 246644499, 'edited_message': {'message_id': 150, 'from': {'id': 245245245, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 245245245, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551797346, 'edit_date': 1551797365, 'text': 'hello2'}}
```
> IOBReaker - new message - : Hi all
```
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.parsers.crontab: Running crontab skills
```
No message will be processed since opsdroid received the edited message
I do not think the solution is to send only the edited message, because Telegram does not retain the original message; it sends each version after the user confirms it.
More than that, the user can send an original message like :
- buy a packet of **chips**
and recall the message to only modify 1 item like
- buy a packet of **candies**
The solution, I think, is to detect whether the message is an edited one or a standard one and, for each message type, execute the correct workflow/function.
Hope that helps :-)
I foresee some issues with implementing the edited message logic. I won't be able to keep track of previous messages - the bot will "consume" messages as they come.
So if you type an original message like:
`Hello`
And then edit it to `Bye`, opsdroid will reply both to the `hello` and `bye` message. Do you think this would be an alright solution for the connector?
Hi @FabioRosado
Yes it's what i explained in my previous message using the example of chips and candies :-)
Two solutions to correct this issue
## The quick one
Changing the key of the returned dict from **edited_message** to **message**:
I checked the dict returned by telegram for edited_message, it has the same structure as the message one.
This solution will work until Telegram decides to change something.
**The code: it costs 2 lines of modification**
```
async def _parse_message(self, response):
for result in response["result"]:
if result.get('edited_message',None):
result['message'] = result.pop('edited_message')
if "channel" in result["message"]["chat"]["type"]:
_LOGGER.debug("Channel message parsing not supported "
"- Ignoring message")
elif "message" in result and "text" in result["message"]:
user = self.get_user(result)
...
...
```
## The "good" but long one
Let the **_parse_message** function manage splitting the response content, and create another function such as **_extract_message** in charge of returning the right message object to **_parse_message**.
Please note that **handle_user_permission** and **get_user** have to change too (because they receive the response as a param).
**The code :**
```
@staticmethod
def get_user(response):
user = None
if response.get('message',None):
if "username" in response["message"]["from"]:
user = response["message"]["from"]["username"]
elif "first_name" in response["message"]["from"]:
user = response["message"]["from"]["first_name"]
elif response.get('edited_message',None):
if "username" in response["edited_message"]["from"]:
user = response["edited_message"]["from"]["username"]
elif "first_name" in response["edited_message"]["from"]:
user = response["edited_message"]["from"]["first_name"]
return user
```
```
def handle_user_permission(self, response, user):
if response.get('message',None):
user_id = response["message"]["from"]["id"]
elif response.get('edited_message',None):
            user_id = response["edited_message"]["from"]["id"]
if not self.whitelisted_users or \
user in self.whitelisted_users or \
user_id in self.whitelisted_users:
return True
return False
```
```
async def _extract_message(self, result):
"""Handle logic to extract a received message.
Since message can be a standard one (initial) or
a modified one (recalled, modified and sent back), this function
        will detect which kind of message was received and extract information from it
Args:
response (dict): result returned by aiohttp.ClientSession.
Return:
message (object) : Message object.
"""
message = None
if result.get('message',None):
if "channel" in result["message"]["chat"]["type"]:
_LOGGER.debug("Channel message parsing not supported - Ignoring message")
else:
if "message" in result and "text" in result["message"]:
user = self.get_user(result)
message = Message( user, result["message"]["chat"], self, result["message"]["text"])
else:
_LOGGER.debug("Unable to extract text message.")
elif result.get('edited_message',None):
if "channel" in result["edited_message"]["chat"]["type"]:
_LOGGER.debug("Channel message parsing not supported - Ignoring message")
else:
if "edited_message" in result and "text" in result["edited_message"]:
user = self.get_user(result)
message = Message( user, result["edited_message"]["chat"], self, result["edited_message"]["text"])
else:
_LOGGER.debug("Unable to extract text message.")
else:
_LOGGER.error("Unable to parse the message.")
return message
```
```
async def _parse_message(self, response):
for result in response["result"]:
message = await self._extract_message(result)
if message:
user = self.get_user(result)
if self.handle_user_permission(result, user):
await self.opsdroid.parse(message)
else:
message.text = "Sorry, you're not allowed to speak with this bot."
await self.respond(message)
self.latest_update = result["update_id"] + 1
```
## Tests
I tested the two methods; both work:
> IObreaker Initial Message : buy a packet of **chips**
```
DEBUG opsdroid.connector.telegram: ========> Calling parse_message using : {'ok': True, 'result': [{'update_id': 246644523, 'message': {'message_id': 168, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864488, 'text': 'buy a packet of chips'}}]}
DEBUG opsdroid.connector.telegram: ========> Calling _extract_message using : {'update_id': 246644523, 'message': {'message_id': 168, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864488, 'text': 'buy a packet of chips'}}
DEBUG opsdroid.connector.telegram: ========> Parssing standard message
DEBUG opsdroid.connector.telegram: ========> Handlling message Object
DEBUG opsdroid.connector.telegram: ========> Calling self.opsdroid.parse
DEBUG opsdroid.core: Parsing input: buy a packet of chips
DEBUG opsdroid.core: Processing parsers...
```
> IObreaker modified Message : buy a packet of **candies**
```
DEBUG opsdroid.connector.telegram: ========> Calling parse_message using : {'ok': True, 'result': [{'update_id': 246644524, 'edited_message': {'message_id': 168, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864488, 'edit_date': 1551864493, 'text': 'buy a packet of candies'}}]}
DEBUG opsdroid.connector.telegram: ========> Calling _extract_message using : {'update_id': 246644524, 'edited_message': {'message_id': 168, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864488, 'edit_date': 1551864493, 'text': 'buy a packet of candies'}}
DEBUG opsdroid.connector.telegram: ========> Parssing edited message
DEBUG opsdroid.connector.telegram: ========> Handlling message Object
DEBUG opsdroid.connector.telegram: ========> Calling self.opsdroid.parse
DEBUG opsdroid.core: Parsing input: buy a packet of candies
DEBUG opsdroid.core: Processing parsers...
```
> IObreaker new message : forget about it
```
DEBUG opsdroid.connector.telegram: ========> Calling parse_message using : {'ok': True, 'result': [{'update_id': 246644525, 'message': {'message_id': 169, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864518, 'text': 'forget about it'}}]}
DEBUG opsdroid.connector.telegram: ========> Calling _extract_message using : {'update_id': 246644525, 'message': {'message_id': 169, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864518, 'text': 'forget about it'}}
DEBUG opsdroid.connector.telegram: ========> Parssing standard message
DEBUG opsdroid.connector.telegram: ========> Handlling message Object
DEBUG opsdroid.connector.telegram: ========> Calling self.opsdroid.parse
```
> IObreaker new message : ok ?
```
DEBUG opsdroid.connector.telegram: ========> Calling parse_message using : {'ok': True, 'result': [{'update_id': 246644526, 'message': {'message_id': 170, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864527, 'text': 'ok ?'}}]}
DEBUG opsdroid.connector.telegram: ========> Calling _extract_message using : {'update_id': 246644526, 'message': {'message_id': 170, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1551864527, 'text': 'ok ?'}}
DEBUG opsdroid.connector.telegram: ========> Parssing standard message
DEBUG opsdroid.connector.telegram: ========> Handlling message Object
DEBUG opsdroid.connector.telegram: ========> Calling self.opsdroid.parse
DEBUG opsdroid.core: Parsing input: ok ?
DEBUG opsdroid.core: Processing parsers...
```
No freezing detected.
Regards
I would be keen to discuss whether we think bots should respond to edited messages at all.
The main reason I raise this is because it is possible to edit messages way back in the past in many chat clients. This could result in unexpected actions from the bot.
Good question.
If I take the example of sending a big CLI command to the bot to execute an action, and that command turns out to be wrong, I would be really happy to just have to modify the error rather than rewriting the whole thing.
If you allow me, I will ask the question in a different way: when a chat service allows an action (perhaps seen by the user as an excellent feature), should opsdroid respond to it or not?
P.S : In both cases, the message should be handled (even with no response like for channel message)
I can see the reasoning behind Jacob's input. Personally, I think opsdroid should handle the edited message. Taking into consideration what @iobreaker was trying to do, if you mess up a long command by mistake, it's easier to just press the `up` arrow to get the last message, correct the mistake and send the command back to the bot.
I am going to start a PR to fix this issue - I have also started using telegram more often and quite like it, even though the documentation kind of sucks haha
Sounds good to me. I'm happy to go down this route, I just wanted us to decide to do it explicitly. I wonder if we should start recording these kind of design decisions somewhere?
Not sure how we should record them, to be honest; each connector has its own way of handling edited messages. I wonder how @Cadair solved this issue with the matrix connector - I think he mentioned something about handling edited messages as well?
That's kind of my point. I would like the handling to be consistent. We've captured the discussion and decision in this issue, but is that enough? Maybe we should add something to the "creating connectors" section of the docs.
I see your point, and yeah, I agree that we should maybe add that bit to the documentation. In regard to edited messages, I think the best way would be to run only the edited message, but in order to get that done we would have to somehow gather a list of messages and handle them - this would cause opsdroid to delay the response, so it might not be the best way to do it.
I think what should happen is that an `Edit` event should be raised, and then skills and connectors can implement them in whatever way they want :grin: (see #826 )
(matrix doesn't have event editing yet, so not currently a problem)
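As a purely illustrative sketch (not the real opsdroid event API), an `Edit` event could be a thin subclass of the message event carrying a reference to the message it replaces. The constructor signature and the `original` attribute below are assumptions, not part of opsdroid:

```python
from opsdroid.events import Message


class Edit(Message):
    """An edit applied to a previously sent message (illustration only)."""

    def __init__(self, text, user, room, connector, original=None, raw_event=None):
        # Assumed Message signature: (text, user, room, connector, raw_event=...)
        super().__init__(text, user, room, connector, raw_event=raw_event)
        # Hypothetical attribute: the event this edit replaces, if the connector knows it.
        self.original = original
```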
That's a fair point and I agree an edit event is a good idea. However it would be great to have consistency in implementation.
Implementation in what way? I see an `Edit` event having a `new_message` attribute, and maybe some representation of the diff or something. What did you have in mind? | 2019-03-06T17:30:47 |
opsdroid/opsdroid | 868 | opsdroid__opsdroid-868 | [
"867"
] | 8dfa6c4caec3558c3f7fb69bbd3e03dda9dd725b | diff --git a/opsdroid/connector/websocket/__init__.py b/opsdroid/connector/websocket/__init__.py
--- a/opsdroid/connector/websocket/__init__.py
+++ b/opsdroid/connector/websocket/__init__.py
@@ -82,10 +82,9 @@ async def websocket_handler(self, request):
await websocket.prepare(request)
self.active_connections[socket] = websocket
-
async for msg in websocket:
if msg.type == aiohttp.WSMsgType.TEXT:
- message = Message(None, socket, self, msg.data)
+ message = Message(msg.data, None, None, self)
await self.opsdroid.parse(message)
elif msg.type == aiohttp.WSMsgType.ERROR:
_LOGGER.error('Websocket connection closed with exception %s',
diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -432,22 +432,23 @@ async def parse(self, message):
"""Parse a string against all skills."""
self.stats["messages_parsed"] = self.stats["messages_parsed"] + 1
tasks = []
- if message is not None and message.text.strip() != "":
- _LOGGER.debug(_("Parsing input: %s"), message.text)
+ if message is not None:
+ if str(message.text).strip():
+ _LOGGER.debug(_("Parsing input: %s"), message.text)
- tasks.append(
- self.eventloop.create_task(parse_always(self, message)))
-
- unconstrained_skills = await self._constrain_skills(
- self.skills, message)
- ranked_skills = await self.get_ranked_skills(
- unconstrained_skills, message)
- if ranked_skills:
tasks.append(
- self.eventloop.create_task(
- self.run_skill(ranked_skills[0]["skill"],
- ranked_skills[0]["config"],
- message)))
+ self.eventloop.create_task(parse_always(self, message)))
+
+ unconstrained_skills = await self._constrain_skills(
+ self.skills, message)
+ ranked_skills = await self.get_ranked_skills(
+ unconstrained_skills, message)
+ if ranked_skills:
+ tasks.append(
+ self.eventloop.create_task(
+ self.run_skill(ranked_skills[0]["skill"],
+ ranked_skills[0]["config"],
+ message)))
return tasks
 | Websocket connector : message.text always None exception in core.py
Hi Team,
I downloaded the latest commit of opsdroid and tested it with opsdroid-desktop (latest version v0.2.7) in websocket mode.
After the first message saying hello, I got an exception:
```
File "/home/hicham/Developments/Bots/opsdroid/opsdroid-master/opsdroid/core.py", line 435, in parse
if message is not None and message.text.strip() != "":
AttributeError: 'NoneType' object has no attribute 'strip'
ERROR aiohttp.server: Unhandled exception
```
## Steps to Reproduce
1- Install the last version of opsdroid (after the #826 patch commit)
2- Start opsdroid-desktop (i tested with v0.2.7)
3- Connect to opsdroid
4- Send any message
5- Error occurs
## Expected Functionality
- No crash
- None message should be handled
- message.text must not be None
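A tiny, standalone illustration of the guard described in the list above (simplified; not the actual `core.parse` implementation):

```python
def has_parseable_text(text):
    """Return True only when there is real text to parse."""
    # None and whitespace-only strings are both treated as "nothing to parse".
    return text is not None and str(text).strip() != ""


print(has_parseable_text(None))     # False
print(has_parseable_text("   "))    # False
print(has_parseable_text("hello"))  # True
```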
## Experienced Functionality
The crash produces the following output:
```
DEBUG opsdroid.connector.websocket: Starting Websocket connector
INFO opsdroid.core: Opsdroid is now running, press ctrl+c to exit.
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
INFO aiohttp.access: 127.0.0.1 [07/Mar/2019:14:35:22 +0000] "POST /connector/websocket HTTP/1.1" 200 220 "-" "-"
DEBUG opsdroid.connector.websocket: User connected to 3d406d62-40e6-11e9-bd4a-4865ee166460
ERROR aiohttp.server: Error handling request
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
resp = await task
File "/usr/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
resp = await handler(request)
File "/home/hicham/Developments/Bots/opsdroid/opsdroid-master/opsdroid/connector/websocket/__init__.py", line 89, in websocket_handler
await self.opsdroid.parse(message)
File "/home/hicham/Developments/Bots/opsdroid/opsdroid-master/opsdroid/core.py", line 435, in parse
if message is not None and message.text.strip() != "":
AttributeError: 'NoneType' object has no attribute 'strip'
ERROR aiohttp.server: Unhandled exception
```
I made a first modification (to handle the case where the message text is None) in **opsdroid/core.py**:
```python
async def parse(self, message):
"""Parse a string against all skills."""
self.stats["messages_parsed"] = self.stats["messages_parsed"] + 1
tasks = []
_LOGGER.debug("===========> message.text = " + str(message.text))
if message.text is not None:
if str(message.text).strip() != "":
_LOGGER.debug(_("Parsing input: %s"), message.text)
...
...
```
After this modification the crash was avoided, but a new exception occurred:
```
ERROR opsdroid.core: {'message': 'Task exception was never retrieved', 'exception': TypeError('can only concatenate str (not "ConnectorWebsocket") to str'), 'future': <Task finished coro=<OpsDroid.run_skill() done, defined at /home/hicham/Developments/Bots/opsdroid/opsdroid-master/opsdroid/core.py:328> exception=TypeError('can only concatenate str (not "ConnectorWebsocket") to str')>}
```
After looking into the error, I noticed that in the Message class the initializer has a parameter order different from the order used to initialize the message object in the websocket connector:
```python
class Message(NewMessage):
"""A message object.
Deprecated. Use ``opsdroid.events.Message`` instead.
"""
def __init__(self, text, user, room, connector,
raw_message=None): # noqa: D401
"""Deprecated opsdroid.message.Message object."""
super().__init__(text, user, room, connector,
raw_event=raw_message)
```
In the new websocket connector line 88 :
```python
async def new_websocket_handler(self, request):
...
...
async for msg in websocket:
if msg.type == aiohttp.WSMsgType.TEXT:
message = Message(None, socket, self, msg.data)
...
...
```
As you can see, None is sent for the text param.
I changed it to
```python
async for msg in websocket:
if msg.type == aiohttp.WSMsgType.TEXT:
message = Message(msg.data, socket, None, self)
```
After this modification, it starts working:
```
DEBUG opsdroid.connector.websocket: Starting Websocket connector
INFO opsdroid.core: Opsdroid is now running, press ctrl+c to exit.
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
INFO aiohttp.access: 127.0.0.1 [07/Mar/2019:15:32:28 +0000] "POST /connector/websocket HTTP/1.1" 200 220 "-" "-"
DEBUG opsdroid.connector.websocket: User connected to 374e1564-40ee-11e9-a6f2-4865ee166460
DEBUG opsdroid.core: ===========> message.text = hi
DEBUG opsdroid.core: Parsing input: hi
DEBUG opsdroid.connector.websocket: Responding with: 'Hey 374e1564-40ee-11e9-a6f2-4865ee166460' in target 374e1564-40ee-11e9-a6f2-4865ee166460
DEBUG opsdroid.core: ===========> message.text = how are you ?
DEBUG opsdroid.core: Parsing input: how are you ?
DEBUG opsdroid.core: ===========> message.text = hi
DEBUG opsdroid.core: Parsing input: hi
DEBUG opsdroid.connector.websocket: Responding with: 'Hey 374e1564-40ee-11e9-a6f2-4865ee166460' in target 374e1564-40ee-11e9-a6f2-4865ee166460

```
Perhaps more diag is needed.
## Versions
- **Opsdroid version: last one after #826 commit**
- **Python version: 3.7.0 **
Kind Regards
Hicham
| @Cadair could you take a look at this?
*mutters something quietly about the dangers of rebasing*
Yep, I will patch this as soon as I get a second. | 2019-03-07T21:55:23 |
|
opsdroid/opsdroid | 873 | opsdroid__opsdroid-873 | [
"872"
] | 4a0647de04499c449c434955b72ae76fefd73ee7 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -448,7 +448,7 @@ async def parse(self, message):
self.eventloop.create_task(
self.run_skill(ranked_skills[0]["skill"],
ranked_skills[0]["config"],
- message)))
+ ranked_skills[0]["message"])))
return tasks
diff --git a/opsdroid/parsers/regex.py b/opsdroid/parsers/regex.py
--- a/opsdroid/parsers/regex.py
+++ b/opsdroid/parsers/regex.py
@@ -2,6 +2,7 @@
import logging
import re
+import copy
_LOGGER = logging.getLogger(__name__)
@@ -27,12 +28,13 @@ async def parse_regex(opsdroid, skills, message):
regex = re.search(opts["expression"],
message.text, re.IGNORECASE)
if regex:
- message.regex = regex
+ new_message = copy.copy(message)
+ new_message.regex = regex
matched_skills.append({
"score": await calculate_score(
opts["expression"], opts["score_factor"]),
"skill": skill,
"config": skill.config,
- "message": message
+ "message": new_message
})
return matched_skills
| Regex parser : No parsed group detected when using two skills with regex_matcher
Hi
# Description
When activating more than one skill that uses regex (for example the hello skill together with the welcome skill or a loudnoise skill), if the second skill uses named groups in its regex, no groups are detected and passed into **message.regex.group**.
## Steps to Reproduce
1- clone the last version of opsdroid
2- activate regex parser in config file
3- activate hello skill in config file
4- activate welcome skill in config file (parsing sentence like : welcome iobreaker to paris)
5- Activate telegram connector (I tested using telegram)
6- send : welcome iobreaker to paris
7- error will appear in opsdroid logs
## Expected Functionality
- Groups should be present in **message.regex.groups()**
## Experienced Functionality
You can use this skill (welcome skill) I created to facilitate debugging:
```python
from opsdroid.matchers import match_regex
import logging
_LOGGER = logging.getLogger(__name__)
@match_regex(r'(welcome)(\s*)(?P<name>\w+)(\s*)(to|at)(\s*)(?P<location>\w+)', case_sensitive=False)
async def welcome(opsdroid, config, message):
_LOGGER.debug("=========> triggering welcome skill")
_LOGGER.debug("=========> group('name') = {}".format(message.regex.group("name")))
_LOGGER.debug("=========> group('location') = {}".format(message.regex.group("location")))
```
When sending the message **welcome iobreaker to paris** without activating the hello skill, all works fine:
```
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
DEBUG opsdroid.connector.telegram: {'update_id': 246644582, 'message': {'message_id': 286, 'from': {'id': 111222555, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 111222555, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552162058, 'text': 'welcome hicham to paris'}}
DEBUG opsdroid.core: Parsing input: welcome hicham to paris
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.welcome: =========> triggering welcome skill
DEBUG opsdroid-modules.skill.welcome: =========> group('name') = hicham
DEBUG opsdroid-modules.skill.welcome: =========> group('location') = paris
```
When activating both the hello skill and the welcome skill, the error happens: **IndexError: no such group**
```
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
DEBUG opsdroid.parsers.crontab: Running crontab skills
DEBUG opsdroid.connector.telegram: {'update_id': 246644579, 'message': {'message_id': 280, 'from': {'id': 111222555, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 111222555, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552161920, 'text': 'welcome hicham to paris'}}
DEBUG opsdroid.core: Parsing input: welcome hicham to paris
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.welcome: =========> triggering welcome skill
DEBUG opsdroid.connector.telegram: Responding with: Whoops there has been an error
DEBUG opsdroid.connector.telegram: Successfully responded
DEBUG opsdroid.connector.telegram: Responding with: Check the log for details
DEBUG opsdroid.connector.telegram: Successfully responded
ERROR opsdroid.core: Exception when running skill 'welcome'
Traceback (most recent call last):
File "/Users/hicham/Developments/Bots/opsdroid-iobreaker/opsdroid/opsdroid/core.py", line 336, in run_skill
await skill(self, config, message)
File "/Users/hicham/Library/Application Support/opsdroid/opsdroid-modules/skill/welcome/__init__.py", line 12, in welcome
_LOGGER.debug("=========> group('name') = {}".format(message.regex.group("name")))
IndexError: no such group
```
Perhaps I am missing something, but it is a strange behaviour/issue.
## Versions
- Opsdroid version: v0.14.1+28.gfed4488.dirty
- Python version: 3.7.2
## Configuration File
```yaml
## Parsers
parsers:
# ## Regex (http://opsdroid.readthedocs.io/en/stable/matchers/regex)
- name: regex
enabled: true
## Skill modules
skills:
- name: welcome
path: /Users/hicham/Developments/Bots/opsdroid-data/skills/skill-welcome
debug: true
no-cache: true
## Dance (https://github.com/opsdroid/skill-dance)
# - name: dance
## Hello (https://github.com/opsdroid/skill-hello)
- name: hello
```
Regards
 | Ok, I think I found why this issue is present;
We can understand the problem through the debug log below:
```
DEBUG opsdroid.connector.telegram: {'update_id': 246644589, 'message': {'message_id': 304, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552228481, 'text': 'welcome hicham to paris'}}
DEBUG opsdroid.core: Parsing input: welcome hicham to paris
DEBUG opsdroid.parsers.regex: =========> Working on Skill function : <function welcome at 0x10e904ea0>
DEBUG opsdroid.parsers.regex: ==================> Working on matcher : {'regex': {'expression': '(welcome)(\\s*)(?P<name>\\w+)(\\s*)(to|at)(\\s*)(?P<location>\\w+)', 'case_sensitive': False, 'score_factor': 0.6}}
DEBUG opsdroid.parsers.regex: ===========================> Test regex against message is : <re.Match object; span=(0, 23), match='welcome hicham to paris'>
DEBUG opsdroid.parsers.regex: ===========================> Test regex groups are : {'name': 'hicham', 'location': 'paris'}
DEBUG opsdroid.parsers.regex: =========> Working on Skill function : <function hello at 0x10e950840>
DEBUG opsdroid.parsers.regex: ==================> Working on matcher : {'regex': {'expression': 'hi|hello|hey|hallo', 'case_sensitive': True, 'score_factor': 0.6}}
DEBUG opsdroid.parsers.regex: ===========================> Test regex against message is : <re.Match object; span=(8, 10), match='hi'>
DEBUG opsdroid.parsers.regex: ===========================> Test regex groups are : {}
DEBUG opsdroid.parsers.regex: =========> Working on Skill function : <function goodbye at 0x10e9509d8>
DEBUG opsdroid.parsers.regex: ==================> Working on matcher : {'regex': {'expression': "bye( bye)?|see y(a|ou)|au revoir|gtg|I(\\')?m off", 'case_sensitive': True, 'score_factor': 0.6}}
DEBUG opsdroid.parsers.regex: ===========================> Test regex against message is : None
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.welcome: =========> triggering welcome skill
DEBUG opsdroid.connector.telegram: Responding with: Whoops there has been an error
DEBUG opsdroid.connector.telegram: Successfully responded
DEBUG opsdroid.connector.telegram: Responding with: Check the log for details
DEBUG opsdroid.connector.telegram: Successfully responded
ERROR opsdroid.core: Exception when running skill 'welcome'
Traceback (most recent call last):
File "/Users/hicham/Developments/Bots/opsdroid-iobreaker/opsdroid/opsdroid/core.py", line 336, in run_skill
await skill(self, config, message)
File "/Users/hicham/Library/Application Support/opsdroid/opsdroid-modules/skill/welcome/__init__.py", line 12, in welcome
_LOGGER.debug("=========> group('name') = {}".format(message.regex.group("name")))
IndexError: no such group
```
### First problem :
The function **async def parse_regex(opsdroid, skills, message):** in the **regex.py** parser file looks for any function with a regex matcher and then parses the message against it.
The problem is that this function alters the message object each time here:
```
if regex:
message.regex = regex #<----------
matched_skills.append({
"score": await calculate_score(
opts["expression"], opts["score_factor"]),
"skill": skill,
"config": skill.config,
"message": message
})
```
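To make the effect of that in-place mutation concrete, here is a small standalone illustration (`SimpleNamespace` stands in for the real Message object; this is not opsdroid code):

```python
import copy
import re
from types import SimpleNamespace

message = SimpleNamespace(text="welcome hicham to paris", regex=None)
expressions = [r"welcome (?P<name>\w+) to (?P<location>\w+)", r"hi|hello"]

# Mutating the shared object: every earlier matched skill now sees the last match.
matched = []
for expression in expressions:
    match = re.search(expression, message.text, re.IGNORECASE)
    if match:
        message.regex = match
        matched.append(message)
print(matched[0].regex.groupdict())  # {} - the named groups from the first match are gone

# Copying per match keeps each skill's own regex result.
matched = []
for expression in expressions:
    match = re.search(expression, message.text, re.IGNORECASE)
    if match:
        own = copy.copy(message)
        own.regex = match
        matched.append(own)
print(matched[0].regex.groupdict())  # {'name': 'hicham', 'location': 'paris'}
```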
### Second problem :
The function **async def parse(self, message):** in **core.py** always uses the original message object instead of the one returned by the parse_regex function:
```
if ranked_skills:
tasks.append(
self.eventloop.create_task(
self.run_skill(ranked_skills[0]["skill"],
ranked_skills[0]["config"],
message))) #<----------
```
I fixed those two problems like this :
> regex.py file (making a shallow copy of the message object before playing with it)
```python
...
...
import copy
async def parse_regex(opsdroid, skills, message):
...
...
if regex:
currentMessage = copy.copy(message)
                currentMessage.regex = regex
matched_skills.append({
"score": await calculate_score(
opts["expression"], opts["score_factor"]),
"skill": skill,
"config": skill.config,
"message": currentMessage
})
return matched_skills
```
> core.py file ( passing ranked_skills[0]["message"] instead of message)
```python
async def parse(self, message):
"""Parse a string against all skills."""
self.stats["messages_parsed"] = self.stats["messages_parsed"] + 1
tasks = []
if message is not None:
if str(message.text).strip():
_LOGGER.debug(_("Parsing input: %s"), message.text)
tasks.append(
self.eventloop.create_task(parse_always(self, message)))
unconstrained_skills = await self._constrain_skills(
self.skills, message)
ranked_skills = await self.get_ranked_skills(
unconstrained_skills, message)
if ranked_skills:
tasks.append(
self.eventloop.create_task(
self.run_skill(ranked_skills[0]["skill"],
ranked_skills[0]["config"],
ranked_skills[0]["message"])))
return tasks
```
Now all is working fine
```
INFO opsdroid.web: Started web server on http://127.0.0.1:8080
DEBUG opsdroid.connector.telegram: {'update_id': 246644673, 'message': {'message_id': 485, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552237760, 'text': 'welcome ali to paris'}}
DEBUG opsdroid.core: Parsing input: welcome ali to paris
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.welcome: =========> triggering welcome skill
DEBUG opsdroid-modules.skill.welcome: =========> message groups = {'name': 'ali', 'location': 'paris'}
DEBUG opsdroid.connector.telegram: {'update_id': 246644674, 'message': {'message_id': 486, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552237768, 'text': 'welcome hicham to paris'}}
DEBUG opsdroid.core: Parsing input: welcome hicham to paris
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.welcome: =========> triggering welcome skill
DEBUG opsdroid-modules.skill.welcome: =========> message groups = {'name': 'hicham', 'location': 'paris'}
DEBUG opsdroid.connector.telegram: {'update_id': 246644675, 'message': {'message_id': 487, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552237782, 'text': 'hi'}}
DEBUG opsdroid.core: Parsing input: hi
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.hello: =====> Executing hello function
DEBUG opsdroid.connector.telegram: Responding with: Hi IOBreaker
DEBUG opsdroid.connector.telegram: Successfully responded
DEBUG opsdroid.connector.telegram: {'update_id': 246644676, 'message': {'message_id': 489, 'from': {'id': 112233445, 'is_bot': False, 'first_name': 'IOBreaker', 'language_code': 'en'}, 'chat': {'id': 112233445, 'first_name': 'IOBreaker', 'type': 'private'}, 'date': 1552237785, 'text': 'goodbye'}}
DEBUG opsdroid.core: Parsing input: goodbye
DEBUG opsdroid.core: Processing parsers...
DEBUG opsdroid-modules.skill.hello: =====> Executing goodbye function
DEBUG opsdroid.connector.telegram: Responding with: Au revoir IOBreaker
DEBUG opsdroid.connector.telegram: Successfully responded
```
I would like to have your feedback before doing a PR with the fix
Regards
Thanks for this! That sounds like a reasonable solution.
I would be keen for you to check the other parsers to see if they suffer from the same problem when raising your PR.
I checked some parsers; those seem OK.
I will create a PR with the modifications to correct this issue.
Proposition: it would be good to allow the user to choose between a full match (re.fullmatch) and a simple match (re.match/re.search). This would prevent confusion between two skills, the first parsing `hi` and the second parsing `hihi` (it's just an example :-) ).
It would add another layer of safety and give the user the ability to execute the skill only if the message matches the pattern exactly.
Sounds good. There is already a kwarg to set whether the regex is case sensitive or not. You could add this as another one.
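Something like the following could work (entirely hypothetical - the `matching_condition` kwarg and helper below are not part of opsdroid, just an illustration of the full-match idea next to the existing `case_sensitive` option):

```python
import re


def evaluate(expression, text, case_sensitive=True, matching_condition="search"):
    flags = 0 if case_sensitive else re.IGNORECASE
    pattern = re.compile(expression, flags)
    # "fullmatch" only fires when the whole message matches the pattern.
    if matching_condition == "fullmatch":
        return pattern.fullmatch(text)
    return pattern.search(text)


print(bool(evaluate("hi", "hihi")))                                  # True
print(bool(evaluate("hi", "hihi", matching_condition="fullmatch")))  # False
```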
Ok, to keep subjects separated, I will work on the fix first and afterwards on adding the choice between a full or a simple match | 2019-03-11T21:19:09 |
|
opsdroid/opsdroid | 881 | opsdroid__opsdroid-881 | [
"860"
] | de8d5ec638f8f32b5b130f2862f01894a17e9ee1 | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -188,7 +188,7 @@ def _communicate_process(process):
@staticmethod
def _load_intents(config):
- intent_file = os.path.join(config["install_path"], "intents.md")
+ intent_file = os.path.join(config["install_path"], "intents.yml")
if os.path.isfile(intent_file):
with open(intent_file, 'r') as intent_file_handle:
intents = intent_file_handle.read()
diff --git a/opsdroid/parsers/rasanlu.py b/opsdroid/parsers/rasanlu.py
--- a/opsdroid/parsers/rasanlu.py
+++ b/opsdroid/parsers/rasanlu.py
@@ -16,9 +16,8 @@
async def _get_all_intents(skills):
"""Get all skill intents and concatenate into a single markdown string."""
- matchers = [matcher for skill in skills for matcher in skill.matchers]
- intents = [matcher["intents"] for matcher in matchers
- if matcher["intents"] is not None]
+ intents = [skill["intents"] for skill in skills
+ if skill["intents"] is not None]
if not intents:
return None
intents = "\n\n".join(intents)
@@ -100,9 +99,16 @@ async def train_rasanlu(config, skills):
url = await _build_training_url(config)
+ # https://github.com/RasaHQ/rasa_nlu/blob/master/docs/http.rst#post-train
+ # Note : The request should always be sent as
+ # application/x-yml regardless of wether you use
+ # json or md for the data format. Do not send json as
+ # application/json for example.+
+ headers = {'content-type': 'application/x-yml'}
+
try:
training_start = arrow.now()
- resp = await session.post(url, data=intents)
+ resp = await session.post(url, data=intents, headers=headers)
except aiohttp.client_exceptions.ClientConnectorError:
_LOGGER.error(_("Unable to connect to Rasa NLU, training failed."))
return False
| diff --git a/tests/test_loader.py b/tests/test_loader.py
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -217,7 +217,7 @@ def test_loading_intents(self):
self._tmp_dir, os.path.normpath("test/module/test"))
os.makedirs(config['install_path'], mode=0o777)
intent_contents = "Hello world"
- intents_file = os.path.join(config['install_path'], "intents.md")
+ intents_file = os.path.join(config['install_path'], "intents.yml")
with open(intents_file, 'w') as intents:
intents.write(intent_contents)
loaded_intents = ld.Loader._load_intents(config)
diff --git a/tests/test_parser_rasanlu.py b/tests/test_parser_rasanlu.py
--- a/tests/test_parser_rasanlu.py
+++ b/tests/test_parser_rasanlu.py
@@ -346,9 +346,9 @@ async def test__get_all_intents(self):
await self.getMockSkill(),
await self.getMockSkill()
]
- skills[0].matchers = [{"intents": "Hello"}]
- skills[1].matchers = [{"intents": None}]
- skills[2].matchers = [{"intents": "World"}]
+ skills[0] = {"intents": "Hello"}
+ skills[1] = {"intents": None}
+ skills[2] = {"intents": "World"}
intents = await rasanlu._get_all_intents(skills)
self.assertEqual(type(intents), type(b""))
self.assertEqual(intents, b"Hello\n\nWorld")
| rasanlu parser : dict object has no attribute matchers
Hi all,
# Description
When using the rasanlu parser with no intents file (no need to train the model, that was already done), an attribute error happens in the function **_get_all_intents(skills)**:
```
...
matchers = [matcher for skill in skills for matcher in skill.matchers]
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/parsers/rasanlu.py", line 19, in <listcomp>
matchers = [matcher for skill in skills for matcher in skill.matchers]
AttributeError: 'dict' object has no attribute 'matchers'
```
## Steps to Reproduce
1- create or use a skill that responds to an intent
2- use rasanlu as a parser
3- do not provide an intent file to opsdroid; train your model outside of opsdroid
4- start opsdroid
## Expected Functionality
if no intents are provided, no training step should be issued (no crash)
## Experienced Functionality
an attribute error happens in the function **_get_all_intents(skills)**:
```
INFO opsdroid.parsers.rasanlu: Starting Rasa NLU training.
DEBUG asyncio: Using selector: KqueueSelector
Traceback (most recent call last):
File "/usr/local/Cellar/python/3.7.0/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/Cellar/python/3.7.0/Frameworks/Python.framework/Versions/3.7/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/__main__.py", line 206, in <module>
init()
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/__main__.py", line 203, in init
main()
File "/Users/IOBreaker/Developments/Bots/opsdroid/venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/Users/IOBreaker/Developments/Bots/opsdroid/venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/Users/IOBreaker/Developments/Bots/opsdroid/venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/IOBreaker/Developments/Bots/opsdroid/venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/__main__.py", line 196, in main
opsdroid.load()
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/core.py", line 152, in load
self.train_parsers(self.modules["skills"])
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/core.py", line 270, in train_parsers
asyncio.gather(*tasks, loop=self.eventloop))
File "/usr/local/Cellar/python/3.7.0/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 568, in run_until_complete
return future.result()
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/parsers/rasanlu.py", line 87, in train_rasanlu
intents = await _get_all_intents(skills)
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/parsers/rasanlu.py", line 19, in _get_all_intents
matchers = [matcher for skill in skills for matcher in skill.matchers]
File "/Users/IOBreaker/Developments/Bots/opsdroid/opsdroid/parsers/rasanlu.py", line 19, in <listcomp>
matchers = [matcher for skill in skills for matcher in skill.matchers]
AttributeError: 'dict' object has no attribute 'matchers'
```
This error is due to this line :
> matchers = [matcher for skill in skills for matcher in skill.matchers]
Example of 3 skills used with the rasanlu parser:
```
[
{'module': <module 'opsdroid-modules.skill.discussion' from '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/discussion/__init__.py'>, 'config': {'name': 'discussion', 'path': '/Users/IOBreaker/Developments/Bots/opsdroid-data/skills/skill-rasanlu-discussion', 'debug': False, 'no-cache': True, 'module': '', 'type': 'skill', 'is_builtin': None, 'entrypoint': None, 'module_path': 'opsdroid-modules.skill.discussion', 'install_path': '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/discussion', 'branch': 'master'}, 'intents': None},
{'module': <module 'opsdroid-modules.skill.hyperv' from '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/hyperv/__init__.py'>, 'config': {'name': 'hyperv', 'path': '/Users/IOBreaker/Developments/Bots/opsdroid-data/skills/skill-rasanlu-hyperv', 'apixu-key': 'ee6eef66ee4e4ee3aee64539192002', 'debug': False, 'no-cache': True, 'module': '', 'type': 'skill', 'is_builtin': None, 'entrypoint': None, 'module_path': 'opsdroid-modules.skill.hyperv', 'install_path': '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/hyperv', 'branch': 'master'}, 'intents': None},
{'module': <module 'opsdroid-modules.skill.vmware' from '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/vmware/__init__.py'>, 'config': {'name': 'vmware', 'path': '/Users/IOBreaker/Developments/Bots/opsdroid-data/skills/skill-vmware', 'no-cache': True, 'module': '', 'type': 'skill', 'is_builtin': None, 'entrypoint': None, 'module_path': 'opsdroid-modules.skill.vmware', 'install_path': '/Users/IOBreaker/Library/Application Support/opsdroid/opsdroid-modules/skill/vmware', 'branch': 'master'}, 'intents': None}
]
```
## Versions
- **Opsdroid version: v0.14.1+19.g6360912**
- **Python version: 3.7.0**
## Configuration File
```
parsers:
# ## Rasanlu (http://opsdroid.readthedocs.io/en/stable/matchers/rasanlu)
- name: rasanlu
url: http://localhost:5000
project: myProject
model: myModule
min-score: 0.8
skills:
- name: social
path: /Users/IOBreaker/Developments/Bots/opsdroid-data/skills/skill-rasanlu-vmware
debug: true
no-cache: true
```
| Thanks for raising this! Looks like we need some more error checking!
Hi all,
can someone confirm why the code is passing each skill.matchers to get intents !?
```python
matchers = [matcher for skill in skills for matcher in skill.matchers]
```
What I can see is that the intents are linked to the skill itself, as you can see in this log:
```
INFO opsdroid.parsers.rasanlu: Starting Rasa NLU training.
DEBUG opsdroid.parsers.rasanlu: =======> skill = {'module': <module 'opsdroid-modules.skill.thanks' from '/home/hicham/.local/share/opsdroid/opsdroid-modules/skill/thanks/__init__.py'>, 'config': {'name': 'thanks', 'path': '/home/hicham/Developments/Bots/opsdroid/miBot/skills/skill-thanks', 'no-cache': True, 'module': '', 'type': 'skill', 'is_builtin': None, 'entrypoint': None, 'module_path': 'opsdroid-modules.skill.thanks', 'install_path': '/home/hicham/.local/share/opsdroid/opsdroid-modules/skill/thanks', 'branch': 'master'}, 'intents': '## intent:thanks\n- thanks\n- thx\n- thank you'}
```
I am working on a fix (the fix is working and ready for deployment) but i want to be sure that intent has nothing to do with skill.matchers in our case because intents can be loaded directly from each skill
```python
intents = [skill["intents"] for skill in skills
if skill["intents"] is not None]
```
Regards
@jacobtomlinson or @FabioRosado
Any idea please ?
We are using the RasaNLU format for intents and it is the only matcher which uses them. However I'm keen to use it as a generic way to specify intents and so it should be treated separately to the matchers.
No problem.
I am just trying to understand the logic behind the code :-)
The function **self._load_intents(config)** in **loader.py** is called at the beginning; when it finds an intents.md file in the skill's directory, the content of this file is added to the intents dict.
This produces the output below: **'intents': '## intent:user_say_iam_back\ni am back\ni am back again'**
```
INFO opsdroid.parsers.rasanlu: Starting Rasa NLU training.
DEBUG opsdroid.parsers.rasanlu: ======> RASA : {'module': <module 'opsdroid-modules.skill.social' from '/Users/hicham/Library/Application Support/opsdroid/opsdroid-modules/skill/social/__init__.py'>, 'config': {'name': 'social', 'path': '/Users/hicham/Developments/Bots/opsdroid-data/skills/skill-rasanlu-social', 'debug': False, 'no-cache': True, 'module': '', 'type': 'skill', 'is_builtin': None, 'entrypoint': None, 'module_path': 'opsdroid-modules.skill.social', 'install_path': '/Users/hicham/Library/Application Support/opsdroid/opsdroid-modules/skill/social', 'branch': 'master'}, 'intents': '## intent:user_say_iam_back\ni am back\ni am back again'}
```
As you can see, no matcher attribute or dict is added to the skill's dict.
The second step is calling the function **train_parsers(self, skills)** in **core.py** to train Rasa NLU:
```python
async def _get_all_intents(skills):
"""Get all skill intents and concatenate into a single markdown string."""
matchers = [matcher for skill in skills for matcher in skill.matchers]
intents = [matcher["intents"] for matcher in matchers
if matcher["intents"] is not None]
if not intents:
return None
intents = "\n\n".join(intents)
return unicodedata.normalize("NFKD", intents).encode('ascii')
```
In this step no matcher exists in the skill's dict, so it is normal to get an error when executing
```python
matchers = [matcher for skill in skills for matcher in skill.matchers]
```
That is why I suggested using this instead:
```python
intents = [skill["intents"] for skill in skills
if skill["intents"] is not None]
```
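A trimmed-down illustration using the dict structure shown in the logs above (the entries are plain dicts, so `skill["intents"]` works while attribute access like `skill.matchers` raises `AttributeError`):

```python
skills = [
    {"config": {"name": "discussion"}, "intents": None},
    {"config": {"name": "thanks"}, "intents": "## intent:thanks\n- thanks\n- thx"},
]

intents = [skill["intents"] for skill in skills if skill["intents"] is not None]
print("\n\n".join(intents))
```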
If matcher info must be in the skill dict before calling **train_parsers(self, skills)** and **_get_all_intents(skills)**, it means that a step is missing somewhere!
Yeah thanks for catching this. If you could make your proposed changes that would be great.
ok, I will work on it and submit a PR when it's done
Unfortunately, this issue is not the only one; it is a drop in the ocean.
Correcting the initial issue, I am now getting this one:
```
ERROR opsdroid.parsers.rasanlu: Bad Rasa NLU response - {
"error": "Content-Type must be 'application/x-yml' or 'application/json'"
}
```
When trying to correct this second issue, I got another one:
```
ERROR opsdroid.parsers.rasanlu: Bad Rasa NLU response - {
"error": "'str' object has no attribute 'get'"
}
'status': 400, 'reason': 'Bad Request'
```
So 3 issues in one.
Continuing to work on it :-)
Fixed the first two errors.
The error with the str object is related to Rasa NLU itself, I think.
Here is my testing version of Rasa NLU:
```
{
"version": "0.14.4",
"minimum_compatible_version": "0.13.0a2"
}
```
I added a new **train** parameter in config file to let users choose if they want opsdroid to train the model or not _**(training is not always needed)**_ :
```yaml
parsers:
# ## Rasanlu (http://opsdroid.readthedocs.io/en/stable/matchers/rasanlu)
- name: rasanlu
url: http://localhost:5000
project: ergo
model: ergo
token: "zgazrgzrgzrgazrgaz4er5et3gezg35a"
    train: true # <------
min-score: 0.8
```
Is it ok for you ?
I seem to remember that the training should only happen if the intents have changed. Therefore the `train` config option may be unnecessary.
Hi @jacobtomlinson, understood, but what I mean is that the training step here is always started, even if the Rasa training itself does not occur. Why start it each time if it's not needed (or the user does not want it to happen)?
The only way to prevent this at the moment is to force the user to not have the md file, or to move it out of the skill directory.
No problem taking `train` out of scope if you think it's not a relevant change.
I think if there is an `md` file then the training step should be triggered. It should then take a hash of the `md` file, compare it with the last trained hash (I can't remember where this is stored) and decide whether or not to actually do the training.
Therefore I think that this is intended behavior and shouldn't add overhead if training has already been done.
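For illustration, the hash-and-compare idea could look roughly like this (where the previously trained hash is stored is an assumption here, not the real opsdroid behaviour):

```python
import hashlib


def needs_training(intents_markdown, last_trained_hash):
    """Return (changed, new_hash) for the current intents file content."""
    current = hashlib.sha256(intents_markdown.encode("utf-8")).hexdigest()
    return current != last_trained_hash, current


changed, new_hash = needs_training("## intent:thanks\n- thanks", last_trained_hash=None)
if changed:
    print("Intents changed - train Rasa NLU and store", new_hash)
else:
    print("Intents unchanged - skip training")
```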
got it :-), thanks a lot | 2019-03-20T10:08:14 |
opsdroid/opsdroid | 887 | opsdroid__opsdroid-887 | [
"835",
"835"
] | 18847d513f79c42bad472c851eacf643d8cabef0 | diff --git a/opsdroid/connector/matrix/connector.py b/opsdroid/connector/matrix/connector.py
--- a/opsdroid/connector/matrix/connector.py
+++ b/opsdroid/connector/matrix/connector.py
@@ -142,6 +142,14 @@ async def listen(self): # pragma: no cover
message = await self._parse_sync_response(response)
await self.opsdroid.parse(message)
+ except MatrixRequestError as mre:
+ # We can safely ignore timeout errors. The non-standard error
+ # codes are returned by Cloudflare.
+ if mre.code in [504, 522, 524]:
+ _LOGGER.info('Matrix Sync Timeout (code: %d)', mre.code)
+ continue
+
+ _LOGGER.exception('Matrix Sync Error')
except CancelledError:
raise
except Exception: # pylint: disable=W0703
Matrix Connector should not log HTML page contents on a 504
If the matrix server responds with a 504 error on `/sync` (which is pretty common) the connector should not log the whole HTML page, as it clutters the log.
This is *especially* true on a matrix.org account as you get a whole cloudflare error page.
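In practice that means either treating gateway timeouts as routine (as the patch above does for 504 and the Cloudflare-specific 522/524 codes) or truncating the body before it reaches the log. A rough sketch of the truncation idea — the function and constant names are made up for illustration:
```python
MAX_LOGGED_BODY = 200  # characters of the error body worth keeping

def format_sync_error(status, body):
    """Reduce a sync failure to one short log line instead of a full HTML page."""
    snippet = body.replace("\n", " ")[:MAX_LOGGED_BODY]
    return f"Matrix sync failed with HTTP {status}: {snippet}..."
```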
| 2019-03-30T13:00:42 |
||
opsdroid/opsdroid | 898 | opsdroid__opsdroid-898 | [
"888"
] | 0e2be0d4425afc959a68349ca90db08e7de52da0 | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -105,7 +105,7 @@ def is_builtin_module(config):
return importlib.util.find_spec(
'opsdroid.{module_type}.{module_name}'.format(
module_type=config["type"],
- module_name=config["name"]
+ module_name=config["name"].lower()
)
)
except ImportError:
@@ -116,7 +116,7 @@ def build_module_import_path(config):
"""Generate the module import path from name and type."""
if config["is_builtin"]:
return "opsdroid" + "." + config["type"] + \
- "." + config["name"]
+ "." + config["name"].lower()
return MODULES_DIRECTORY + "." + config["type"] + \
"." + config["name"]
| Opsdroid installs connector from repository instead of core
# Description
I have got my hands on a Raspberry Pi 3 and decided to install opsdroid on it and keep it on permanently. I had set up the Slack and Telegram connectors, but I kept getting this exception:
```python
(...)
File "/usr/local/lib/python3.6/site-packages/opsdroid/core.py", line 279, in start_connectors
connector = cls(connector_module["config"], self)
TypeError: __init__() takes 2 positional arguments but 3 were given
```
I had to do a bit of digging in order to try and figure out why this was happening since I installed opsdroid from pip. Also, on my Kali machine, it worked just fine.
After checking the `config.yaml` I noticed that on the Kali machine `telegram` was spelt with a lowercase `t`, whilst on the Raspberry Pi it had a capital `T`.
Apparently, typing `- name: Telegram` will clone the telegram connector from the deprecated repo, but typing `- name: telegram` will use the one in core.
I haven't tested
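For illustration, the case-sensitive lookup behind this behaviour is roughly the following — on a case-sensitive filesystem the capitalised name is not found as a builtin module, so opsdroid falls back to cloning the old repository (module paths shown are assumptions based on the patch above):
```python
import importlib.util

# The builtin connector ships as "opsdroid.connector.telegram" (lowercase).
print(importlib.util.find_spec("opsdroid.connector.telegram"))  # ModuleSpec(...)
print(importlib.util.find_spec("opsdroid.connector.Telegram"))  # None -> treated as external
```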
## Steps to Reproduce
- Add any connector that has been merged to the core.
- Set up the connector in `config.yaml` starting with a capital letter (`- name: Slack`)
- Run opsdroid
- The exception occurs because opsdroid downloaded the deprecated repository
## Expected Functionality
Opsdroid should install the newer version of the connector that has been merged to the core.
## Experienced Functionality
Explain what happened instead (please include the debug log).
## Versions
- **Opsdroid version:** stable
- **Python version:** 3.6
- **OS/Docker version:** raspbian
| 2019-04-12T09:48:15 |
||
opsdroid/opsdroid | 902 | opsdroid__opsdroid-902 | [
"901"
] | 11508b513ea563a58fc95581b7032256449c65c1 | diff --git a/opsdroid/core.py b/opsdroid/core.py
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -422,11 +422,12 @@ async def _constrain_skills(self, skills, message):
list: A list of the skills which were not constrained.
"""
- for skill in skills:
- for constraint in skill.constraints:
- if not constraint(message):
- skills.remove(skill)
- return skills
+ return [
+ skill for skill in skills if all(
+ constraint(message)
+ for constraint in skill.constraints
+ )
+ ]
async def parse(self, message):
"""Parse a string against all skills."""
| diff --git a/tests/test_constraints.py b/tests/test_constraints.py
--- a/tests/test_constraints.py
+++ b/tests/test_constraints.py
@@ -103,3 +103,26 @@ async def test_constrain_connectors_skips(self):
Message('Hello', 'user', '#general', connector)
)
self.assertEqual(len(tasks), 2) # match_always and the skill
+
+ async def test_constraint_can_be_called_after_skip(self):
+ with OpsDroid() as opsdroid:
+ opsdroid.eventloop = mock.CoroutineMock()
+ skill = await self.getMockSkill()
+ skill = match_regex(r'.*')(skill)
+ skill = constraints.constrain_users(['user'])(skill)
+ opsdroid.skills.append(skill)
+
+ tasks = await opsdroid.parse(
+ Message('Hello', 'user', '#general', None)
+ )
+ self.assertEqual(len(tasks), 2) # match_always and the skill
+
+ tasks = await opsdroid.parse(
+ Message('Hello', 'otheruser', '#general', None)
+ )
+ self.assertEqual(len(tasks), 1) # Just match_always
+
+ tasks = await opsdroid.parse(
+ Message('Hello', 'user', '#general', None)
+ )
+ self.assertEqual(len(tasks), 2) # match_always and the skill
| Constraints remove skills "forever"
# Description
The new constraints feature (which, by the way, is an awesome feature) removes a skill "forever" when a constraint is activated.
I write "forever" in quotes because the skills will be unavailable until Opsdroid is restarted.
## Steps to Reproduce
- Add any constraint to a skill (for example, `@constrain_users(['alice'])`)
- Send a message from the user `alice`. Opsdroid will answer as expected.
- Send a message from the user `bob`. Opsdroid will not answer, as expected.
- Send again a message from the user `alice`. Unexpectedly, opsdroid will not answer.
## Expected Functionality
Opsdroid should answer when `alice` sends the message after `bob`.
## Experienced Functionality
Opsdroid doesn't answer when `alice` sends the message after `bob`.
## Versions
- **Opsdroid version: 0.14.1**
- **Python version: 3.6.7**
- **OS/Docker version: Ubuntu 18.04**
## Additional Details
The problem seems to be in the methods `parse` and `_constrain_skills` of the `OpsDroid` class.
`parse` calls `_constrain_skills` with `self.skills` as a parameter. `self.skills` is a list, so it is passed by reference. Inside `_constrain_skills`, that list is mutated by `skills.remove(skill)`. So we're mutating `self.skills` directly.
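The effect is easy to reproduce in isolation: mutating the caller's list while iterating over it, versus building a new filtered list as the fix above does. The names below are simplified stand-ins, not opsdroid's real objects:
```python
def constrain_in_place(skills, allowed):
    # Buggy: removes items from the caller's list, so a skill skipped once
    # stays gone for every later message too (and remove-while-iterating
    # silently skips elements as well).
    for skill in skills:
        if skill not in allowed:
            skills.remove(skill)
    return skills

def constrain_copy(skills, allowed):
    # Fixed: leave the original list untouched and return a filtered view.
    return [skill for skill in skills if skill in allowed]

skills = ["skill_a", "skill_b", "skill_c"]
constrain_in_place(skills, allowed={"skill_a"})
print(skills)  # ['skill_a', 'skill_c'] -- the original list has been changed for good
```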
| 2019-04-18T19:44:05 |