diff --git a/airflow/contrib/example_dags/example_twitter_README.md b/airflow/contrib/example_dags/example_twitter_README.md
index 28ae9826f1f06..0876bdb4d161a 100644
--- a/airflow/contrib/example_dags/example_twitter_README.md
+++ b/airflow/contrib/example_dags/example_twitter_README.md
@@ -50,6 +50,6 @@ CREATE TABLE toTwitter_A(id BIGINT, id_str STRING
 ```
 When you review the code for the DAG, you will notice that these tasks are generated using for loop. These two for loops could be combined into one loop. However, in most cases, you will be running different analysis on your incoming incoming and outgoing tweets, and hence they are kept separated in this example. Final step is a running the broker script, brokerapi.py, which will run queries in Hive and store the summarized data to MySQL in our case. To connect to Hive, pyhs2 library is extremely useful and easy to use. To insert data into MySQL from Python, sqlalchemy is also a good one to use.
-I hope you find this tutorial useful. If you have question feel free to ask me on [Twitter](https://twitter.com/EkhtiarSyed) or via the live Airflow chatroom room in [Gitter](https://gitter.im/airbnb/airflow).
+I hope you find this tutorial useful. If you have a question, feel free to ask me on [Twitter](https://twitter.com/EkhtiarSyed) or via the live Airflow chatroom on [Gitter](https://gitter.im/apache/incubator-airflow).
 
 -Ekhtiar Syed
 Last Update: 8-April-2016
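The broker step this README hunk describes could be sketched as follows — a minimal, hypothetical brokerapi.py fragment, assuming a HiveServer2 on localhost:10000 and made-up table and column names:

```python
# Minimal sketch of the broker step: summarize tweets in Hive, store in MySQL.
# Host, credentials, and the table/column names below are illustrative only.
import pyhs2
from sqlalchemy import create_engine

# Run the summary query in Hive via pyhs2.
with pyhs2.connect(host='localhost', port=10000, authMechanism='PLAIN',
                   user='hive', database='default') as conn:
    with conn.cursor() as cur:
        cur.execute('SELECT user_id, COUNT(*) FROM toTwitter_A GROUP BY user_id')
        rows = cur.fetch()  # pyhs2 returns all result rows as a list

# Insert the summarized rows into MySQL via sqlalchemy (1.x-style API).
engine = create_engine('mysql://user:password@localhost/twitter_summary')
for user_id, n_tweets in rows:
    engine.execute(
        'INSERT INTO tweet_counts (user_id, n_tweets) VALUES (%s, %s)',
        (user_id, n_tweets))
```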
diff --git a/airflow/models.py b/airflow/models.py
index bc939e72b3ec9..ce4beb16c1bc9 100755
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -3866,8 +3866,7 @@ def subdags(self):
         """
         Returns a list of the subdag objects associated to this DAG
         """
-        # Check SubDag for class but don't check class directly, see
-        # https://github.com/airbnb/airflow/issues/1168
+        # Check SubDag for class but don't check class directly
         from airflow.operators.subdag_operator import SubDagOperator
         subdag_lst = []
         for task in self.tasks:
diff --git a/airflow/operators/slack_operator.py b/airflow/operators/slack_operator.py
index c5a69456fbe86..3382bc2788dd4 100644
--- a/airflow/operators/slack_operator.py
+++ b/airflow/operators/slack_operator.py
@@ -115,8 +115,8 @@ def __init__(self,
                  text='No message has been set.\n'
                       'Here is a cat video instead\n'
                       'https://www.youtube.com/watch?v=J---aiyznGQ',
-                 icon_url='https://raw.githubusercontent.com'
-                          '/airbnb/airflow/master/airflow/www/static/pin_100.png',
+                 icon_url='https://raw.githubusercontent.com/apache/'
+                          'incubator-airflow/master/airflow/www/static/pin_100.jpg',
                  attachments=None,
                  *args, **kwargs):
         self.method = 'chat.postMessage'
diff --git a/docs/scheduler.rst b/docs/scheduler.rst
index 4f539be2dd6f3..377fdffac2837 100644
--- a/docs/scheduler.rst
+++ b/docs/scheduler.rst
@@ -114,7 +114,7 @@ interval series.
 
     """
     Code that goes along with the Airflow tutorial located at:
-    https://github.com/airbnb/airflow/blob/master/airflow/example_dags/tutorial.py
+    https://github.com/apache/incubator-airflow/blob/master/airflow/example_dags/tutorial.py
     """
     from airflow import DAG
    from airflow.operators.bash_operator import BashOperator
diff --git a/tests/jobs.py b/tests/jobs.py
index 231944de3e8f5..bb729424eca38 100644
--- a/tests/jobs.py
+++ b/tests/jobs.py
@@ -421,8 +421,6 @@ def test_backfill_ordered_concurrent_execute(self):
     def test_backfill_pooled_tasks(self):
         """
         Test that queued tasks are executed by BackfillJob
-
-        Test for https://github.com/airbnb/airflow/pull/1225
         """
         session = settings.Session()
         pool = Pool(pool='test_backfill_pooled_task_pool', slots=1)
diff --git a/tests/sensors/test_http_sensor.py b/tests/sensors/test_http_sensor.py
index de9513724415a..5e55aa56e9c49 100644
--- a/tests/sensors/test_http_sensor.py
+++ b/tests/sensors/test_http_sensor.py
@@ -140,7 +140,7 @@ class FakeSession(object):
     def __init__(self):
         self.response = requests.Response()
         self.response.status_code = 200
-        self.response._content = 'airbnb/airflow'.encode('ascii', 'ignore')
+        self.response._content = 'apache/incubator-airflow'.encode('ascii', 'ignore')
 
     def send(self, request, **kwargs):
         return self.response
@@ -178,7 +178,7 @@ def test_get_response_check(self):
             method='GET',
             endpoint='/search',
             data={"client": "ubuntu", "q": "airflow"},
-            response_check=lambda response: ("airbnb/airflow" in response.text),
+            response_check=lambda response: ("apache/incubator-airflow" in response.text),
             headers={},
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
@@ -192,7 +192,7 @@ def test_sensor(self):
             request_params={"client": "ubuntu", "q": "airflow", 'date': '{{ds}}'},
             headers={},
             response_check=lambda response: (
-                "airbnb/airflow/" + DEFAULT_DATE.strftime('%Y-%m-%d')
+                "apache/incubator-airflow/" + DEFAULT_DATE.strftime('%Y-%m-%d')
                 in response.text),
             poke_interval=5,
             timeout=15,
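Given the new default icon_url in the slack_operator.py hunk, a typical invocation of the operator might look like the following — a hedged sketch, where the token, channel, and task id are placeholders:

```python
# Hypothetical usage of SlackAPIPostOperator; credentials are placeholders.
from airflow.operators.slack_operator import SlackAPIPostOperator

post_to_slack = SlackAPIPostOperator(
    task_id='notify_slack',            # hypothetical task id
    token='xoxp-your-slack-api-token', # placeholder Slack API token
    channel='#airflow-alerts',         # placeholder channel
    username='airflow',
    text='DAG {{ dag.dag_id }} succeeded for {{ ds }}',
    dag=dag)                           # assumes a `dag` object in scope
```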
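Similarly, the response_check pattern exercised by the updated HTTP sensor tests can be used in a DAG roughly like this — a sketch assuming the default 'http_default' connection and the import path implied by the test module's location:

```python
# Sketch of an HttpSensor mirroring the tests' response_check pattern;
# the connection id and endpoint are assumptions for illustration.
from airflow.sensors.http_sensor import HttpSensor

wait_for_match = HttpSensor(
    task_id='wait_for_match',
    http_conn_id='http_default',
    endpoint='/search',
    request_params={'q': 'airflow'},
    response_check=lambda response: 'apache/incubator-airflow' in response.text,
    poke_interval=5,   # seconds between pokes
    timeout=15,        # give up after 15 seconds
    dag=dag)           # assumes a `dag` object in scope
```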