2011-10-04 03:22:53 +04:00
|
|
|
#!/usr/bin/env python
|
2012-02-20 18:58:49 +04:00
|
|
|
|
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
2011-10-04 03:22:53 +04:00
|
|
|
|
2012-03-27 05:44:21 +04:00
|
|
|
import pytest
|
2012-10-03 16:23:57 +04:00
|
|
|
from unittestzero import Assert
|
2012-03-27 05:44:21 +04:00
|
|
|
|
2012-09-24 19:12:40 +04:00
|
|
|
from pages.home_page import Home
|
2013-02-07 23:46:52 +04:00
|
|
|
from pages.link_crawler import LinkCrawler
|
2011-10-04 03:22:53 +04:00
|
|
|
|
2013-02-08 02:23:33 +04:00
|
|
|
|
2011-10-04 03:22:53 +04:00
|
|
|
class TestAccount:
    """Smoke tests covering login/logout flows and footer link health."""

    @pytest.mark.nondestructive
    def test_login_logout(self, mozwebqa):
        # Log in from the home page, then log out via the header menu
        # and confirm the BrowserID sign-in link is shown again.
        home = Home(mozwebqa)
        Assert.true(home.is_csrf_token_present)

        home.login()
        Assert.true(home.header.is_logout_menu_item_present)

        home.header.click_logout_menu_item()
        Assert.true(home.is_browserid_link_present)

    @pytest.mark.nondestructive
    def test_logout_verify_bid(self, mozwebqa):
        # Regression test for
        # https://github.com/mozilla/mozillians-tests/issues/99:
        # logging out directly via the logout URL should still leave the
        # site in a signed-out state.
        home = Home(mozwebqa)
        Assert.true(home.is_csrf_token_present)

        home.login()
        Assert.true(home.header.is_logout_menu_item_present)

        home.logout_using_url()

        home.wait_for_user_login()
        Assert.true(home.is_browserid_link_present)

    @pytest.mark.skip_selenium
    @pytest.mark.nondestructive
    def test_that_links_in_footer_return_200_code(self, mozwebqa):
        # Crawl the footer of the home page and verify every collected
        # link responds with an OK status code.
        link_crawler = LinkCrawler(mozwebqa)
        footer_urls = link_crawler.collect_links('/', name='footer')

        # Fail fast if the crawler came back empty-handed.
        Assert.greater(
            len(footer_urls), 0, u'something went wrong. no links found.')

        # verify_status_code_is_ok returns True on success and a failure
        # report otherwise; keep only the failure reports.
        results = [link_crawler.verify_status_code_is_ok(url)
                   for url in footer_urls]
        failures = [result for result in results if result is not True]

        Assert.equal(
            0, len(failures),
            u'%s bad links found. ' % len(failures) + ', '.join(failures))
|