diff --git a/.travis.yml b/.travis.yml index 2440899e4f25..a6a3212ac82e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,26 +1,26 @@ language: python cache: pip python: - - 2.7 - - 3.6 - #- nightly - #- pypy - #- pypy3 + - 2.7 + - 3.6 + #- nightly + #- pypy + #- pypy3 matrix: - allow_failures: - - python: nightly - - python: pypy - - python: pypy3 + allow_failures: + - python: nightly + - python: pypy + - python: pypy3 install: - #- pip install -r requirements.txt - - pip install flake8 # pytest # add another testing frameworks later + #- pip install -r requirements.txt + - pip install flake8 # pytest # add another testing frameworks later before_script: - # stop the build if there are Python syntax errors or undefined names - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + # stop the build if there are Python syntax errors or undefined names + - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics script: - - true # pytest --capture=sys # add other tests here + - true # pytest --capture=sys # add other tests here notifications: - on_success: change - on_failure: change # `always` will be the setting once code changes slow down + on_success: change + on_failure: change # `always` will be the setting once code changes slow down diff --git a/ONLINE/BAT.py b/ONLINE/BAT.py new file mode 100644 index 000000000000..35a17b615aa9 --- /dev/null +++ b/ONLINE/BAT.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import datetime +import json +import ssl +import unicodedata +from concurrent.futures.thread import ThreadPoolExecutor + +import pandas as pd +import numpy as np +import requests +import argparse + +# 屏蔽HTTPS证书校验, 忽略安全警告 +requests.packages.urllib3.disable_warnings() +context = ssl._create_unverified_context() + + +def init_param() -> list: + """ + 初始化参数, 读取shell命令参数, 自动登录 + 依次返回httpie_view方式, 线程池, 登录cookie + :rtype: list + """ + parser = argparse.ArgumentParser(description="并发执行接口") + parser.add_argument("-w", "--workers", type=int, choices=choice_nums(1, 65, 1), default=1, help="并发执行线程数, 取值范围[1, 64]") + group = parser.add_mutually_exclusive_group() + group.add_argument("-v", "--view", action="store_true", help="显示请求详细信息") + group.add_argument("-hd", "--header", action="store_true", help="显示请求头") + group.add_argument("-b", "--body", action="store_true", help="显示请求Body") + group.add_argument("-d", "--download", action="store_true", help="显示请求头, 但响应结果保存至TXT") + args = parser.parse_args() + view_param = "-v" + if args.header: + view_param = "-h" + if args.body: + view_param = "-b" + if args.download: + view_param = "-d" + print("参数设置结果: httpie命令方式=[{}], 并发线程数=[{}]".format(view_param, args.workers)) + init_executor = ThreadPoolExecutor(max_workers=args.workers) + cookie = auto_login() + return [view_param, init_executor, cookie] + + +def execute_http(id: int) -> str: + """ 
+ 执行excuteUrl.json接口, 返回结果数据 + :param id: 接口请求标识性ID数据 + :rtype: str + """ + with open("./excuteUrl.json", 'r') as request_data: + request_json = json.load(request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_headers['Cookie'] = init_cookie + request_body = replace_id(request_json['body'], id) + response_body = { + "status": -1, + "msg": "接口执行失败", + "data": "请检查接口是否返回JSON格式的相应数据, 以及抛出未经处理的特殊异常" + } + executeStartTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + try: + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + except Exception as e: + print(e) + executeEndTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + httpie_cmd_str = httpie(url, method, request_headers, request_body) + return "执行命令httpie:\n{}\n当前ID=[{}], executeStartTime=[{}], executeEndTime=[{}]\n响应结果:\n{}".format(httpie_cmd_str, id, executeStartTime, executeEndTime, response_body) + + +def httpie(url: str, method: str, request_headers: json, request_body: json) -> str: + """ + 拼接httpie完整命令 + :param url: 接口访问路径 + :param method: 请求方式 + :param request_headers: 请求头JSON + :param request_body: 请求Body体JSON + :rtype: str + """ + joiner = ' ' + cmd = "http" + no_ca = "--verify=no" + param_list = [cmd, no_ca, httpie_view] + param_list.extend([method, url]) + for (k, v) in request_headers.items(): + if k == "Cookie": + param_list.append("'" + k + ":" + v + "'") + else: + param_list.append(k + ":" + v) + for (k, v) in request_body.items(): + if is_number(v): + param_list.append(k + ":=" + v) + else: + param_list.append(k + "=" + v) + return joiner.join(param_list) + + +def is_number(s: str) -> bool: + """ + :param s: 输入字符串 + :rtype: bool + """ + try: + float(s) + return True + except ValueError: + # ValueError为Python的一种标准异常,表示"传入无效的参数" + 
# 如果引发了ValueError这种异常,不做任何事情(pass:不做任何事情,一般用做占位语句) + pass + try: + # 把一个表示数字的字符串转换为浮点数返回的函数 + unicodedata.numeric(s) + return True + except (TypeError, ValueError): + pass + return False + + +def load_data() -> list: + """ + 读取数据文件, 每行为一条数据 + :rtype: list + """ + data = pd.read_csv("./ID.csv", header=-1) + data.columns = ['id'] + return data['id'] + + +def auto_login() -> str: + """ + 自动登录, 获取登录Cookie + :rtype: str + """ + with open("./ssoLogin.json", 'r') as sso_login_request_data: + request_json = json.load(sso_login_request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_body = handle_json_str_value(request_json['body']) + # request_headers = {"Content-Type": "application/json", "HT-app": "6"} + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + response_headers = response.headers + # 处理Cookie, 多个Cookie之间使用';'分隔, 否则校验cookie时出现"domain."在高版本中tomcat中报错 + # https://blog.csdn.net/w57685321/article/details/84943176 + cookie = response_headers.get("set-Cookie").replace(", _r", "; _r").replace(", _a", "; _a") + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + print("登录响应Cookie结果: \n{}\n登录响应BODY结果: {}".format(cookie, response_body)) + return cookie + + +def handle_json_str_value(json: json) -> json: + """ + 将json的值都变为字符串处理 + :param json: + :rtype: json + """ + for (k, v) in json.items(): + json[k] = str(v) + return json + + +def replace_id(json: json, id: int) -> json: + """ + 将json的值都变为字符串处理 + :param json: + :param id: 目标ID + :rtype: json + """ + for (k, v) in json.items(): + if v == "NONE": + json[k] = str(id) + else: + json[k] = str(v) + return json + + +def choice_nums(start: int, end: int, delta: int) -> list: + """ + 返回指定的数组序列 + :rtype: list + """ + return np.arange(start, end, delta).tolist() + + +def main(): + # 全局变量 + global httpie_view + global executor + global 
init_cookie + # 首先初始化数据 + init = init_param() + httpie_view = init[0] + executor = init[1] + init_cookie = init[2] + # 读取ID数据列表 + ids = load_data() + for result in executor.map(execute_http, ids): + print(result) + + +if __name__ == '__main__': + main() diff --git a/ONLINE/ID.csv b/ONLINE/ID.csv new file mode 100644 index 000000000000..bd41cba781d8 --- /dev/null +++ b/ONLINE/ID.csv @@ -0,0 +1 @@ +12345 \ No newline at end of file diff --git a/ONLINE/excuteUrl.json b/ONLINE/excuteUrl.json new file mode 100644 index 000000000000..414a0363f00f --- /dev/null +++ b/ONLINE/excuteUrl.json @@ -0,0 +1,11 @@ +{ + "url": "https://localhost:8119/account/sentinel", + "method": "POST", + "headers" : { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "subAccountId": "NONE" + } +} diff --git a/ONLINE/ssoLogin.json b/ONLINE/ssoLogin.json new file mode 100644 index 000000000000..161ff65024d4 --- /dev/null +++ b/ONLINE/ssoLogin.json @@ -0,0 +1,14 @@ +{ + "url": "https://sso.testa.huitong.com/api/v100/ssonew/login", + "method": "POST", + "headers": { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "phone": "13188880000", + "smsAuthCode": "123456", + "loginType": 0, + "pwd": "ht123456." 
+ } +} diff --git a/analysis/compression_analysis/psnr.py b/analysis/compression_analysis/psnr.py index 0f21aac07d34..57fb5c08fd57 100644 --- a/analysis/compression_analysis/psnr.py +++ b/analysis/compression_analysis/psnr.py @@ -9,6 +9,7 @@ import cv2 import numpy as np + def psnr(original, contrast): mse = np.mean((original - contrast) ** 2) if mse == 0: diff --git a/arithmetic_analysis/bisection.py b/arithmetic_analysis/bisection.py index c81fa84f81e1..7526f5f4a01f 100644 --- a/arithmetic_analysis/bisection.py +++ b/arithmetic_analysis/bisection.py @@ -15,7 +15,7 @@ def bisection(function, a, b): # finds where the function becomes 0 in [a,b] us return else: mid = (start + end) / 2 - while abs(start - mid) > 10**-7: # until we achieve precise equals to 10^-7 + while abs(start - mid) > 10 ** -7: # until we achieve precise equals to 10^-7 if function(mid) == 0: return mid elif function(mid) * function(start) < 0: @@ -27,7 +27,8 @@ def bisection(function, a, b): # finds where the function becomes 0 in [a,b] us def f(x): - return math.pow(x, 3) - 2*x - 5 + return math.pow(x, 3) - 2 * x - 5 + if __name__ == "__main__": print(bisection(f, 1, 1000)) diff --git a/arithmetic_analysis/intersection.py b/arithmetic_analysis/intersection.py index 2f25f76ebd96..ebb206c8aa37 100644 --- a/arithmetic_analysis/intersection.py +++ b/arithmetic_analysis/intersection.py @@ -1,17 +1,20 @@ import math -def intersection(function,x0,x1): #function is the f we want to find its root and x0 and x1 are two random starting points + +def intersection(function, x0, x1): # function is the f we want to find its root and x0 and x1 are two random starting points x_n = x0 x_n1 = x1 while True: - x_n2 = x_n1-(function(x_n1)/((function(x_n1)-function(x_n))/(x_n1-x_n))) - if abs(x_n2 - x_n1) < 10**-5: + x_n2 = x_n1 - (function(x_n1) / ((function(x_n1) - function(x_n)) / (x_n1 - x_n))) + if abs(x_n2 - x_n1) < 10 ** -5: return x_n2 - x_n=x_n1 - x_n1=x_n2 + x_n = x_n1 + x_n1 = x_n2 + def f(x): - return 
math.pow(x , 3) - (2 * x) -5 + return math.pow(x, 3) - (2 * x) - 5 + if __name__ == "__main__": - print(intersection(f,3,3.5)) + print(intersection(f, 3, 3.5)) diff --git a/arithmetic_analysis/lu_decomposition.py b/arithmetic_analysis/lu_decomposition.py index f291d2dfe003..3bbbcfe566af 100644 --- a/arithmetic_analysis/lu_decomposition.py +++ b/arithmetic_analysis/lu_decomposition.py @@ -1,32 +1,34 @@ # lower–upper (LU) decomposition - https://en.wikipedia.org/wiki/LU_decomposition import numpy -def LUDecompose (table): + +def LUDecompose(table): # Table that contains our data # Table has to be a square array so we need to check first - rows,columns=numpy.shape(table) - L=numpy.zeros((rows,columns)) - U=numpy.zeros((rows,columns)) - if rows!=columns: + rows, columns = numpy.shape(table) + L = numpy.zeros((rows, columns)) + U = numpy.zeros((rows, columns)) + if rows != columns: return [] - for i in range (columns): - for j in range(i-1): - sum=0 - for k in range (j-1): - sum+=L[i][k]*U[k][j] - L[i][j]=(table[i][j]-sum)/U[j][j] - L[i][i]=1 - for j in range(i-1,columns): - sum1=0 - for k in range(i-1): - sum1+=L[i][k]*U[k][j] - U[i][j]=table[i][j]-sum1 - return L,U + for i in range(columns): + for j in range(i - 1): + sum = 0 + for k in range(j - 1): + sum += L[i][k] * U[k][j] + L[i][j] = (table[i][j] - sum) / U[j][j] + L[i][i] = 1 + for j in range(i - 1, columns): + sum1 = 0 + for k in range(i - 1): + sum1 += L[i][k] * U[k][j] + U[i][j] = table[i][j] - sum1 + return L, U + if __name__ == "__main__": - matrix =numpy.array([[2,-2,1], - [0,1,2], - [5,3,1]]) - L,U = LUDecompose(matrix) + matrix = numpy.array([[2, -2, 1], + [0, 1, 2], + [5, 3, 1]]) + L, U = LUDecompose(matrix) print(L) print(U) diff --git a/arithmetic_analysis/newton_method.py b/arithmetic_analysis/newton_method.py index 2ed29502522e..888e01cb865a 100644 --- a/arithmetic_analysis/newton_method.py +++ b/arithmetic_analysis/newton_method.py @@ -1,18 +1,21 @@ # Newton's Method - 
https://en.wikipedia.org/wiki/Newton%27s_method -def newton(function,function1,startingInt): #function is the f(x) and function1 is the f'(x) - x_n=startingInt - while True: - x_n1=x_n-function(x_n)/function1(x_n) - if abs(x_n-x_n1) < 10**-5: - return x_n1 - x_n=x_n1 - +def newton(function, function1, startingInt): # function is the f(x) and function1 is the f'(x) + x_n = startingInt + while True: + x_n1 = x_n - function(x_n) / function1(x_n) + if abs(x_n - x_n1) < 10 ** -5: + return x_n1 + x_n = x_n1 + + def f(x): - return (x**3) - (2 * x) -5 + return (x ** 3) - (2 * x) - 5 + def f1(x): - return 3 * (x**2) -2 + return 3 * (x ** 2) - 2 + if __name__ == "__main__": - print(newton(f,f1,3)) + print(newton(f, f1, 3)) diff --git a/arithmetic_analysis/newton_raphson_method.py b/arithmetic_analysis/newton_raphson_method.py index 5e7e2f930abc..18a10c6605c3 100644 --- a/arithmetic_analysis/newton_raphson_method.py +++ b/arithmetic_analysis/newton_raphson_method.py @@ -1,36 +1,34 @@ # Implementing Newton Raphson method in Python # Author: Haseeb -from sympy import diff from decimal import Decimal +from sympy import diff + + def NewtonRaphson(func, a): ''' Finds root from the point 'a' onwards by Newton-Raphson method ''' while True: - c = Decimal(a) - ( Decimal(eval(func)) / Decimal(eval(str(diff(func)))) ) - + c = Decimal(a) - (Decimal(eval(func)) / Decimal(eval(str(diff(func))))) + a = c # This number dictates the accuracy of the answer - if abs(eval(func)) < 10**-15: - return c - + if abs(eval(func)) < 10 ** -15: + return c + # Let's Execute if __name__ == '__main__': # Find root of trigonometric function # Find value of pi - print ('sin(x) = 0', NewtonRaphson('sin(x)', 2)) - + print('sin(x) = 0', NewtonRaphson('sin(x)', 2)) + # Find root of polynomial - print ('x**2 - 5*x +2 = 0', NewtonRaphson('x**2 - 5*x +2', 0.4)) - + print('x**2 - 5*x +2 = 0', NewtonRaphson('x**2 - 5*x +2', 0.4)) + # Find Square Root of 5 - print ('x**2 - 5 = 0', NewtonRaphson('x**2 - 5', 0.1)) + 
print('x**2 - 5 = 0', NewtonRaphson('x**2 - 5', 0.1)) # Exponential Roots - print ('exp(x) - 1 = 0', NewtonRaphson('exp(x) - 1', 0)) - - - - + print('exp(x) - 1 = 0', NewtonRaphson('exp(x) - 1', 0)) diff --git a/auto/ID.csv b/auto/ID.csv new file mode 100644 index 000000000000..8a1218a1024a --- /dev/null +++ b/auto/ID.csv @@ -0,0 +1,5 @@ +1 +2 +3 +4 +5 diff --git a/auto/back_excuteUrl.json b/auto/back_excuteUrl.json new file mode 100644 index 000000000000..238f4b58630a --- /dev/null +++ b/auto/back_excuteUrl.json @@ -0,0 +1,11 @@ +{ + "url": "http://172.16.10.41:8119/ruok/sentinel", + "method": "POST", + "headers" : [ + "Content-Type:application/json", + "HT-app:6" + ], + "body": [ + "id=NONE" + ] +} diff --git a/auto/backup.txt b/auto/backup.txt new file mode 100644 index 000000000000..c95bfc3a6354 --- /dev/null +++ b/auto/backup.txt @@ -0,0 +1,16 @@ +- 创建分类(/api/v100/live/employee/upload/dibblingVideo/addCategory) +"name=默认分类", +"schoolId=NONE" + +- 登录 +"phone=18999999999", +"smsAuthCode=123456", +"loginType:=0", +"pwd=ht123456.", + + +- 删除分类(/api/v100/live/employee/upload/dibblingVideo/deleteCategory) +"categoryId=NONE" + +- 视频详情(/api/v100/live/employee/upload/dibblingVideo/detail) +"videoId=NONE" \ No newline at end of file diff --git a/auto/batchHandler.py b/auto/batchHandler.py new file mode 100644 index 000000000000..35a17b615aa9 --- /dev/null +++ b/auto/batchHandler.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import datetime +import json +import ssl +import unicodedata +from concurrent.futures.thread import ThreadPoolExecutor + +import pandas as pd +import numpy as np +import requests +import argparse + +# 屏蔽HTTPS证书校验, 忽略安全警告 +requests.packages.urllib3.disable_warnings() +context = ssl._create_unverified_context() + + +def init_param() -> list: + """ + 初始化参数, 读取shell命令参数, 自动登录 + 依次返回httpie_view方式, 线程池, 登录cookie + :rtype: list + """ + parser = argparse.ArgumentParser(description="并发执行接口") + parser.add_argument("-w", "--workers", 
type=int, choices=choice_nums(1, 65, 1), default=1, help="并发执行线程数, 取值范围[1, 64]") + group = parser.add_mutually_exclusive_group() + group.add_argument("-v", "--view", action="store_true", help="显示请求详细信息") + group.add_argument("-hd", "--header", action="store_true", help="显示请求头") + group.add_argument("-b", "--body", action="store_true", help="显示请求Body") + group.add_argument("-d", "--download", action="store_true", help="显示请求头, 但响应结果保存至TXT") + args = parser.parse_args() + view_param = "-v" + if args.header: + view_param = "-h" + if args.body: + view_param = "-b" + if args.download: + view_param = "-d" + print("参数设置结果: httpie命令方式=[{}], 并发线程数=[{}]".format(view_param, args.workers)) + init_executor = ThreadPoolExecutor(max_workers=args.workers) + cookie = auto_login() + return [view_param, init_executor, cookie] + + +def execute_http(id: int) -> str: + """ + 执行excuteUrl.json接口, 返回结果数据 + :param id: 接口请求标识性ID数据 + :rtype: str + """ + with open("./excuteUrl.json", 'r') as request_data: + request_json = json.load(request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_headers['Cookie'] = init_cookie + request_body = replace_id(request_json['body'], id) + response_body = { + "status": -1, + "msg": "接口执行失败", + "data": "请检查接口是否返回JSON格式的相应数据, 以及抛出未经处理的特殊异常" + } + executeStartTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + try: + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + except Exception as e: + print(e) + executeEndTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + httpie_cmd_str = httpie(url, method, request_headers, request_body) + return "执行命令httpie:\n{}\n当前ID=[{}], executeStartTime=[{}], executeEndTime=[{}]\n响应结果:\n{}".format(httpie_cmd_str, id, executeStartTime, executeEndTime, 
response_body) + + +def httpie(url: str, method: str, request_headers: json, request_body: json) -> str: + """ + 拼接httpie完整命令 + :param url: 接口访问路径 + :param method: 请求方式 + :param request_headers: 请求头JSON + :param request_body: 请求Body体JSON + :rtype: str + """ + joiner = ' ' + cmd = "http" + no_ca = "--verify=no" + param_list = [cmd, no_ca, httpie_view] + param_list.extend([method, url]) + for (k, v) in request_headers.items(): + if k == "Cookie": + param_list.append("'" + k + ":" + v + "'") + else: + param_list.append(k + ":" + v) + for (k, v) in request_body.items(): + if is_number(v): + param_list.append(k + ":=" + v) + else: + param_list.append(k + "=" + v) + return joiner.join(param_list) + + +def is_number(s: str) -> bool: + """ + :param s: 输入字符串 + :rtype: bool + """ + try: + float(s) + return True + except ValueError: + # ValueError为Python的一种标准异常,表示"传入无效的参数" + # 如果引发了ValueError这种异常,不做任何事情(pass:不做任何事情,一般用做占位语句) + pass + try: + # 把一个表示数字的字符串转换为浮点数返回的函数 + unicodedata.numeric(s) + return True + except (TypeError, ValueError): + pass + return False + + +def load_data() -> list: + """ + 读取数据文件, 每行为一条数据 + :rtype: list + """ + data = pd.read_csv("./ID.csv", header=-1) + data.columns = ['id'] + return data['id'] + + +def auto_login() -> str: + """ + 自动登录, 获取登录Cookie + :rtype: str + """ + with open("./ssoLogin.json", 'r') as sso_login_request_data: + request_json = json.load(sso_login_request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_body = handle_json_str_value(request_json['body']) + # request_headers = {"Content-Type": "application/json", "HT-app": "6"} + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + response_headers = response.headers + # 处理Cookie, 多个Cookie之间使用';'分隔, 否则校验cookie时出现"domain."在高版本中tomcat中报错 + # https://blog.csdn.net/w57685321/article/details/84943176 + cookie = 
response_headers.get("set-Cookie").replace(", _r", "; _r").replace(", _a", "; _a") + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + print("登录响应Cookie结果: \n{}\n登录响应BODY结果: {}".format(cookie, response_body)) + return cookie + + +def handle_json_str_value(json: json) -> json: + """ + 将json的值都变为字符串处理 + :param json: + :rtype: json + """ + for (k, v) in json.items(): + json[k] = str(v) + return json + + +def replace_id(json: json, id: int) -> json: + """ + 将json的值都变为字符串处理 + :param json: + :param id: 目标ID + :rtype: json + """ + for (k, v) in json.items(): + if v == "NONE": + json[k] = str(id) + else: + json[k] = str(v) + return json + + +def choice_nums(start: int, end: int, delta: int) -> list: + """ + 返回指定的数组序列 + :rtype: list + """ + return np.arange(start, end, delta).tolist() + + +def main(): + # 全局变量 + global httpie_view + global executor + global init_cookie + # 首先初始化数据 + init = init_param() + httpie_view = init[0] + executor = init[1] + init_cookie = init[2] + # 读取ID数据列表 + ids = load_data() + for result in executor.map(execute_http, ids): + print(result) + + +if __name__ == '__main__': + main() diff --git a/auto/excuteUrl.json b/auto/excuteUrl.json new file mode 100644 index 000000000000..53763eb61bf8 --- /dev/null +++ b/auto/excuteUrl.json @@ -0,0 +1,12 @@ +{ + "url": "http://127.0.0.1:8127/api/v100/live/employee/upload/dibblingVideo/addCategory", + "method": "POST", + "headers": { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "schoolId": "NONE", + "name": "默认分类" + } +} diff --git a/auto/ssoLogin.json b/auto/ssoLogin.json new file mode 100644 index 000000000000..6ded12f95279 --- /dev/null +++ b/auto/ssoLogin.json @@ -0,0 +1,14 @@ +{ + "url": "https://sso.testa.huitong.com/api/v100/ssonew/login", + "method": "POST", + "headers": { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "phone": "18999999999", + "smsAuthCode": "123456", + "loginType": 0, + "pwd": "ht123456." 
+ } +} diff --git a/binary_tree/DIG.py b/binary_tree/DIG.py new file mode 100644 index 000000000000..b642f31bc092 --- /dev/null +++ b/binary_tree/DIG.py @@ -0,0 +1,82 @@ +def begin(): + print("装饰开始:瓜子板凳备好,坐等[生成]") + + +def end(): + print("装饰结束:瓜子嗑完了,板凳坐歪了,撤!") + + +def wrapper_counter_generator(func): + # 接受func的所有参数 + def wrapper(*args, **kwargs): + # 处理前 + begin() + # 执行处理 + result = func(*args, **kwargs) + # 处理后 + end() + # 返回处理结果 + return result + # 返回装饰的函数对象 + return wrapper + + +class DIGCounter: + """ + 装饰器-迭代器-生成器,一体化打包回家 + """ + + def __init__(self, start, end): + self.start = start + self.end = end + + def __iter__(self): + """ + 迭代获取的当前元素 + :rtype: object + """ + return self + + def __next__(self): + """ + 迭代获取的当前元素的下一个元素 + :rtype: object + :exception StopIteration + """ + if self.start > self.end: + raise StopIteration + current = self.start + self.start += 1 + return current + + @wrapper_counter_generator + def counter_generator(self): + """ + 获取生成器 + :rtype: generator + """ + while self.start <= self.end: + yield self.start + self.start += 1 + + +def main(): + """ + 迭代器/生成器(iterator)是不可重复遍历的, + 而可迭代对象(iterable)是可以重复遍历的, + iter()内置方法只会返回不可重复遍历的迭代器 + """ + + k_list = list(DIGCounter(1, 19)) + even_list = [e for e in k_list if not e % 2 == 0] + odd_list = [e for e in k_list if e % 2 == 0] + print(even_list) + print(odd_list) + + g_list = DIGCounter(1, 19).counter_generator() + five_list = [e for e in g_list if e % 5 == 0] + print(five_list) + + +if __name__ == '__main__': + main() diff --git a/binary_tree/PDF.csv b/binary_tree/PDF.csv new file mode 100644 index 000000000000..299062e11d79 --- /dev/null +++ b/binary_tree/PDF.csv @@ -0,0 +1,65534 @@ +129 +129 +128 +127 +126 +126 +126 +126 +125 +125 +125 +125 +125 +125 +125 +125 +125 +125 +125 +125 +125 +124 +124 +124 +124 +124 +124 +124 +124 +123 +123 +123 +123 +123 +123 +123 +123 +123 +123 +123 +123 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +122 +121 +121 
+121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +121 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +120 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +119 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +118 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +117 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +116 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +115 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +114 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 
+113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +113 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +112 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +111 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +110 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 
+109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +109 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +108 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +107 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +106 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 
+105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +105 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +104 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +103 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +102 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 
+101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +101 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +100 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +99 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +98 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +97 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +96 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +95 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +94 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +93 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +92 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +91 +90 +90 +90 +90 +90 +90 +90 +90 +90 +90 +90 +89 +89 +89 +89 +89 +89 
+89 +89 +89 +88 +88 +88 +88 +88 +88 +88 +87 +87 +87 +87 +87 +87 +86 +86 +86 +86 +86 +86 +86 +85 +85 +85 +85 +84 +84 +84 +83 +82 +82 +82 +82 +82 +81 +79 +79 +79 +78 +78 +77 +75 +74 +73 +73 +68 +67 +56 +50 +47 +39 +32 +30 +22 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/binary_tree/PDF.py b/binary_tree/PDF.py new file mode 100644 index 000000000000..dcf7f0e33548 --- /dev/null +++ b/binary_tree/PDF.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +import os +import pandas as pd +import math +import numpy as np +import matplotlib.pyplot as plt + + +def load_data(): + """ + 读取数据文件,推荐CSV格式 + :return: + """ + work_main_dir = os.path.dirname(__file__) + os.path.sep + file_path = work_main_dir + "PDF.csv" + return pd.read_csv(file_path) + + +def calculate_statistical_indicators(data, delta): + """ + 对数据序列计算:最大值/最小值/平均值/标准差,以及根据组距计算分组坐标 + :param data: 原始数据列 + :param delta: 分组区间长度 + :return: [最大值, 最小值, 平均值, 标准差, 分组间距, 分组数, 分组序号, 分组坐标点, 分组区间, 分组频次] + : 0 1 2 3 4 5 6 7 8 9 + """ + def dataframe_tolist(dataframe_data): + return sum(np.array(dataframe_data).tolist(), []) + + # 统计指标数据 + statistical_indicators = [float(data.max()), float(data.min()), float(data.mean()), float(data.std())] + # 数据转换 + datavalue = dataframe_tolist(data) + # 分组数 + split_group = math.ceil((statistical_indicators[0] - statistical_indicators[1]) / delta) + 1 + # 
分组自然编号序列 + group_nos = list(np.arange(1, split_group + 1, 1)) + # 分组坐标节点序列 + group_coordinates = list(statistical_indicators[1] + (np.array(group_nos) - 1) * delta) + # 分组坐标区间序列 + group_sections = [] + # 统计分组坐标区间频次, 统计标准左开右闭:(,] + group_frequencies = {} + for i in group_nos: + i -= 1 + if i == 0: + group_sections.append([0, group_coordinates[i]]) + else: + group_sections.append([group_coordinates[i - 1], group_coordinates[i]]) + + start = group_sections[i][0] + end = group_sections[i][1] + count = 0 + for value in datavalue: + if start < value <= end: + count += 1 + group_frequencies.update({i: count}) + statistical_indicators.append(delta) + statistical_indicators.append(split_group) + statistical_indicators.append(group_nos) + statistical_indicators.append(group_coordinates) + statistical_indicators.append(group_sections) + statistical_indicators.append(group_frequencies) + statistical_indicators.append(datavalue) + + return statistical_indicators + + +def normal_distribution_pdf(x, mu, sigma): + """ + 正态分布概率密度函数 + Normal distribution probability density function + :return: + """ + if sigma == 0: + return 0 + return np.exp(-((x-mu)**2 / (2 * sigma**2))) / (sigma * np.sqrt(2*np.pi)) + + +def calculate_points(mu, sigma): + point = [] + i = mu - 2 * sigma + while mu - 2 * sigma <= i <= mu + 2 * sigma: + point.append(i) + i += sigma + x = np.array(point) + y = normal_distribution_pdf(x, mu, sigma) + for i in range(0, len(x)): + print(x[i], y[i]) + return [x, y] + + +def plot_pdf(statistical_indicators): + plt.figure("NormalDistribution-PDF") + # plt.grid() + plt.xlabel("Student-Score") + plt.ylabel("Probability-Value") + plt.title("Figure-1.1") + plt.xlim(0.00, 140.00) + plt.ylim(0.00, 0.055) + + data = statistical_indicators[len(statistical_indicators) - 1] + plt.hist(data, bins=23, rwidth=5, density=True, color='yellow') + + mu, sigma = statistical_indicators[2], statistical_indicators[3] + coordinates = statistical_indicators[7] + # 增加0值起始点 + coordinates.insert(0, 
0) + x = np.array(coordinates) + y = normal_distribution_pdf(x, mu, sigma) + plt.plot(x, y, color='red', linewidth=2) + + points = calculate_points(mu, sigma) + plt.scatter(points[0], points[1], marker='<', s=30, c='green') + + # 绘制垂线plt.vlines + for x_i in points[0]: + plt.vlines(x_i, plt.ylim()[0], plt.ylim()[1], linestyles=':', linewidth=1) + + # 绘制水平线plt.hlines + # plt.hlines(0.025, plt.xlim()[0], plt.xlim()[1], linestyles=':', linewidth=1) + plt.show() + + +def main(): + data = load_data() + delta = 1 + statistical_indicators = calculate_statistical_indicators(data, delta) + plot_pdf(statistical_indicators) + + +if __name__ == '__main__': + main() + + diff --git a/binary_tree/__init__.py b/binary_tree/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/binary_tree/basic_binary_tree.py b/binary_tree/basic_binary_tree.py index 7c6240fb4dd4..dceacc416d00 100644 --- a/binary_tree/basic_binary_tree.py +++ b/binary_tree/basic_binary_tree.py @@ -1,62 +1,330 @@ -class Node: # This is the Class Node with constructor that contains data variable to type data and left,right pointers. 
+class TreeNode: + """This is the Class Node with constructor that contains data variable to type data and left,right pointers.""" + def __init__(self, data): self.data = data self.left = None self.right = None -def display(tree): #In Order traversal of the tree + def new_in_order_traversal(self) -> list: + """ + (代码优化)深度优先-中序遍历 + :return: list[TreeNode] + """ + if self is None: + return [] + return self.in_order_traversal(self.left) + [self] + self.in_order_traversal(self.right) + + def in_order_traversal(self, current_node) -> list: + """ + (冗余传参)深度优先-中序遍历 + :type current_node: TreeNode + :return: list[TreeNode] + """ + if current_node is None: + return [] + return self.in_order_traversal(current_node.left) + [current_node] + self.in_order_traversal(current_node.right) + + def pre_order_traversal(self) -> list: + """ + 深度优先-前序遍历 + :return: list[TreeNode] + """ + if self is None: + return [] + return [self] + self.left.pre_order_traversal() + self.right.pre_order_traversal() + + def post_order_traversal(self) -> list: + """ + 深度优先-后序遍历 + :return: list[TreeNode] + """ + if self is None: + return [] + return self.left.post_order_traversal() + self.right.post_order_traversal() + [self] + + @classmethod + def base_width_order_traversal(cls, root) -> list: + """ + 基本广度优先遍历 + :type root: TreeNode + :return: list[TreeNode] + """ + + def base_recursion_helper(current_node, current_level): + """ + 基本递归遍历,同一层次由左到右遍历 + :type current_level: int + :type current_node: TreeNode + """ + # 递归结束条件 + if current_node is None: + return + + # 收集本层元素 + sol[current_level - 1].append([current_node]) + + # 新的一层元素,需要添加收集容器 + if len(sol) == current_level: + sol.append([]) + + # 先左子树 + base_recursion_helper(current_node.left, current_level + 1) + # 后右子树 + base_recursion_helper(current_node.right, current_level + 1) + + sol = [[]] + base_recursion_helper(root, 1) + # sol为三层嵌套list, 且需要去除最后一个空元素[],以下方式可将三层list[]元素平铺为一层 + result_list = sum(sum(sol[:-1], []), []) + # result_list = [tree_node for 
second_list in sol[:-1] for third_list in second_list for tree_node in third_list] + return result_list - if tree is None: - return + @classmethod + def level_reverse_width_order_traversal(cls, root) -> list: + """ + 基本广度优先遍历 + :type root: TreeNode + :return: list[TreeNode] + """ - if tree.left is not None: - display(tree.left) + def level_reverse_recursion_helper(current_node, current_level): + """ + 基本递归遍历,同一层次由左到右遍历 + :type current_level: int + :type current_node: TreeNode + """ + # 递归结束条件 + if current_node is None: + return - print(tree.data) + # 收集本层元素 + sol[current_level - 1].append([current_node]) - if tree.right is not None: - display(tree.right) + # 新的一层元素,需要添加收集容器 + if len(sol) == current_level: + sol.append([]) - return + # 先右子树 + level_reverse_recursion_helper(current_node.right, current_level + 1) + # 后左子树 + level_reverse_recursion_helper(current_node.left, current_level + 1) -def depth_of_tree(tree): #This is the recursive function to find the depth of binary tree. - if tree is None: - return 0 - else: - depth_l_tree = depth_of_tree(tree.left) - depth_r_tree = depth_of_tree(tree.right) - if depth_l_tree > depth_r_tree: - return 1 + depth_l_tree - else: - return 1 + depth_r_tree + sol = [[]] + level_reverse_recursion_helper(root, 1) + # sol为三层嵌套list, 且需要去除最后一个空元素[],以下方式可将三层list[]元素平铺为一层 + result_list = sum(sum(sol[:-1], []), []) + # result_list = [tree_node for second_list in sol[:-1] for third_list in second_list for tree_node in third_list] + return result_list + @classmethod + def zigzag_width_order_traversal(cls, root) -> list: + """ + 锯齿型广度优先遍历 + :type root: TreeNode + :return: list[TreeNode] + """ -def is_full_binary_tree(tree): # This functions returns that is it full binary tree or not? 
- if tree is None: - return True - if (tree.left is None) and (tree.right is None): - return True - if (tree.left is not None) and (tree.right is not None): - return (is_full_binary_tree(tree.left) and is_full_binary_tree(tree.right)) - else: + def zigzag_recursion_helper(current_node, current_level): + """ + 基本递归遍历,同一层次由左到右遍历 + :type current_level: int + :type current_node: TreeNode + """ + # 递归结束点 + if current_node is None: + return + # 按照奇偶层进行拼接 + if current_level % 2 == 1: + # 收集本层元素,后插 + sol[current_level - 1].append([current_node]) + else: + # 收集本层元素,前插 + sol[current_level - 1].insert(0, [current_node]) + + # 新的一层元素,需要添加收集容器 + if len(sol) == current_level: + sol.append([]) + + # 先左子树 + zigzag_recursion_helper(current_node.left, current_level + 1) + # 后右子树 + zigzag_recursion_helper(current_node.right, current_level + 1) + + sol = [[]] + zigzag_recursion_helper(root, 1) + # sol为三层嵌套list, 且需要去除最后一个空元素[],以下方式可将三层list[]元素平铺为一层 + result_list = sum(sum(sol[:-1], []), []) + # result_list = [tree_node for second_list in sol[:-1] for third_list in second_list for tree_node in third_list] + return result_list + + def depth_of_tree(self) -> int: + """ + 树的深度 + :return: int + """ + if self is None: + return 0 + return 1 + max(self.left.depth_of_tree(), self.right.depth_of_tree()) + + def is_full_binary_tree(self) -> bool: + """ + 检查是否为满二叉树 + :return: bool + """ + if self is None: + return True + if (self.left is None) and (self.right is None): + return True + if (self.left is not None) and (self.right is not None): + return self.left.is_full_binary_tree() and self.right.is_full_binary_tree() return False -def main(): # Main func for testing. 
- tree = Node(1) - tree.left = Node(2) - tree.right = Node(3) - tree.left.left = Node(4) - tree.left.right = Node(5) - tree.left.right.left = Node(6) - tree.right.left = Node(7) - tree.right.left.left = Node(8) - tree.right.left.left.right = Node(9) - - print(is_full_binary_tree(tree)) - print(depth_of_tree(tree)) - print("Tree is: ") - display(tree) +def test_init_tree() -> TreeNode: + """ + 测试使用,构造树 + 1 + 2 3 + 4 5 7 + 6 8 + 9 + :return: TreeNode + """ + tree = TreeNode(1) + tree.left = TreeNode(2) + tree.right = TreeNode(3) + tree.left.left = TreeNode(4) + tree.left.right = TreeNode(5) + tree.left.right.left = TreeNode(6) + tree.right.left = TreeNode(7) + tree.right.left.left = TreeNode(8) + tree.right.left.left.right = TreeNode(9) + return tree + + +def test_in_order_traversal(): + """ + 二叉树中序遍历测试 + """ + init_tree = test_init_tree() + in_order_result = init_tree.in_order_traversal(init_tree) + in_order_data = fetch_tree_data(in_order_result) + out(in_order_data) + + +def test_new_in_order_traversal(): + """ + 二叉树中序遍历测试 + """ + init_tree = test_init_tree() + in_order_result = init_tree.new_in_order_traversal() + in_order_data = fetch_tree_data(in_order_result) + out(in_order_data) + + +def test_pre_order_traversal(): + """ + 二叉树前序遍历测试 + """ + init_tree = test_init_tree() + pre_order_result = init_tree.pre_order_traversal() + pre_order_data = fetch_tree_data(pre_order_result) + out(pre_order_data) + + +def test_post_order_traversal(): + """ + 二叉树后序遍历测试 + """ + init_tree = test_init_tree() + post_order_result = init_tree.post_order_traversal() + post_order_data = fetch_tree_data(post_order_result) + out(post_order_data) + + +def test_basic_width_order_traversal(): + """ + 二叉树基本层次遍历测试 + """ + init_tree = test_init_tree() + basic_width_order_result = TreeNode.base_width_order_traversal(init_tree) + basic_width_order_data = fetch_tree_data(basic_width_order_result) + out(basic_width_order_data) + + +def test_level_reverse_width_order_traversal(): + """ + 
二叉树同层反序层次遍历测试 + """ + init_tree = test_init_tree() + level_reverse_width_order_result = TreeNode.level_reverse_width_order_traversal(init_tree) + level_reverse_width_order_data = fetch_tree_data(level_reverse_width_order_result) + out(level_reverse_width_order_data) + + +def test_zigzag_width_order_traversal(): + """ + 二叉树同层反序层次遍历测试 + """ + init_tree = test_init_tree() + zigzag_width_order_result = TreeNode.zigzag_width_order_traversal(init_tree) + zigzag_width_order_data = fetch_tree_data(zigzag_width_order_result) + out(zigzag_width_order_data) + + +def fetch_tree_data(tree_list) -> list: + """ + 根据树的平铺列表,获取数据[data] + :type tree_list: list + :return: list[TreeNode.data] + """ + return [e.data for e in tree_list if e is not None] + + +def out(content): + """ + 输出内容 + :type content: object + """ + print(content) + + +def main(): + """ + python函数及其参数约定: https://www.cnblogs.com/xialiaoliao0911/p/9430491.html + """ + + tree = ''' + 初始树 + 1 + 2 3 + 4 5 7 + 6 8 + 9 + + ''' + out(tree) + + out("深度优先之[前序]遍历:") + test_in_order_traversal() + + out("(代码优化后的)深度优先之[前序]遍历:") + test_new_in_order_traversal() + # out("深度优先之[中序]遍历:") + # test_pre_order_traversal() + # + # out("深度优先之[后序]遍历:") + # test_post_order_traversal() + # + # out("广度优先之正序层次遍历:") + # test_basic_width_order_traversal() + # + # out("广度优先之同层反遍历:") + # test_level_reverse_width_order_traversal() + # + # out("广度优先之锯齿型遍历:") + # test_zigzag_width_order_traversal() if __name__ == '__main__': diff --git a/binary_tree/test_treeNode.py b/binary_tree/test_treeNode.py new file mode 100644 index 000000000000..4191f018557f --- /dev/null +++ b/binary_tree/test_treeNode.py @@ -0,0 +1,5 @@ +from unittest import TestCase + + +class TestTreeNode(TestCase): + pass diff --git a/boolean_algebra/quine_mc_cluskey.py b/boolean_algebra/quine_mc_cluskey.py index db4d153cbfd7..8d0ecceb1ad7 100644 --- a/boolean_algebra/quine_mc_cluskey.py +++ b/boolean_algebra/quine_mc_cluskey.py @@ -1,116 +1,127 @@ def compare_string(string1, string2): - l1 
= list(string1); l2 = list(string2) - count = 0 - for i in range(len(l1)): - if l1[i] != l2[i]: - count += 1 - l1[i] = '_' - if count > 1: - return -1 - else: - return("".join(l1)) + l1 = list(string1); + l2 = list(string2) + count = 0 + for i in range(len(l1)): + if l1[i] != l2[i]: + count += 1 + l1[i] = '_' + if count > 1: + return -1 + else: + return ("".join(l1)) + def check(binary): - pi = [] - while 1: - check1 = ['$']*len(binary) - temp = [] - for i in range(len(binary)): - for j in range(i+1, len(binary)): - k=compare_string(binary[i], binary[j]) - if k != -1: - check1[i] = '*' - check1[j] = '*' - temp.append(k) - for i in range(len(binary)): - if check1[i] == '$': - pi.append(binary[i]) - if len(temp) == 0: - return pi - binary = list(set(temp)) + pi = [] + while 1: + check1 = ['$'] * len(binary) + temp = [] + for i in range(len(binary)): + for j in range(i + 1, len(binary)): + k = compare_string(binary[i], binary[j]) + if k != -1: + check1[i] = '*' + check1[j] = '*' + temp.append(k) + for i in range(len(binary)): + if check1[i] == '$': + pi.append(binary[i]) + if len(temp) == 0: + return pi + binary = list(set(temp)) + def decimal_to_binary(no_of_variable, minterms): - temp = [] - s = '' - for m in minterms: - for i in range(no_of_variable): - s = str(m%2) + s - m //= 2 - temp.append(s) - s = '' - return temp + temp = [] + s = '' + for m in minterms: + for i in range(no_of_variable): + s = str(m % 2) + s + m //= 2 + temp.append(s) + s = '' + return temp + def is_for_table(string1, string2, count): - l1 = list(string1);l2=list(string2) - count_n = 0 - for i in range(len(l1)): - if l1[i] != l2[i]: - count_n += 1 - if count_n == count: - return True - else: - return False + l1 = list(string1); + l2 = list(string2) + count_n = 0 + for i in range(len(l1)): + if l1[i] != l2[i]: + count_n += 1 + if count_n == count: + return True + else: + return False + def selection(chart, prime_implicants): - temp = [] - select = [0]*len(chart) - for i in 
range(len(chart[0])): - count = 0 - rem = -1 - for j in range(len(chart)): - if chart[j][i] == 1: - count += 1 - rem = j - if count == 1: - select[rem] = 1 - for i in range(len(select)): - if select[i] == 1: - for j in range(len(chart[0])): - if chart[i][j] == 1: - for k in range(len(chart)): - chart[k][j] = 0 - temp.append(prime_implicants[i]) - while 1: - max_n = 0; rem = -1; count_n = 0 - for i in range(len(chart)): - count_n = chart[i].count(1) - if count_n > max_n: - max_n = count_n - rem = i - - if max_n == 0: - return temp - - temp.append(prime_implicants[rem]) - - for i in range(len(chart[0])): - if chart[rem][i] == 1: - for j in range(len(chart)): - chart[j][i] = 0 - + temp = [] + select = [0] * len(chart) + for i in range(len(chart[0])): + count = 0 + rem = -1 + for j in range(len(chart)): + if chart[j][i] == 1: + count += 1 + rem = j + if count == 1: + select[rem] = 1 + for i in range(len(select)): + if select[i] == 1: + for j in range(len(chart[0])): + if chart[i][j] == 1: + for k in range(len(chart)): + chart[k][j] = 0 + temp.append(prime_implicants[i]) + while 1: + max_n = 0; + rem = -1; + count_n = 0 + for i in range(len(chart)): + count_n = chart[i].count(1) + if count_n > max_n: + max_n = count_n + rem = i + + if max_n == 0: + return temp + + temp.append(prime_implicants[rem]) + + for i in range(len(chart[0])): + if chart[rem][i] == 1: + for j in range(len(chart)): + chart[j][i] = 0 + + def prime_implicant_chart(prime_implicants, binary): - chart = [[0 for x in range(len(binary))] for x in range(len(prime_implicants))] - for i in range(len(prime_implicants)): - count = prime_implicants[i].count('_') - for j in range(len(binary)): - if(is_for_table(prime_implicants[i], binary[j], count)): - chart[i][j] = 1 - - return chart + chart = [[0 for x in range(len(binary))] for x in range(len(prime_implicants))] + for i in range(len(prime_implicants)): + count = prime_implicants[i].count('_') + for j in range(len(binary)): + if 
(is_for_table(prime_implicants[i], binary[j], count)): + chart[i][j] = 1 + + return chart + def main(): - no_of_variable = int(input("Enter the no. of variables\n")) - minterms = [int(x) for x in input("Enter the decimal representation of Minterms 'Spaces Seprated'\n").split()] - binary = decimal_to_binary(no_of_variable, minterms) - - prime_implicants = check(binary) - print("Prime Implicants are:") - print(prime_implicants) - chart = prime_implicant_chart(prime_implicants, binary) - - essential_prime_implicants = selection(chart,prime_implicants) - print("Essential Prime Implicants are:") - print(essential_prime_implicants) + no_of_variable = int(input("Enter the no. of variables\n")) + minterms = [int(x) for x in input("Enter the decimal representation of Minterms 'Spaces Seprated'\n").split()] + binary = decimal_to_binary(no_of_variable, minterms) + + prime_implicants = check(binary) + print("Prime Implicants are:") + print(prime_implicants) + chart = prime_implicant_chart(prime_implicants, binary) + + essential_prime_implicants = selection(chart, prime_implicants) + print("Essential Prime Implicants are:") + print(essential_prime_implicants) + if __name__ == '__main__': - main() + main() diff --git a/ciphers/Atbash.py b/ciphers/Atbash.py index 162614c727ee..a21de4c43014 100644 --- a/ciphers/Atbash.py +++ b/ciphers/Atbash.py @@ -1,21 +1,22 @@ -try: # Python 2 +try: # Python 2 raw_input unichr -except NameError: # Python 3 +except NameError: #  Python 3 raw_input = input unichr = chr def Atbash(): - output="" + output = "" for i in raw_input("Enter the sentence to be encrypted ").strip(): extract = ord(i) if 65 <= extract <= 90: - output += unichr(155-extract) + output += unichr(155 - extract) elif 97 <= extract <= 122: - output += unichr(219-extract) + output += unichr(219 - extract) else: - output+=i + output += i print(output) + Atbash() diff --git a/ciphers/affine_cipher.py b/ciphers/affine_cipher.py index af5f4e0ff4c6..0d1add38c07b 100644 --- 
a/ciphers/affine_cipher.py +++ b/ciphers/affine_cipher.py @@ -1,26 +1,32 @@ from __future__ import print_function -import sys, random, cryptomath_module as cryptoMath + +import cryptomath_module as cryptoMath +import random +import sys SYMBOLS = r""" !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~""" + def main(): message = input('Enter message: ') key = int(input('Enter key [2000 - 9000]: ')) mode = input('Encrypt/Decrypt [E/D]: ') if mode.lower().startswith('e'): - mode = 'encrypt' - translated = encryptMessage(key, message) + mode = 'encrypt' + translated = encryptMessage(key, message) elif mode.lower().startswith('d'): - mode = 'decrypt' - translated = decryptMessage(key, message) + mode = 'decrypt' + translated = decryptMessage(key, message) print('\n%sed text: \n%s' % (mode.title(), translated)) + def getKeyParts(key): keyA = key // len(SYMBOLS) keyB = key % len(SYMBOLS) return (keyA, keyB) + def checkKeys(keyA, keyB, mode): if keyA == 1 and mode == 'encrypt': sys.exit('The affine cipher becomes weak when key A is set to 1. Choose different key') @@ -31,6 +37,7 @@ def checkKeys(keyA, keyB, mode): if cryptoMath.gcd(keyA, len(SYMBOLS)) != 1: sys.exit('Key A %s and the symbol set size %s are not relatively prime. Choose a different key.' 
% (keyA, len(SYMBOLS))) + def encryptMessage(key, message): ''' >>> encryptMessage(4545, 'The affine cipher is a type of monoalphabetic substitution cipher.') @@ -47,6 +54,7 @@ def encryptMessage(key, message): cipherText += symbol return cipherText + def decryptMessage(key, message): ''' >>> decryptMessage(4545, 'VL}p MM{I}p~{HL}Gp{vp pFsH}pxMpyxIx JHL O}F{~pvuOvF{FuF{xIp~{HL}Gi') @@ -64,6 +72,7 @@ def decryptMessage(key, message): plainText += symbol return plainText + def getRandomKey(): while True: keyA = random.randint(2, len(SYMBOLS)) @@ -71,7 +80,9 @@ def getRandomKey(): if cryptoMath.gcd(keyA, len(SYMBOLS)) == 1: return keyA * len(SYMBOLS) + keyB + if __name__ == '__main__': import doctest + doctest.testmod() main() diff --git a/ciphers/base16.py b/ciphers/base16.py index 9bc0e5d8337a..3577541a1092 100644 --- a/ciphers/base16.py +++ b/ciphers/base16.py @@ -1,11 +1,13 @@ import base64 + def main(): inp = input('->') - encoded = inp.encode('utf-8') #encoded the input (we need a bytes like object) - b16encoded = base64.b16encode(encoded) #b16encoded the encoded string + encoded = inp.encode('utf-8') # encoded the input (we need a bytes like object) + b16encoded = base64.b16encode(encoded) # b16encoded the encoded string print(b16encoded) - print(base64.b16decode(b16encoded).decode('utf-8'))#decoded it + print(base64.b16decode(b16encoded).decode('utf-8')) # decoded it + if __name__ == '__main__': main() diff --git a/ciphers/base32.py b/ciphers/base32.py index 2ac29f441e94..d993583a27ce 100644 --- a/ciphers/base32.py +++ b/ciphers/base32.py @@ -1,11 +1,13 @@ import base64 + def main(): inp = input('->') - encoded = inp.encode('utf-8') #encoded the input (we need a bytes like object) - b32encoded = base64.b32encode(encoded) #b32encoded the encoded string + encoded = inp.encode('utf-8') # encoded the input (we need a bytes like object) + b32encoded = base64.b32encode(encoded) # b32encoded the encoded string print(b32encoded) - 
print(base64.b32decode(b32encoded).decode('utf-8'))#decoded it + print(base64.b32decode(b32encoded).decode('utf-8')) # decoded it + if __name__ == '__main__': main() diff --git a/ciphers/base64_cipher.py b/ciphers/base64_cipher.py index fa3451c0cbae..17ce815c6a40 100644 --- a/ciphers/base64_cipher.py +++ b/ciphers/base64_cipher.py @@ -1,32 +1,33 @@ def encodeBase64(text): base64chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" - - r = "" #the result - c = 3 - len(text) % 3 #the length of padding - p = "=" * c #the padding - s = text + "\0" * c #the text to encode - - i = 0 + + r = "" # the result + c = 3 - len(text) % 3 # the length of padding + p = "=" * c # the padding + s = text + "\0" * c # the text to encode + + i = 0 while i < len(s): if i > 0 and ((i / 3 * 4) % 76) == 0: r = r + "\r\n" - - n = (ord(s[i]) << 16) + (ord(s[i+1]) << 8 ) + ord(s[i+2]) - + + n = (ord(s[i]) << 16) + (ord(s[i + 1]) << 8) + ord(s[i + 2]) + n1 = (n >> 18) & 63 n2 = (n >> 12) & 63 - n3 = (n >> 6) & 63 + n3 = (n >> 6) & 63 n4 = n & 63 - + r += base64chars[n1] + base64chars[n2] + base64chars[n3] + base64chars[n4] i += 3 - return r[0: len(r)-len(p)] + p - + return r[0: len(r) - len(p)] + p + + def decodeBase64(text): base64chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" s = "" - + for i in text: if i in base64chars: s += i @@ -34,31 +35,32 @@ def decodeBase64(text): else: if i == '=': c += '=' - + p = "" if c == "=": p = 'A' else: if c == "==": p = "AA" - + r = "" s = s + p - + i = 0 while i < len(s): - n = (base64chars.index(s[i]) << 18) + (base64chars.index(s[i+1]) << 12) + (base64chars.index(s[i+2]) << 6) +base64chars.index(s[i+3]) - + n = (base64chars.index(s[i]) << 18) + (base64chars.index(s[i + 1]) << 12) + (base64chars.index(s[i + 2]) << 6) + base64chars.index(s[i + 3]) + r += chr((n >> 16) & 255) + chr((n >> 8) & 255) + chr(n & 255) - + i += 4 - + return r[0: len(r) - len(p)] + def main(): print(encodeBase64("WELCOME to base64 
encoding")) print(decodeBase64(encodeBase64("WELCOME to base64 encoding"))) - + if __name__ == '__main__': main() diff --git a/ciphers/base85.py b/ciphers/base85.py index 5fd13837f662..cb549855f14d 100644 --- a/ciphers/base85.py +++ b/ciphers/base85.py @@ -1,11 +1,13 @@ import base64 + def main(): inp = input('->') - encoded = inp.encode('utf-8') #encoded the input (we need a bytes like object) - a85encoded = base64.a85encode(encoded) #a85encoded the encoded string + encoded = inp.encode('utf-8') # encoded the input (we need a bytes like object) + a85encoded = base64.a85encode(encoded) # a85encoded the encoded string print(a85encoded) - print(base64.a85decode(a85encoded).decode('utf-8'))#decoded it + print(base64.a85decode(a85encoded).decode('utf-8')) # decoded it + if __name__ == '__main__': main() diff --git a/ciphers/brute_force_caesar_cipher.py b/ciphers/brute_force_caesar_cipher.py index 3b0716442fc5..22b6f3131b60 100644 --- a/ciphers/brute_force_caesar_cipher.py +++ b/ciphers/brute_force_caesar_cipher.py @@ -1,4 +1,6 @@ from __future__ import print_function + + def decrypt(message): """ >>> decrypt('TMDETUX PMDVU') @@ -43,12 +45,15 @@ def decrypt(message): translated = translated + symbol print("Decryption using Key #%s: %s" % (key, translated)) + def main(): message = input("Encrypted message: ") message = message.upper() decrypt(message) + if __name__ == '__main__': import doctest + doctest.testmod() main() diff --git a/ciphers/caesar_cipher.py b/ciphers/caesar_cipher.py index 39c069c95a7c..75b470c0bf8e 100644 --- a/ciphers/caesar_cipher.py +++ b/ciphers/caesar_cipher.py @@ -1,4 +1,3 @@ -import sys def encrypt(strng, key): encrypted = '' for x in strng: @@ -18,6 +17,7 @@ def decrypt(strng, key): decrypted = decrypted + chr(indx) return decrypted + def brute_force(strng): key = 1 decrypted = '' @@ -42,22 +42,24 @@ def main(): print("4.Quit") choice = input("What would you like to do?: ") if choice not in ['1', '2', '3', '4']: - print ("Invalid choice, please 
enter a valid choice") + print("Invalid choice, please enter a valid choice") elif choice == '1': strng = input("Please enter the string to be encrypted: ") key = int(input("Please enter off-set between 1-94: ")) if key in range(1, 95): - print (encrypt(strng.lower(), key)) + print(encrypt(strng.lower(), key)) elif choice == '2': strng = input("Please enter the string to be decrypted: ") key = int(input("Please enter off-set between 1-94: ")) - if key in range(1,95): + if key in range(1, 95): print(decrypt(strng, key)) elif choice == '3': strng = input("Please enter the string to be decrypted: ") brute_force(strng) main() elif choice == '4': - print ("Goodbye.") + print("Goodbye.") break + + main() diff --git a/ciphers/cryptomath_module.py b/ciphers/cryptomath_module.py index 3e8e71b117ed..fc38e4bd2a22 100644 --- a/ciphers/cryptomath_module.py +++ b/ciphers/cryptomath_module.py @@ -3,6 +3,7 @@ def gcd(a, b): a, b = b % a, a return b + def findModInverse(a, m): if gcd(a, m) != 1: return None @@ -10,5 +11,5 @@ def findModInverse(a, m): v1, v2, v3 = 0, 1, m while v3 != 0: q = u3 // v3 - v1, v2, v3, u1, u2, u3 = (u1 - q * v1), (u2 - q * v2), (u3 - q *v3), v1, v2, v3 - return u1 % m + v1, v2, v3, u1, u2, u3 = (u1 - q * v1), (u2 - q * v2), (u3 - q * v3), v1, v2, v3 + return u1 % m diff --git a/ciphers/elgamal_key_generator.py b/ciphers/elgamal_key_generator.py index 6a8751f69524..fe6714a7e614 100644 --- a/ciphers/elgamal_key_generator.py +++ b/ciphers/elgamal_key_generator.py @@ -1,7 +1,9 @@ import os import random import sys -import rabin_miller as rabinMiller, cryptomath_module as cryptoMath + +import cryptomath_module as cryptoMath +import rabin_miller as rabinMiller min_primitive_root = 3 @@ -19,7 +21,7 @@ def main(): def primitiveRoot(p_val): print("Generating primitive root of p") while True: - g = random.randrange(3,p_val) + g = random.randrange(3, p_val) if pow(g, 2, p_val) == 1: continue if pow(g, p_val, p_val) == 1: @@ -60,4 +62,3 @@ def makeKeyFiles(name, 
keySize): if __name__ == '__main__': main() - \ No newline at end of file diff --git a/ciphers/hill_cipher.py b/ciphers/hill_cipher.py index 89b88beed17e..19f71c45f3e8 100644 --- a/ciphers/hill_cipher.py +++ b/ciphers/hill_cipher.py @@ -44,7 +44,7 @@ def gcd(a, b): if a == 0: return b - return gcd(b%a, a) + return gcd(b % a, a) class HillCipher: @@ -59,19 +59,19 @@ class HillCipher: modulus = numpy.vectorize(lambda x: x % 36) toInt = numpy.vectorize(lambda x: round(x)) - + def __init__(self, encrypt_key): """ encrypt_key is an NxN numpy matrix """ - self.encrypt_key = self.modulus(encrypt_key) # mod36 calc's on the encrypt key - self.checkDeterminant() # validate the determinant of the encryption key + self.encrypt_key = self.modulus(encrypt_key) # mod36 calc's on the encrypt key + self.checkDeterminant() # validate the determinant of the encryption key self.decrypt_key = None self.break_key = encrypt_key.shape[0] def checkDeterminant(self): det = round(numpy.linalg.det(self.encrypt_key)) - + if det < 0: det = det % len(self.key_string) @@ -88,13 +88,13 @@ def processText(self, text): text.append(last) return ''.join(text) - + def encrypt(self, text): text = self.processText(text.upper()) encrypted = '' for i in range(0, len(text) - self.break_key + 1, self.break_key): - batch = text[i:i+self.break_key] + batch = text[i:i + self.break_key] batch_vec = list(map(self.replaceLetters, batch)) batch_vec = numpy.matrix([batch_vec]).T batch_encrypted = self.modulus(self.encrypt_key.dot(batch_vec)).T.tolist()[0] @@ -105,7 +105,7 @@ def encrypt(self, text): def makeDecryptKey(self): det = round(numpy.linalg.det(self.encrypt_key)) - + if det < 0: det = det % len(self.key_string) det_inv = None @@ -114,18 +114,18 @@ def makeDecryptKey(self): det_inv = i break - inv_key = det_inv * numpy.linalg.det(self.encrypt_key) *\ + inv_key = det_inv * numpy.linalg.det(self.encrypt_key) * \ numpy.linalg.inv(self.encrypt_key) return self.toInt(self.modulus(inv_key)) - + def decrypt(self, 
text): self.decrypt_key = self.makeDecryptKey() text = self.processText(text.upper()) decrypted = '' for i in range(0, len(text) - self.break_key + 1, self.break_key): - batch = text[i:i+self.break_key] + batch = text[i:i + self.break_key] batch_vec = list(map(self.replaceLetters, batch)) batch_vec = numpy.matrix([batch_vec]).T batch_decrypted = self.modulus(self.decrypt_key.dot(batch_vec)).T.tolist()[0] @@ -161,7 +161,7 @@ def main(): text_d = input("What text would you like to decrypt?: ") print("Your decrypted text is:") print(hc.decrypt(text_d)) - + if __name__ == "__main__": main() diff --git a/ciphers/morse_Code_implementation.py b/ciphers/morse_Code_implementation.py index 7b2d0a94b24b..54b509caf049 100644 --- a/ciphers/morse_Code_implementation.py +++ b/ciphers/morse_Code_implementation.py @@ -2,21 +2,21 @@ # Dictionary representing the morse code chart -MORSE_CODE_DICT = { 'A':'.-', 'B':'-...', - 'C':'-.-.', 'D':'-..', 'E':'.', - 'F':'..-.', 'G':'--.', 'H':'....', - 'I':'..', 'J':'.---', 'K':'-.-', - 'L':'.-..', 'M':'--', 'N':'-.', - 'O':'---', 'P':'.--.', 'Q':'--.-', - 'R':'.-.', 'S':'...', 'T':'-', - 'U':'..-', 'V':'...-', 'W':'.--', - 'X':'-..-', 'Y':'-.--', 'Z':'--..', - '1':'.----', '2':'..---', '3':'...--', - '4':'....-', '5':'.....', '6':'-....', - '7':'--...', '8':'---..', '9':'----.', - '0':'-----', ', ':'--..--', '.':'.-.-.-', - '?':'..--..', '/':'-..-.', '-':'-....-', - '(':'-.--.', ')':'-.--.-'} +MORSE_CODE_DICT = {'A': '.-', 'B': '-...', + 'C': '-.-.', 'D': '-..', 'E': '.', + 'F': '..-.', 'G': '--.', 'H': '....', + 'I': '..', 'J': '.---', 'K': '-.-', + 'L': '.-..', 'M': '--', 'N': '-.', + 'O': '---', 'P': '.--.', 'Q': '--.-', + 'R': '.-.', 'S': '...', 'T': '-', + 'U': '..-', 'V': '...-', 'W': '.--', + 'X': '-..-', 'Y': '-.--', 'Z': '--..', + '1': '.----', '2': '..---', '3': '...--', + '4': '....-', '5': '.....', '6': '-....', + '7': '--...', '8': '---..', '9': '----.', + '0': '-----', ', ': '--..--', '.': '.-.-.-', + '?': '..--..', '/': 
'-..-.', '-': '-....-', + '(': '-.--.', ')': '-.--.-'} def encrypt(message): @@ -24,7 +24,6 @@ def encrypt(message): for letter in message: if letter != ' ': - cipher += MORSE_CODE_DICT[letter] + ' ' else: @@ -34,7 +33,6 @@ def encrypt(message): def decrypt(message): - message += ' ' decipher = '' @@ -43,26 +41,21 @@ def decrypt(message): if (letter != ' '): - i = 0 - citext += letter else: i += 1 - - if i == 2 : - + if i == 2: decipher += ' ' else: - decipher += list(MORSE_CODE_DICT.keys())[list(MORSE_CODE_DICT - .values()).index(citext)] + .values()).index(citext)] citext = '' return decipher @@ -71,11 +64,11 @@ def decrypt(message): def main(): message = "Morse code here" result = encrypt(message.upper()) - print (result) + print(result) message = result result = decrypt(message) - print (result) + print(result) if __name__ == '__main__': diff --git a/ciphers/onepad_cipher.py b/ciphers/onepad_cipher.py index 6afbd45249ec..6ee1ed18d44b 100644 --- a/ciphers/onepad_cipher.py +++ b/ciphers/onepad_cipher.py @@ -11,16 +11,16 @@ def encrypt(self, text): cipher = [] for i in plain: k = random.randint(1, 300) - c = (i+k)*k + c = (i + k) * k cipher.append(c) key.append(k) return cipher, key - + def decrypt(self, cipher, key): '''Function to decrypt text using psedo-random numbers.''' plain = [] for i in range(len(key)): - p = int((cipher[i]-(key[i])**2)/key[i]) + p = int((cipher[i] - (key[i]) ** 2) / key[i]) plain.append(chr(p)) plain = ''.join([i for i in plain]) return plain diff --git a/ciphers/playfair_cipher.py b/ciphers/playfair_cipher.py index 20449b161963..7fb9daeabb87 100644 --- a/ciphers/playfair_cipher.py +++ b/ciphers/playfair_cipher.py @@ -1,14 +1,14 @@ -import string import itertools +import string + def chunker(seq, size): it = iter(seq) while True: - chunk = tuple(itertools.islice(it, size)) - if not chunk: - return - yield chunk - + chunk = tuple(itertools.islice(it, size)) + if not chunk: + return + yield chunk def prepare_input(dirty): @@ -16,19 +16,19 
@@ def prepare_input(dirty): Prepare the plaintext by up-casing it and separating repeated letters with X's """ - + dirty = ''.join([c.upper() for c in dirty if c in string.ascii_letters]) clean = "" - + if len(dirty) < 2: return dirty - for i in range(len(dirty)-1): + for i in range(len(dirty) - 1): clean += dirty[i] - - if dirty[i] == dirty[i+1]: + + if dirty[i] == dirty[i + 1]: clean += 'X' - + clean += dirty[-1] if len(clean) & 1: @@ -36,8 +36,8 @@ def prepare_input(dirty): return clean -def generate_table(key): +def generate_table(key): # I and J are used interchangeably to allow # us to use a 5x5 table (25 letters) alphabet = "ABCDEFGHIKLMNOPQRSTUVWXYZ" @@ -57,6 +57,7 @@ def generate_table(key): return table + def encode(plaintext, key): table = generate_table(key) plaintext = prepare_input(plaintext) @@ -68,14 +69,14 @@ def encode(plaintext, key): row2, col2 = divmod(table.index(char2), 5) if row1 == row2: - ciphertext += table[row1*5+(col1+1)%5] - ciphertext += table[row2*5+(col2+1)%5] + ciphertext += table[row1 * 5 + (col1 + 1) % 5] + ciphertext += table[row2 * 5 + (col2 + 1) % 5] elif col1 == col2: - ciphertext += table[((row1+1)%5)*5+col1] - ciphertext += table[((row2+1)%5)*5+col2] - else: # rectangle - ciphertext += table[row1*5+col2] - ciphertext += table[row2*5+col1] + ciphertext += table[((row1 + 1) % 5) * 5 + col1] + ciphertext += table[((row2 + 1) % 5) * 5 + col2] + else: # rectangle + ciphertext += table[row1 * 5 + col2] + ciphertext += table[row2 * 5 + col1] return ciphertext @@ -90,13 +91,13 @@ def decode(ciphertext, key): row2, col2 = divmod(table.index(char2), 5) if row1 == row2: - plaintext += table[row1*5+(col1-1)%5] - plaintext += table[row2*5+(col2-1)%5] + plaintext += table[row1 * 5 + (col1 - 1) % 5] + plaintext += table[row2 * 5 + (col2 - 1) % 5] elif col1 == col2: - plaintext += table[((row1-1)%5)*5+col1] - plaintext += table[((row2-1)%5)*5+col2] - else: # rectangle - plaintext += table[row1*5+col2] - plaintext += table[row2*5+col1] + 
plaintext += table[((row1 - 1) % 5) * 5 + col1] + plaintext += table[((row2 - 1) % 5) * 5 + col2] + else: # rectangle + plaintext += table[row1 * 5 + col2] + plaintext += table[row2 * 5 + col1] return plaintext diff --git a/ciphers/prehistoric_men.txt b/ciphers/prehistoric_men.txt index 86c4de821bfc..77e7062ea0dc 100644 --- a/ciphers/prehistoric_men.txt +++ b/ciphers/prehistoric_men.txt @@ -40,8 +40,8 @@ Transcriber's note: version referred to above. One example of this might occur in the second paragraph under "Choppers and Adze-like Tools", page 46, which contains the phrase - an adze cutting edge is ? shaped. The symbol before - shaped looks like a sharply-italicized sans-serif L. + “an adze cutting edge is ? shaped”. The symbol before + “shaped” looks like a sharply-italicized sans-serif “L”. Devices that cannot display that symbol may substitute a question mark, a square, or other symbol. @@ -98,7 +98,7 @@ forced or pedantic; at least I have done my very best to tell the story simply and clearly. Many friends have aided in the preparation of the book. The whimsical -charm of Miss Susan Richerts illustrations add enormously to the +charm of Miss Susan Richert’s illustrations add enormously to the spirit I wanted. She gave freely of her own time on the drawings and in planning the book with me. My colleagues at the University of Chicago, especially Professor Wilton M. Krogman (now of the University @@ -108,7 +108,7 @@ the Department of Anthropology, gave me counsel in matters bearing on their special fields, and the Department of Anthropology bore some of the expense of the illustrations. From Mrs. Irma Hunter and Mr. Arnold Maremont, who are not archeologists at all and have only an intelligent -laymans notion of archeology, I had sound advice on how best to tell +layman’s notion of archeology, I had sound advice on how best to tell the story. I am deeply indebted to all these friends. 
While I was preparing the second edition, I had the great fortune @@ -117,13 +117,13 @@ Washburn, now of the Department of Anthropology of the University of California, and the fourth, fifth, and sixth chapters with Professor Hallum L. Movius, Jr., of the Peabody Museum, Harvard University. The book has gained greatly in accuracy thereby. In matters of dating, -Professor Movius and the indications of Professor W. F. Libbys Carbon +Professor Movius and the indications of Professor W. F. Libby�s Carbon 14 chronology project have both encouraged me to choose the lowest dates now current for the events of the Pleistocene Ice Age. There is still no certain way of fixing a direct chronology for most of the -Pleistocene, but Professor Libbys method appears very promising for +Pleistocene, but Professor Libby�s method appears very promising for its end range and for proto-historic dates. In any case, this book -names periods, and new dates may be written in against mine, if new +names �periods,� and new dates may be written in against mine, if new and better dating systems appear. I wish to thank Dr. Clifford C. Gregg, Director of Chicago Natural @@ -150,7 +150,7 @@ Clark Howell of the Department of Anthropology of the University of Chicago in reworking the earlier chapters, and he was very patient in the matter, which I sincerely appreciate. -All of Mrs. Susan Richert Allens original drawings appear, but a few +All of Mrs. Susan Richert Allen�s original drawings appear, but a few necessary corrections have been made in some of the charts and some new drawings have been added by Mr. John Pfiffner, Staff Artist, Chicago Natural History Museum. @@ -200,7 +200,7 @@ HOW WE LEARN about Prehistoric Men Prehistory means the time before written history began. Actually, more -than 99 per cent of mans story is prehistory. Man is at least half a +than 99 per cent of man�s story is prehistory. 
Man is at least half a million years old, but he did not begin to write history (or to write anything) until about 5,000 years ago. @@ -216,7 +216,7 @@ The scientists who study the bones and teeth and any other parts they find of the bodies of prehistoric men, are called _physical anthropologists_. Physical anthropologists are trained, much like doctors, to know all about the human body. They study living people, -too; they know more about the biological facts of human races than +too; they know more about the biological facts of human �races� than anybody else. If the police find a badly decayed body in a trunk, they ask a physical anthropologist to tell them what the person originally looked like. The physical anthropologists who specialize in @@ -228,14 +228,14 @@ ARCHEOLOGISTS There is a kind of scientist who studies the things that prehistoric men made and did. Such a scientist is called an _archeologist_. It is -the archeologists business to look for the stone and metal tools, the +the archeologist�s business to look for the stone and metal tools, the pottery, the graves, and the caves or huts of the men who lived before history began. But there is more to archeology than just looking for things. In -Professor V. Gordon Childes words, archeology furnishes a sort of +Professor V. Gordon Childe�s words, archeology �furnishes a sort of history of human activity, provided always that the actions have -produced concrete results and left recognizable material traces. You +produced concrete results and left recognizable material traces.� You will see that there are at least three points in what Childe says: 1. The archeologists have to find the traces of things left behind by @@ -245,7 +245,7 @@ will see that there are at least three points in what Childe says: too soft or too breakable to last through the years. However, 3. 
The archeologist must use whatever he can find to tell a story--to - make a sort of history--from the objects and living-places and + make a �sort of history�--from the objects and living-places and graves that have escaped destruction. What I mean is this: Let us say you are walking through a dump yard, @@ -253,8 +253,8 @@ and you find a rusty old spark plug. If you want to think about what the spark plug means, you quickly remember that it is a part of an automobile motor. This tells you something about the man who threw the spark plug on the dump. He either had an automobile, or he knew -or lived near someone who did. He cant have lived so very long ago, -youll remember, because spark plugs and automobiles are only about +or lived near someone who did. He can�t have lived so very long ago, +you�ll remember, because spark plugs and automobiles are only about sixty years old. When you think about the old spark plug in this way you have @@ -264,8 +264,8 @@ It is the same way with the man-made things we archeologists find and put in museums. Usually, only a few of these objects are pretty to look at; but each of them has some sort of story to tell. Making the interpretation of his finds is the most important part of the -archeologists job. It is the way he gets at the sort of history of -human activity which is expected of archeology. +archeologist�s job. It is the way he gets at the �sort of history of +human activity� which is expected of archeology. SOME OTHER SCIENTISTS @@ -274,7 +274,7 @@ There are many other scientists who help the archeologist and the physical anthropologist find out about prehistoric men. The geologists help us tell the age of the rocks or caves or gravel beds in which human bones or man-made objects are found. There are other scientists -with names which all begin with paleo (the Greek word for old). The +with names which all begin with �paleo� (the Greek word for �old�). The _paleontologists_ study fossil animals. 
There are also, for example, such scientists as _paleobotanists_ and _paleoclimatologists_, who study ancient plants and climates. These scientists help us to know @@ -306,20 +306,20 @@ systems. The rate of disappearance of radioactivity as time passes.[1]] [1] It is important that the limitations of the radioactive carbon - dating system be held in mind. As the statistics involved in + “dating” system be held in mind. As the statistics involved in the system are used, there are two chances in three that the - date of the sample falls within the range given as plus or - minus an added number of years. For example, the date for the - Jarmo village (see chart), given as 6750 200 B.C., really + “date” of the sample falls within the range given as plus or + minus an added number of years. For example, the “date” for the + Jarmo village (see chart), given as 6750 ± 200 B.C., really means that there are only two chances in three that the real date of the charcoal sampled fell between 6950 and 6550 B.C. We have also begun to suspect that there are ways in which the - samples themselves may have become contaminated, either on + samples themselves may have become “contaminated,” either on the early or on the late side. We now tend to be suspicious of single radioactive carbon determinations, or of determinations from one site alone. But as a fabric of consistent determinations for several or more sites of one archeological - period, we gain confidence in the dates. + period, we gain confidence in the “dates.” HOW THE SCIENTISTS FIND OUT @@ -330,9 +330,9 @@ about prehistoric men. We also need a word about _how_ they find out. All our finds came by accident until about a hundred years ago. Men digging wells, or digging in caves for fertilizer, often turned up ancient swords or pots or stone arrowheads. People also found some odd -pieces of stone that didnt look like natural forms, but they also -didnt look like any known tool. 
As a result, the people who found them -gave them queer names; for example, thunderbolts. The people thought +pieces of stone that didn’t look like natural forms, but they also +didn’t look like any known tool. As a result, the people who found them +gave them queer names; for example, “thunderbolts.” The people thought the strange stones came to earth as bolts of lightning. We know now that these strange stones were prehistoric stone tools. @@ -349,7 +349,7 @@ story of cave men on Mount Carmel, in Palestine, began to be known. Planned archeological digging is only about a century old. Even before this, however, a few men realized the significance of objects they dug from the ground; one of these early archeologists was our own Thomas -Jefferson. The first real mound-digger was a German grocers clerk, +Jefferson. The first real mound-digger was a German grocer’s clerk, Heinrich Schliemann. Schliemann made a fortune as a merchant, first in Europe and then in the California gold-rush of 1849. He became an American citizen. Then he retired and had both money and time to test @@ -389,16 +389,16 @@ used had been a soft, unbaked mud-brick, and most of the debris consisted of fallen or rain-melted mud from these mud-bricks. This idea of _stratification_, like the cake layers, was already a -familiar one to the geologists by Schliemanns time. They could show +familiar one to the geologists by Schliemann’s time. They could show that their lowest layer of rock was oldest or earliest, and that the -overlying layers became more recent as one moved upward. Schliemanns +overlying layers became more recent as one moved upward. Schliemann’s digging proved the same thing at Troy. His first (lowest and earliest) city had at least nine layers above it; he thought that the second -layer contained the remains of Homers Troy. We now know that Homeric +layer contained the remains of Homer’s Troy. 
We now know that Homeric Troy was layer VIIa from the bottom; also, we count eleven layers or sub-layers in total. -Schliemanns work marks the beginnings of modern archeology. Scholars +Schliemann�s work marks the beginnings of modern archeology. Scholars soon set out to dig on ancient sites, from Egypt to Central America. @@ -410,21 +410,21 @@ Archeologists began to get ideas as to the kinds of objects that belonged together. If you compared a mail-order catalogue of 1890 with one of today, you would see a lot of differences. If you really studied the two catalogues hard, you would also begin to see that certain -objects go together. Horseshoes and metal buggy tires and pieces of +objects �go together.� Horseshoes and metal buggy tires and pieces of harness would begin to fit into a picture with certain kinds of coal stoves and furniture and china dishes and kerosene lamps. Our friend the spark plug, and radios and electric refrigerators and light bulbs would fit into a picture with different kinds of furniture and dishes -and tools. You wont be old enough to remember the kind of hats that -women wore in 1890, but youve probably seen pictures of them, and you -know very well they couldnt be worn with the fashions of today. +and tools. You won�t be old enough to remember the kind of hats that +women wore in 1890, but you�ve probably seen pictures of them, and you +know very well they couldn�t be worn with the fashions of today. This is one of the ways that archeologists study their materials. The various tools and weapons and jewelry, the pottery, the kinds of houses, and even the ways of burying the dead tend to fit into pictures. Some archeologists call all of the things that go together to make such a picture an _assemblage_. The assemblage of the first layer -of Schliemanns Troy was as different from that of the seventh layer as +of Schliemann�s Troy was as different from that of the seventh layer as our 1900 mail-order catalogue is from the one of today. 
The archeologists who came after Schliemann began to notice other @@ -433,23 +433,23 @@ idea that people will buy better mousetraps goes back into very ancient times. Today, if we make good automobiles or radios, we can sell some of them in Turkey or even in Timbuktu. This means that a few present-day types of American automobiles and radios form part -of present-day assemblages in both Turkey and Timbuktu. The total -present-day assemblage of Turkey is quite different from that of +of present-day �assemblages� in both Turkey and Timbuktu. The total +present-day �assemblage� of Turkey is quite different from that of Timbuktu or that of America, but they have at least some automobiles and some radios in common. Now these automobiles and radios will eventually wear out. Let us suppose we could go to some remote part of Turkey or to Timbuktu in a -dream. We dont know what the date is, in our dream, but we see all +dream. We don�t know what the date is, in our dream, but we see all sorts of strange things and ways of living in both places. Nobody tells us what the date is. But suddenly we see a 1936 Ford; so we know that in our dream it has to be at least the year 1936, and only as many years after that as we could reasonably expect a Ford to keep -in running order. The Ford would probably break down in twenty years -time, so the Turkish or Timbuktu assemblage were seeing in our dream +in running order. The Ford would probably break down in twenty years� +time, so the Turkish or Timbuktu �assemblage� we�re seeing in our dream has to date at about A.D. 1936-56. -Archeologists not only date their ancient materials in this way; they +Archeologists not only �date� their ancient materials in this way; they also see over what distances and between which peoples trading was done. It turns out that there was a good deal of trading in ancient times, probably all on a barter and exchange basis. @@ -480,13 +480,13 @@ site. 
They find the remains of everything that would last through time, in several different layers. They know that the assemblage in the bottom layer was laid down earlier than the assemblage in the next layer above, and so on up to the topmost layer, which is the latest. -They look at the results of other digs and find that some other +They look at the results of other �digs� and find that some other archeologist 900 miles away has found ax-heads in his lowest layer, exactly like the ax-heads of their fifth layer. This means that their fifth layer must have been lived in at about the same time as was the first layer in the site 200 miles away. It also may mean that the people who lived in the two layers knew and traded with each other. Or -it could mean that they didnt necessarily know each other, but simply +it could mean that they didn�t necessarily know each other, but simply that both traded with a third group at about the same time. You can see that the more we dig and find, the more clearly the main @@ -501,8 +501,8 @@ those of domesticated animals, for instance, sheep or cattle, and therefore the people must have kept herds. More important than anything else--as our structure grows more -complicated and our materials increase--is the fact that a sort -of history of human activity does begin to appear. The habits or +complicated and our materials increase--is the fact that �a sort +of history of human activity� does begin to appear. The habits or traditions that men formed in the making of their tools and in the ways they did things, begin to stand out for us. How characteristic were these habits and traditions? What areas did they spread over? @@ -519,7 +519,7 @@ method--chemical tests of the bones--that will enable them to discover what the blood-type may have been. One thing is sure. We have never found a group of skeletons so absolutely similar among themselves--so cast from a single mould, so to speak--that we could claim to have a -pure race. 
I am sure we never shall. +“pure” race. I am sure we never shall. We become particularly interested in any signs of change--when new materials and tool types and ways of doing things replace old ones. We @@ -527,7 +527,7 @@ watch for signs of social change and progress in one way or another. We must do all this without one word of written history to aid us. Everything we are concerned with goes back to the time _before_ men -learned to write. That is the prehistorians job--to find out what +learned to write. That is the prehistorian’s job--to find out what happened before history began. @@ -538,9 +538,9 @@ THE CHANGING WORLD in which Prehistoric Men Lived [Illustration] -Mankind, well say, is at least a half million years old. It is very +Mankind, we’ll say, is at least a half million years old. It is very hard to understand how long a time half a million years really is. -If we were to compare this whole length of time to one day, wed get +If we were to compare this whole length of time to one day, we’d get something like this: The present time is midnight, and Jesus was born just five minutes and thirty-six seconds ago. Earliest history began less than fifteen minutes ago. Everything before 11:45 was in @@ -569,7 +569,7 @@ book; it would mainly affect the dates earlier than 25,000 years ago. CHANGES IN ENVIRONMENT -The earth probably hasnt changed much in the last 5,000 years (250 +The earth probably hasn’t changed much in the last 5,000 years (250 generations). Men have built things on its surface and dug into it and drawn boundaries on maps of it, but the places where rivers, lakes, seas, and mountains now stand have changed very little. @@ -605,7 +605,7 @@ the glaciers covered most of Canada and the northern United States and reached down to southern England and France in Europe. Smaller ice sheets sat like caps on the Rockies, the Alps, and the Himalayas. 
The continental glaciation only happened north of the equator, however, so -remember that Ice Age is only half true. +remember that �Ice Age� is only half true. As you know, the amount of water on and about the earth does not vary. These large glaciers contained millions of tons of water frozen into @@ -677,9 +677,9 @@ their dead. At about the time when the last great glacier was finally melting away, men in the Near East made the first basic change in human economy. They began to plant grain, and they learned to raise and herd certain -animals. This meant that they could store food in granaries and on the -hoof against the bad times of the year. This first really basic change -in mans way of living has been called the food-producing revolution. +animals. This meant that they could store food in granaries and �on the +hoof� against the bad times of the year. This first really basic change +in man�s way of living has been called the �food-producing revolution.� By the time it happened, a modern kind of climate was beginning. Men had already grown to look as they do now. Know-how in ways of living had developed and progressed, slowly but surely, up to a point. It was @@ -698,25 +698,25 @@ Prehistoric Men THEMSELVES DO WE KNOW WHERE MAN ORIGINATED? -For a long time some scientists thought the cradle of mankind was in +For a long time some scientists thought the �cradle of mankind� was in central Asia. Other scientists insisted it was in Africa, and still -others said it might have been in Europe. Actually, we dont know -where it was. We dont even know that there was only _one_ cradle. -If we had to choose a cradle at this moment, we would probably say +others said it might have been in Europe. Actually, we don�t know +where it was. We don�t even know that there was only _one_ �cradle.� +If we had to choose a �cradle� at this moment, we would probably say Africa. But the southern portions of Asia and Europe may also have been included in the general area. 
The scene of the early development of -mankind was certainly the Old World. It is pretty certain men didnt +mankind was certainly the Old World. It is pretty certain men didn�t reach North or South America until almost the end of the Ice Age--had they done so earlier we would certainly have found some trace of them by now. The earliest tools we have yet found come from central and south -Africa. By the dating system Im using, these tools must be over +Africa. By the dating system I�m using, these tools must be over 500,000 years old. There are now reports that a few such early tools have been found--at the Sterkfontein cave in South Africa--along with -the bones of small fossil men called australopithecines. +the bones of small fossil men called �australopithecines.� -Not all scientists would agree that the australopithecines were men, +Not all scientists would agree that the australopithecines were �men,� or would agree that the tools were made by the australopithecines themselves. For these sticklers, the earliest bones of men come from the island of Java. The date would be about 450,000 years ago. So far, @@ -727,12 +727,12 @@ Let me say it another way. How old are the earliest traces of men we now have? Over half a million years. This was a time when the first alpine glaciation was happening in the north. What has been found so far? The tools which the men of those times made, in different parts -of Africa. It is now fairly generally agreed that the men who made -the tools were the australopithecines. There is also a more man-like +of Africa. It is now fairly generally agreed that the �men� who made +the tools were the australopithecines. There is also a more �man-like� jawbone at Kanam in Kenya, but its find-spot has been questioned. The next earliest bones we have were found in Java, and they may be almost a hundred thousand years younger than the earliest African finds. We -havent yet found the tools of these early Javanese. 
Our knowledge of +haven�t yet found the tools of these early Javanese. Our knowledge of tool-using in Africa spreads quickly as time goes on: soon after the appearance of tools in the south we shall have them from as far north as Algeria. @@ -758,30 +758,30 @@ prove it. MEN AND APES Many people used to get extremely upset at the ill-formed notion -that man descended from the apes. Such words were much more likely -to start fights or monkey trials than the correct notion that all +that �man descended from the apes.� Such words were much more likely +to start fights or �monkey trials� than the correct notion that all living animals, including man, ascended or evolved from a single-celled organism which lived in the primeval seas hundreds of millions of years -ago. Men are mammals, of the order called Primates, and mans living -relatives are the great apes. Men didnt descend from the apes or +ago. Men are mammals, of the order called Primates, and man�s living +relatives are the great apes. Men didn�t �descend� from the apes or apes from men, and mankind must have had much closer relatives who have since become extinct. Men stand erect. They also walk and run on their two feet. Apes are happiest in trees, swinging with their arms from branch to branch. Few branches of trees will hold the mighty gorilla, although he still -manages to sleep in trees. Apes cant stand really erect in our sense, +manages to sleep in trees. Apes can�t stand really erect in our sense, and when they have to run on the ground, they use the knuckles of their hands as well as their feet. A key group of fossil bones here are the south African australopithecines. These are called the _Australopithecinae_ or -man-apes or sometimes even ape-men. We do not _know_ that they were +�man-apes� or sometimes even �ape-men.� We do not _know_ that they were directly ancestral to men but they can hardly have been so to apes. -Presently Ill describe them a bit more. 
The reason I mention them +Presently I�ll describe them a bit more. The reason I mention them here is that while they had brains no larger than those of apes, their hipbones were enough like ours so that they must have stood erect. -There is no good reason to think they couldnt have walked as we do. +There is no good reason to think they couldn�t have walked as we do. BRAINS, HANDS, AND TOOLS @@ -801,12 +801,12 @@ Nobody knows which of these three is most important, or which came first. Most probably the growth of all three things was very much blended together. If you think about each of the things, you will see what I mean. Unless your hand is more flexible than a paw, and your -thumb will work against (or oppose) your fingers, you cant hold a tool -very well. But you wouldnt get the idea of using a tool unless you had +thumb will work against (or oppose) your fingers, you can�t hold a tool +very well. But you wouldn�t get the idea of using a tool unless you had enough brain to help you see cause and effect. And it is rather hard to see how your hand and brain would develop unless they had something to -practice on--like using tools. In Professor Krogmans words, the hand -must become the obedient servant of the eye and the brain. It is the +practice on--like using tools. In Professor Krogman�s words, �the hand +must become the obedient servant of the eye and the brain.� It is the _co-ordination_ of these things that counts. Many other things must have been happening to the bodies of the @@ -820,17 +820,17 @@ little by little, all together. Men became men very slowly. WHEN SHALL WE CALL MEN MEN? -What do I mean when I say men? People who looked pretty much as we +What do I mean when I say �men�? People who looked pretty much as we do, and who used different tools to do different things, are men to me. -Well probably never know whether the earliest ones talked or not. They +We�ll probably never know whether the earliest ones talked or not. 
They probably had vocal cords, so they could make sounds, but did they know how to make sounds work as symbols to carry meanings? But if the fossil -bones look like our skeletons, and if we find tools which well agree -couldnt have been made by nature or by animals, then Id say we had +bones look like our skeletons, and if we find tools which we�ll agree +couldn�t have been made by nature or by animals, then I�d say we had traces of _men_. The australopithecine finds of the Transvaal and Bechuanaland, in -south Africa, are bound to come into the discussion here. Ive already +south Africa, are bound to come into the discussion here. I�ve already told you that the australopithecines could have stood upright and walked on their two hind legs. They come from the very base of the Pleistocene or Ice Age, and a few coarse stone tools have been found @@ -848,17 +848,17 @@ bones. The doubt as to whether the australopithecines used the tools themselves goes like this--just suppose some man-like creature (whose bones we have not yet found) made the tools and used them to kill and butcher australopithecines. Hence a few experts tend to let -australopithecines still hang in limbo as man-apes. +australopithecines still hang in limbo as �man-apes.� THE EARLIEST MEN WE KNOW -Ill postpone talking about the tools of early men until the next +I�ll postpone talking about the tools of early men until the next chapter. The men whose bones were the earliest of the Java lot have been given the name _Meganthropus_. The bones are very fragmentary. We would not understand them very well unless we had the somewhat later -Javanese lot--the more commonly known _Pithecanthropus_ or Java -man--against which to refer them for study. One of the less well-known +Javanese lot--the more commonly known _Pithecanthropus_ or �Java +man�--against which to refer them for study. 
One of the less well-known and earliest fragments, a piece of lower jaw and some teeth, rather strongly resembles the lower jaws and teeth of the australopithecine type. Was _Meganthropus_ a sort of half-way point between the @@ -872,7 +872,7 @@ finds of Java man were made in 1891-92 by Dr. Eugene Dubois, a Dutch doctor in the colonial service. Finds have continued to be made. There are now bones enough to account for four skulls. There are also four jaws and some odd teeth and thigh bones. Java man, generally speaking, -was about five feet six inches tall, and didnt hold his head very +was about five feet six inches tall, and didn�t hold his head very erect. His skull was very thick and heavy and had room for little more than two-thirds as large a brain as we have. He had big teeth and a big jaw and enormous eyebrow ridges. @@ -885,22 +885,22 @@ belonged to his near descendants. Remember that there are several varieties of men in the whole early Java lot, at least two of which are earlier than the _Pithecanthropus_, -Java man. Some of the earlier ones seem to have gone in for +�Java man.� Some of the earlier ones seem to have gone in for bigness, in tooth-size at least. _Meganthropus_ is one of these earlier varieties. As we said, he _may_ turn out to be a link to the australopithecines, who _may_ or _may not_ be ancestral to men. _Meganthropus_ is best understandable in terms of _Pithecanthropus_, who appeared later in the same general area. _Pithecanthropus_ is pretty well understandable from the bones he left us, and also because -of his strong resemblance to the fully tool-using cave-dwelling Peking -man, _Sinanthropus_, about whom we shall talk next. But you can see +of his strong resemblance to the fully tool-using cave-dwelling �Peking +man,� _Sinanthropus_, about whom we shall talk next. But you can see that the physical anthropologists and prehistoric archeologists still have a lot of work to do on the problem of earliest men. 
PEKING MEN AND SOME EARLY WESTERNERS -The earliest known Chinese are called _Sinanthropus_, or Peking man, +The earliest known Chinese are called _Sinanthropus_, or “Peking man,” because the finds were made near that city. In World War II, the United States Marine guard at our Embassy in Peking tried to help get the bones out of the city before the Japanese attack. Nobody knows where @@ -913,9 +913,9 @@ casts of the bones. Peking man lived in a cave in a limestone hill, made tools, cracked animal bones to get the marrow out, and used fire. Incidentally, the bones of Peking man were found because Chinese dig for what they call -dragon bones and dragon teeth. Uneducated Chinese buy these things +“dragon bones” and “dragon teeth.” Uneducated Chinese buy these things in their drug stores and grind them into powder for medicine. The -dragon teeth and bones are really fossils of ancient animals, and +“dragon teeth” and “bones” are really fossils of ancient animals, and sometimes of men. The people who supply the drug stores have learned where to dig for strange bones and teeth. Paleontologists who get to China go to the drug stores to buy fossils. In a roundabout way, this @@ -924,7 +924,7 @@ is how the fallen-in cave of Peking man at Choukoutien was discovered. Peking man was not quite as tall as Java man but he probably stood straighter. His skull looked very much like that of the Java skull except that it had room for a slightly larger brain. His face was less -brutish than was Java mans face, but this isnt saying much. +brutish than was Java man’s face, but this isn’t saying much. Peking man dates from early in the interglacial period following the second alpine glaciation. He probably lived close to 350,000 years @@ -946,9 +946,9 @@ big ridges over the eyes. The more fragmentary skull from Swanscombe in England (p. 11) has been much more carefully studied. Only the top and back of that skull have been found. 
Since the skull rounds up nicely, it has been assumed that the face and forehead must have been quite -modern. Careful comparison with Steinheim shows that this was not +“modern.” Careful comparison with Steinheim shows that this was not necessarily so. This is important because it bears on the question of -how early truly modern man appeared. +how early truly “modern” man appeared. Recently two fragmentary jaws were found at Ternafine in Algeria, northwest Africa. They look like the jaws of Peking man. Tools were @@ -971,22 +971,22 @@ modern Australian natives. During parts of the Ice Age there was a land bridge all the way from Java to Australia. -TWO ENGLISHMEN WHO WERENT OLD +TWO ENGLISHMEN WHO WEREN’T OLD The older textbooks contain descriptions of two English finds which were thought to be very old. These were called Piltdown (_Eoanthropus dawsoni_) and Galley Hill. The skulls were very modern in appearance. In 1948-49, British scientists began making chemical tests which proved that neither of these finds is very old. It is now known that both -Piltdown man and the tools which were said to have been found with +“Piltdown man” and the tools which were said to have been found with him were part of an elaborate fake! -TYPICAL CAVE MEN +TYPICAL “CAVE MEN” The next men we have to talk about are all members of a related group. -These are the Neanderthal group. Neanderthal man himself was found in -the Neander Valley, near Dsseldorf, Germany, in 1856. He was the first +These are the Neanderthal group. “Neanderthal man” himself was found in +the Neander Valley, near Düsseldorf, Germany, in 1856. He was the first human fossil to be recognized as such. [Illustration: PRINCIPAL KNOWN TYPES OF FOSSIL MEN @@ -999,7 +999,7 @@ human fossil to be recognized as such. 
PITHECANTHROPUS] Some of us think that the neanderthaloids proper are only those people -of western Europe who didnt get out before the beginning of the last +of western Europe who didn’t get out before the beginning of the last great glaciation, and who found themselves hemmed in by the glaciers in the Alps and northern Europe. Being hemmed in, they intermarried a bit too much and developed into a special type. Professor F. Clark @@ -1010,7 +1010,7 @@ pre-neanderthaloids. There are traces of these pre-neanderthaloids pretty much throughout Europe during the third interglacial period--say 100,000 years ago. The pre-neanderthaloids are represented by such finds as the ones at Ehringsdorf in Germany and Saccopastore in Italy. -I wont describe them for you, since they are simply less extreme than +I won’t describe them for you, since they are simply less extreme than the neanderthaloids proper--about half way between Steinheim and the classic Neanderthal people. @@ -1019,24 +1019,24 @@ get caught in the pocket of the southwest corner of Europe at the onset of the last great glaciation became the classic Neanderthalers. Out in the Near East, Howell thinks, it is possible to see traces of people evolving from the pre-neanderthaloid type toward that of fully modern -man. Certainly, we dont see such extreme cases of neanderthaloidism +man. Certainly, we don’t see such extreme cases of “neanderthaloidism” outside of western Europe. There are at least a dozen good examples in the main or classic Neanderthal group in Europe. They date to just before and in the earlier part of the last great glaciation (85,000 to 40,000 years ago). -Many of the finds have been made in caves. The cave men the movies +Many of the finds have been made in caves. The “cave men” the movies and the cartoonists show you are probably meant to be Neanderthalers. 
-Im not at all sure they dragged their women by the hair; the women +I’m not at all sure they dragged their women by the hair; the women were probably pretty tough, too! Neanderthal men had large bony heads, but plenty of room for brains. Some had brain cases even larger than the average for modern man. Their faces were heavy, and they had eyebrow ridges of bone, but the ridges were not as big as those of Java man. Their foreheads were very low, -and they didnt have much chin. They were about five feet three inches -tall, but were heavy and barrel-chested. But the Neanderthalers didnt -slouch as much as theyve been blamed for, either. +and they didn’t have much chin. They were about five feet three inches +tall, but were heavy and barrel-chested. But the Neanderthalers didn’t +slouch as much as they’ve been blamed for, either. One important thing about the Neanderthal group is that there is a fair number of them to study. Just as important is the fact that we know @@ -1059,10 +1059,10 @@ different-looking people. EARLY MODERN MEN -How early is modern man (_Homo sapiens_), the wise man? Some people +How early is modern man (_Homo sapiens_), the “wise man”? Some people have thought that he was very early, a few still think so. Piltdown and Galley Hill, which were quite modern in anatomical appearance and -_supposedly_ very early in date, were the best evidence for very +_supposedly_ very early in date, were the best “evidence” for very early modern men. Now that Piltdown has been liquidated and Galley Hill is known to be very late, what is left of the idea? @@ -1073,13 +1073,13 @@ the Ternafine jaws, you might come to the conclusion that the crown of the Swanscombe head was that of a modern-like man. Two more skulls, again without faces, are available from a French -cave site, Fontchevade. They come from the time of the last great +cave site, Fontéchevade. They come from the time of the last great interglacial, as did the pre-neanderthaloids. 
The crowns of the -Fontchevade skulls also look quite modern. There is a bit of the +Fontéchevade skulls also look quite modern. There is a bit of the forehead preserved on one of these skulls and the brow-ridge is not heavy. Nevertheless, there is a suggestion that the bones belonged to an immature individual. In this case, his (or even more so, if _her_) -brow-ridges would have been weak anyway. The case for the Fontchevade +brow-ridges would have been weak anyway. The case for the Fontéchevade fossils, as modern type men, is little stronger than that for Swanscombe, although Professor Vallois believes it a good case. @@ -1101,8 +1101,8 @@ of the onset of colder weather, when the last glaciation was beginning in the north--say 75,000 years ago. The 70 per cent modern group came from only one cave, Mugharet es-Skhul -(cave of the kids). The other group, from several caves, had bones of -men of the type weve been calling pre-neanderthaloid which we noted +(“cave of the kids”). The other group, from several caves, had bones of +men of the type we’ve been calling pre-neanderthaloid which we noted were widespread in Europe and beyond. The tools which came with each of these finds were generally similar, and McCown and Keith, and other scholars since their study, have tended to assume that both the Skhul @@ -1131,26 +1131,26 @@ important fossil men of later Europe are shown in the chart on page DIFFERENCES IN THE EARLY MODERNS The main early European moderns have been divided into two groups, the -Cro-Magnon group and the Combe Capelle-Brnn group. Cro-Magnon people +Cro-Magnon group and the Combe Capelle-Brünn group. Cro-Magnon people were tall and big-boned, with large, long, and rugged heads. They must have been built like many present-day Scandinavians. The Combe -Capelle-Brnn people were shorter; they had narrow heads and faces, and -big eyebrow-ridges. Of course we dont find the skin or hair of these -people. 
But there is little doubt they were Caucasoids (Whites). +Capelle-Brünn people were shorter; they had narrow heads and faces, and +big eyebrow-ridges. Of course we don’t find the skin or hair of these +people. But there is little doubt they were Caucasoids (“Whites”). Another important find came in the Italian Riviera, near Monte Carlo. Here, in a cave near Grimaldi, there was a grave containing a woman and a young boy, buried together. The two skeletons were first called -Negroid because some features of their bones were thought to resemble +“Negroid” because some features of their bones were thought to resemble certain features of modern African Negro bones. But more recently, Professor E. A. Hooton and other experts questioned the use of the word -Negroid in describing the Grimaldi skeletons. It is true that nothing +“Negroid” in describing the Grimaldi skeletons. It is true that nothing is known of the skin color, hair form, or any other fleshy feature of -the Grimaldi people, so that the word Negroid in its usual meaning is +the Grimaldi people, so that the word “Negroid” in its usual meaning is not proper here. It is also not clear whether the features of the bones -claimed to be Negroid are really so at all. +claimed to be “Negroid” are really so at all. -From a place called Wadjak, in Java, we have proto-Australoid skulls +From a place called Wadjak, in Java, we have “proto-Australoid” skulls which closely resemble those of modern Australian natives. Some of the skulls found in South Africa, especially the Boskop skull, look like those of modern Bushmen, but are much bigger. The ancestors of @@ -1159,12 +1159,12 @@ Desert. True African Negroes were forest people who apparently expanded out of the west central African area only in the last several thousand years. Although dark in skin color, neither the Australians nor the Bushmen are Negroes; neither the Wadjak nor the Boskop skulls are -Negroid. 
+“Negroid.” -As weve already mentioned, Professor Weidenreich believed that Peking +As we’ve already mentioned, Professor Weidenreich believed that Peking man was already on the way to becoming a Mongoloid. Anyway, the -Mongoloids would seem to have been present by the time of the Upper -Cave at Choukoutien, the _Sinanthropus_ find-spot. +Mongoloids would seem to have been present by the time of the “Upper +Cave” at Choukoutien, the _Sinanthropus_ find-spot. WHAT THE DIFFERENCES MEAN @@ -1175,14 +1175,14 @@ From area to area, men tended to look somewhat different, just as they do today. This is all quite natural. People _tended_ to mate near home; in the anthropological jargon, they made up geographically localized breeding populations. The simple continental division of -stocks--black = Africa, yellow = Asia, white = Europe--is too simple +“stocks”--black = Africa, yellow = Asia, white = Europe--is too simple a picture to fit the facts. People became accustomed to life in some -particular area within a continent (we might call it a natural area). +particular area within a continent (we might call it a “natural area”). As they went on living there, they evolved towards some particular physical variety. It would, of course, have been difficult to draw a clear boundary between two adjacent areas. There must always have been some mating across the boundaries in every case. One thing human -beings dont do, and never have done, is to mate for purity. It is +beings don’t do, and never have done, is to mate for “purity.” It is self-righteous nonsense when we try to kid ourselves into thinking that they do. @@ -1195,28 +1195,28 @@ and they must do the writing about races. I shall, however, give two modern definitions of race, and then make one comment. Dr. William G. 
Boyd, professor of Immunochemistry, School of - Medicine, Boston University: We may define a human race as a + Medicine, Boston University: “We may define a human race as a population which differs significantly from other human populations in regard to the frequency of one or more of the genes it - possesses. + possesses.” Professor Sherwood L. Washburn, professor of Physical Anthropology, - Department of Anthropology, the University of California: A race + Department of Anthropology, the University of California: “A ‘race’ is a group of genetically similar populations, and races intergrade - because there are always intermediate populations. + because there are always intermediate populations.” My comment is that the ideas involved here are all biological: they concern groups, _not_ individuals. Boyd and Washburn may differ a bit -on what they want to consider a population, but a population is a +on what they want to consider a “population,” but a population is a group nevertheless, and genetics is biology to the hilt. Now a lot of people still think of race in terms of how people dress or fix their food or of other habits or customs they have. The next step is to talk -about racial purity. None of this has anything whatever to do with +about racial “purity.” None of this has anything whatever to do with race proper, which is a matter of the biology of groups. -Incidentally, Im told that if man very carefully _controls_ +Incidentally, I’m told that if man very carefully _controls_ the breeding of certain animals over generations--dogs, cattle, -chickens--he might achieve a pure race of animals. But he doesnt do +chickens--he might achieve a “pure” race of animals. But he doesn’t do it. Some unfortunate genetic trait soon turns up, so this has just as carefully to be bred out again, and so on. @@ -1240,20 +1240,20 @@ date to the second great interglacial period, about 350,000 years ago. 
Piltdown and Galley Hill are out, and with them, much of the starch in the old idea that there were two distinct lines of development -in human evolution: (1) a line of paleoanthropic development from +in human evolution: (1) a line of “paleoanthropic” development from Heidelberg to the Neanderthalers where it became extinct, and (2) a -very early modern line, through Piltdown, Galley Hill, Swanscombe, to +very early “modern” line, through Piltdown, Galley Hill, Swanscombe, to us. Swanscombe, Steinheim, and Ternafine are just as easily cases of very early pre-neanderthaloids. The pre-neanderthaloids were very widespread during the third interglacial: Ehringsdorf, Saccopastore, some of the Mount Carmel -people, and probably Fontchevade are cases in point. A variety of +people, and probably Fontéchevade are cases in point. A variety of their descendants can be seen, from Java (Solo), Africa (Rhodesian man), and about the Mediterranean and in western Europe. As the acute cold of the last glaciation set in, the western Europeans found themselves surrounded by water, ice, or bitter cold tundra. To vastly -over-simplify it, they bred in and became classic neanderthaloids. +over-simplify it, they “bred in” and became classic neanderthaloids. But on Mount Carmel, the Skhul cave-find with its 70 per cent modern features shows what could happen elsewhere at the same time. @@ -1263,12 +1263,12 @@ modern skeletons of men. The modern skeletons differ from place to place, just as different groups of men living in different places still look different. -What became of the Neanderthalers? Nobody can tell me for sure. Ive a -hunch they were simply bred out again when the cold weather was over. +What became of the Neanderthalers? Nobody can tell me for sure. I’ve a +hunch they were simply “bred out” again when the cold weather was over. Many Americans, as the years go by, are no longer ashamed to claim they -have Indian blood in their veins. 
Give us a few more generations +have “Indian blood in their veins.” Give us a few more generations and there will not be very many other Americans left to whom we can -brag about it. It certainly isnt inconceivable to me to imagine a +brag about it. It certainly isn’t inconceivable to me to imagine a little Cro-Magnon boy bragging to his friends about his tough, strong, Neanderthaler great-great-great-great-grandfather! @@ -1281,15 +1281,15 @@ Cultural BEGINNINGS Men, unlike the lower animals, are made up of much more than flesh and -blood and bones; for men have culture. +blood and bones; for men have “culture.” WHAT IS CULTURE? -Culture is a word with many meanings. The doctors speak of making a -culture of a certain kind of bacteria, and ants are said to have a -culture. Then there is the Emily Post kind of culture--you say a -person is cultured, or that he isnt, depending on such things as +“Culture” is a word with many meanings. The doctors speak of making a +“culture” of a certain kind of bacteria, and ants are said to have a +“culture.” Then there is the Emily Post kind of “culture”--you say a +person is “cultured,” or that he isn’t, depending on such things as whether or not he eats peas with his knife. The anthropologists use the word too, and argue heatedly over its finer @@ -1300,7 +1300,7 @@ men from another. In this sense, a CULTURE means the way the members of a group of people think and believe and live, the tools they make, and the way they do things. Professor Robert Redfield says a culture is an organized or formalized body of conventional understandings. -Conventional understandings means the whole set of rules, beliefs, +“Conventional understandings” means the whole set of rules, beliefs, and standards which a group of people lives by. These understandings show themselves in art, and in the other things a people may make and do. The understandings continue to last, through tradition, from one @@ -1325,12 +1325,12 @@ Egyptians. 
I mean their beliefs as to why grain grew, as well as their ability to make tools with which to reap the grain. I mean their beliefs about life after death. What I am thinking about as culture is a thing which lasted in time. If any one Egyptian, even the Pharaoh, -died, it didnt affect the Egyptian culture of that particular moment. +died, it didn’t affect the Egyptian culture of that particular moment. PREHISTORIC CULTURES -For that long period of mans history that is all prehistory, we have +For that long period of man’s history that is all prehistory, we have no written descriptions of cultures. We find only the tools men made, the places where they lived, the graves in which they buried their dead. Fortunately for us, these tools and living places and graves all @@ -1345,15 +1345,15 @@ of the classic European Neanderthal group of men, we have found few cave-dwelling places of very early prehistoric men. First, there is the fallen-in cave where Peking man was found, near Peking. Then there are two or three other _early_, but not _very early_, possibilities. The -finds at the base of the French cave of Fontchevade, those in one of +finds at the base of the French cave of Fontéchevade, those in one of the Makapan caves in South Africa, and several open sites such as Dr. -L. S. B. Leakeys Olorgesailie in Kenya doubtless all lie earlier than +L. S. B. Leakey’s Olorgesailie in Kenya doubtless all lie earlier than the time of the main European Neanderthal group, but none are so early as the Peking finds. You can see that we know very little about the home life of earlier prehistoric men. We find different kinds of early stone tools, but we -cant even be really sure which tools may have been used together. +can’t even be really sure which tools may have been used together. WHY LITTLE HAS LASTED FROM EARLY TIMES @@ -1380,11 +1380,11 @@ there first! 
The front of this enormous sheet of ice moved down over the country, crushing and breaking and plowing up everything, like a gigantic bulldozer. You can see what happened to our camp site. -Everything the glacier couldnt break, it pushed along in front of it +Everything the glacier couldn’t break, it pushed along in front of it or plowed beneath it. Rocks were ground to gravel, and soil was caught into the ice, which afterwards melted and ran off as muddy water. Hard -tools of flint sometimes remained whole. Human bones werent so hard; -its a wonder _any_ of them lasted. Gushing streams of melt water +tools of flint sometimes remained whole. Human bones weren’t so hard; +it’s a wonder _any_ of them lasted. Gushing streams of melt water flushed out the debris from underneath the glacier, and water flowed off the surface and through great crevasses. The hard materials these waters carried were even more rolled and ground up. Finally, such @@ -1407,26 +1407,26 @@ all up, and so we cannot say which particular sets of tools belonged together in the first place. -EOLITHS +“EOLITHS” But what sort of tools do we find earliest? For almost a century, people have been picking up odd bits of flint and other stone in the oldest Ice Age gravels in England and France. It is now thought these -odd bits of stone werent actually worked by prehistoric men. The -stones were given a name, _eoliths_, or dawn stones. You can see them +odd bits of stone weren’t actually worked by prehistoric men. The +stones were given a name, _eoliths_, or “dawn stones.” You can see them in many museums; but you can be pretty sure that very few of them were actually fashioned by men. -It is impossible to pick out eoliths that seem to be made in any -one _tradition_. By tradition I mean a set of habits for making one -kind of tool for some particular job. No two eoliths look very much +It is impossible to pick out “eoliths” that seem to be made in any +one _tradition_. 
By “tradition” I mean a set of habits for making one +kind of tool for some particular job. No two “eoliths” look very much alike: tools made as part of some one tradition all look much alike. -Now its easy to suppose that the very earliest prehistoric men picked -up and used almost any sort of stone. This wouldnt be surprising; you -and I do it when we go camping. In other words, some of these eoliths +Now it’s easy to suppose that the very earliest prehistoric men picked +up and used almost any sort of stone. This wouldn’t be surprising; you +and I do it when we go camping. In other words, some of these “eoliths” may actually have been used by prehistoric men. They must have used anything that might be handy when they needed it. We could have figured -that out without the eoliths. +that out without the “eoliths.” THE ROAD TO STANDARDIZATION @@ -1434,7 +1434,7 @@ THE ROAD TO STANDARDIZATION Reasoning from what we know or can easily imagine, there should have been three major steps in the prehistory of tool-making. The first step would have been simple _utilization_ of what was at hand. This is the -step into which the eoliths would fall. The second step would have +step into which the “eoliths” would fall. The second step would have been _fashioning_--the haphazard preparation of a tool when there was a need for it. Probably many of the earlier pebble tools, which I shall describe next, fall into this group. The third step would have been @@ -1447,7 +1447,7 @@ tradition appears. PEBBLE TOOLS -At the beginning of the last chapter, youll remember that I said there +At the beginning of the last chapter, you’ll remember that I said there were tools from very early geological beds. The earliest bones of men have not yet been found in such early beds although the Sterkfontein australopithecine cave approaches this early date. The earliest tools @@ -1467,7 +1467,7 @@ Old World besides Africa; in fact, some prehistorians already claim to have identified a few. 
Since the forms and the distinct ways of making the earlier pebble tools had not yet sufficiently jelled into a set tradition, they are difficult for us to recognize. It is not -so difficult, however, if there are great numbers of possibles +so difficult, however, if there are great numbers of “possibles” available. A little later in time the tradition becomes more clearly set, and pebble tools are easier to recognize. So far, really large collections of pebble tools have only been found and examined in Africa. @@ -1475,9 +1475,9 @@ collections of pebble tools have only been found and examined in Africa. CORE-BIFACE TOOLS -The next tradition well look at is the _core_ or biface one. The tools +The next tradition we’ll look at is the _core_ or biface one. The tools are large pear-shaped pieces of stone trimmed flat on the two opposite -sides or faces. Hence biface has been used to describe these tools. +sides or “faces.” Hence “biface” has been used to describe these tools. The front view is like that of a pear with a rather pointed top, and the back view looks almost exactly the same. Look at them side on, and you can see that the front and back faces are the same and have been @@ -1488,7 +1488,7 @@ illustration. [Illustration: ABBEVILLIAN BIFACE] We have very little idea of the way in which these core-bifaces were -used. They have been called hand axes, but this probably gives the +used. They have been called “hand axes,” but this probably gives the wrong idea, for an ax, to us, is not a pointed tool. All of these early tools must have been used for a number of jobs--chopping, scraping, cutting, hitting, picking, and prying. Since the core-bifaces tend to @@ -1505,7 +1505,7 @@ a big block of stone. You had to break off the flake in such a way that it was broad and thin, and also had a good sharp cutting edge. Once you really got on to the trick of doing it, this was probably a simpler way to make a good cutting tool than preparing a biface. 
You have to know -how, though; Ive tried it and have mashed my fingers more than once. +how, though; I’ve tried it and have mashed my fingers more than once. The flake tools look as if they were meant mainly for chopping, scraping, and cutting jobs. When one made a flake tool, the idea seems @@ -1535,9 +1535,9 @@ tradition. It probably has its earliest roots in the pebble tool tradition of African type. There are several kinds of tools in this tradition, but all differ from the western core-bifaces and flakes. There are broad, heavy scrapers or cleavers, and tools with an -adze-like cutting edge. These last-named tools are called hand adzes, -just as the core-bifaces of the west have often been called hand -axes. The section of an adze cutting edge is ? shaped; the section of +adze-like cutting edge. These last-named tools are called “hand adzes,” +just as the core-bifaces of the west have often been called “hand +axes.” The section of an adze cutting edge is ? shaped; the section of an ax is < shaped. [Illustration: ANYATHIAN ADZE-LIKE TOOL] @@ -1581,17 +1581,17 @@ stratification.[3] Soan (India) Flake: - Typical Mousterian + “Typical Mousterian” Levalloiso-Mousterian Levalloisian Tayacian Clactonian (localized in England) Core-biface: - Some blended elements in Mousterian + Some blended elements in “Mousterian” Micoquian (= Acheulean 6 and 7) Acheulean - Abbevillian (once called Chellean) + Abbevillian (once called “Chellean”) Pebble tool: Oldowan @@ -1608,8 +1608,8 @@ out of glacial gravels the easiest thing to do first is to isolate individual types of tools into groups. First you put a bushel-basketful of tools on a table and begin matching up types. Then you give names to the groups of each type. The groups and the types are really matters of -the archeologists choice; in real life, they were probably less exact -than the archeologists lists of them. 
We now know pretty well in which +the archeologists’ choice; in real life, they were probably less exact +than the archeologists’ lists of them. We now know pretty well in which of the early traditions the various early groups belong. @@ -1635,9 +1635,9 @@ production must have been passed on from one generation to another. I could even guess that the notions of the ideal type of one or the other of these tools stood out in the minds of men of those times -somewhat like a symbol of perfect tool for good job. If this were -so--remember its only a wild guess of mine--then men were already -symbol users. Now lets go on a further step to the fact that the words +somewhat like a symbol of “perfect tool for good job.” If this were +so--remember it’s only a wild guess of mine--then men were already +symbol users. Now let’s go on a further step to the fact that the words men speak are simply sounds, each different sound being a symbol for a different meaning. If standardized tool-making suggests symbol-making, is it also possible that crude word-symbols were also being made? I @@ -1650,7 +1650,7 @@ of our second step is more suggestive, although we may not yet feel sure that many of the earlier pebble tools were man-made products. But with the step to standardization and the appearance of the traditions, I believe we must surely be dealing with the traces of culture-bearing -_men_. The conventional understandings which Professor Redfields +_men_. The “conventional understandings” which Professor Redfield’s definition of culture suggests are now evidenced for us in the persistent habits for the preparation of stone tools. Were we able to see the other things these prehistoric men must have made--in materials @@ -1666,19 +1666,19 @@ In the last chapter, I told you that many of the older archeologists and human paleontologists used to think that modern man was very old. 
The supposed ages of Piltdown and Galley Hill were given as evidence of the great age of anatomically modern man, and some interpretations -of the Swanscombe and Fontchevade fossils were taken to support +of the Swanscombe and Fontéchevade fossils were taken to support this view. The conclusion was that there were two parallel lines or -phyla of men already present well back in the Pleistocene. The -first of these, the more primitive or paleoanthropic line, was +“phyla” of men already present well back in the Pleistocene. The +first of these, the more primitive or “paleoanthropic” line, was said to include Heidelberg, the proto-neanderthaloids and classic -Neanderthal. The more anatomically modern or neanthropic line was +Neanderthal. The more anatomically modern or “neanthropic” line was thought to consist of Piltdown and the others mentioned above. The Neanderthaler or paleoanthropic line was thought to have become extinct after the first phase of the last great glaciation. Of course, the modern or neanthropic line was believed to have persisted into the -present, as the basis for the worlds population today. But with +present, as the basis for the world’s population today. But with Piltdown liquidated, Galley Hill known to be very late, and Swanscombe -and Fontchevade otherwise interpreted, there is little left of the +and Fontéchevade otherwise interpreted, there is little left of the so-called parallel phyla theory. While the theory was in vogue, however, and as long as the European @@ -1695,9 +1695,9 @@ where they had actually been dropped by the men who made and used them. The tools came, rather, from the secondary hodge-podge of the glacial gravels. I tried to give you a picture of the bulldozing action of glaciers (p. 40) and of the erosion and weathering that were -side-effects of a glacially conditioned climate on the earths surface. +side-effects of a glacially conditioned climate on the earth’s surface. 
As we said above, if one simply plucks tools out of the redeposited -gravels, his natural tendency is to type the tools by groups, and to +gravels, his natural tendency is to �type� the tools by groups, and to think that the groups stand for something _on their own_. In 1906, M. Victor Commont actually made a rare find of what seems @@ -1705,15 +1705,15 @@ to have been a kind of workshop site, on a terrace above the Somme river in France. Here, Commont realized, flake tools appeared clearly in direct association with core-biface tools. Few prehistorians paid attention to Commont or his site, however. It was easier to believe -that flake tools represented a distinct culture and that this -culture was that of the Neanderthaler or paleoanthropic line, and -that the core-bifaces stood for another culture which was that of the +that flake tools represented a distinct �culture� and that this +�culture� was that of the Neanderthaler or paleoanthropic line, and +that the core-bifaces stood for another �culture� which was that of the supposed early modern or neanthropic line. Of course, I am obviously skipping many details here. Some later sites with Neanderthal fossils do seem to have only flake tools, but other such sites have both types of tools. The flake tools which appeared _with_ the core-bifaces in the Swanscombe gravels were never made much of, although it -was embarrassing for the parallel phyla people that Fontchevade +was embarrassing for the parallel phyla people that Font�chevade ran heavily to flake tools. All in all, the parallel phyla theory flourished because it seemed so neat and easy to understand. @@ -1722,20 +1722,20 @@ TRADITIONS ARE TOOL-MAKING HABITS, NOT CULTURES In case you think I simply enjoy beating a dead horse, look in any standard book on prehistory written twenty (or even ten) years ago, or -in most encyclopedias. Youll find that each of the individual tool -types, of the West, at least, was supposed to represent a culture. 
-The cultures were believed to correspond to parallel lines of human +in most encyclopedias. You�ll find that each of the individual tool +types, of the West, at least, was supposed to represent a �culture.� +The �cultures� were believed to correspond to parallel lines of human evolution. In 1937, Mr. Harper Kelley strongly re-emphasized the importance -of Commonts workshop site and the presence of flake tools with -core-bifaces. Next followed Dr. Movius clear delineation of the +of Commont�s workshop site and the presence of flake tools with +core-bifaces. Next followed Dr. Movius� clear delineation of the chopper-chopping tool tradition of the Far East. This spoiled the nice symmetry of the flake-tool = paleoanthropic, core-biface = neanthropic equations. Then came increasing understanding of the importance of the pebble tools in Africa, and the location of several more workshop sites there, especially at Olorgesailie in Kenya. Finally came the -liquidation of Piltdown and the deflation of Galley Hills date. So it +liquidation of Piltdown and the deflation of Galley Hill�s date. So it is at last possible to picture an individual prehistoric man making a flake tool to do one job and a core-biface tool to do another. Commont showed us this picture in 1906, but few believed him. @@ -1751,7 +1751,7 @@ that of the cave on Mount Carmel in Palestine, where the blended pre-neanderthaloid, 70 per cent modern-type skulls were found. Here, in the same level with the skulls, were 9,784 flint tools. Of these, only three--doubtless strays--were core-bifaces; all the rest were flake -tools or flake chips. We noted above how the Fontchevade cave ran to +tools or flake chips. We noted above how the Font�chevade cave ran to flake tools. The only conclusion I would draw from this is that times and circumstances did exist in which prehistoric men needed only flake tools. So they only made flake tools for those particular times and @@ -1773,13 +1773,13 @@ piece of bone. 
From the gravels which yield the Clactonian flakes of England comes the fire-hardened point of a wooden spear. There are also the chance finds of the fossil human bones themselves, of which we spoke in the last chapter. Aside from the cave of Peking man, none -of the earliest tools have been found in caves. Open air or workshop +of the earliest tools have been found in caves. Open air or �workshop� sites which do not seem to have been disturbed later by some geological agency are very rare. The chart on page 65 shows graphically what the situation in west-central Europe seems to have been. It is not yet certain whether -there were pebble tools there or not. The Fontchevade cave comes +there were pebble tools there or not. The Font�chevade cave comes into the picture about 100,000 years ago or more. But for the earlier hundreds of thousands of years--below the red-dotted line on the chart--the tools we find come almost entirely from the haphazard @@ -1790,13 +1790,13 @@ kinds of all-purpose tools. Almost any one of them could be used for hacking, chopping, cutting, and scraping; so the men who used them must have been living in a rough and ready sort of way. They found or hunted their food wherever they could. In the anthropological jargon, they -were food-gatherers, pure and simple. +were �food-gatherers,� pure and simple. Because of the mixture in the gravels and in the materials they -carried, we cant be sure which animals these men hunted. Bones of +carried, we can�t be sure which animals these men hunted. Bones of the larger animals turn up in the gravels, but they could just as well belong to the animals who hunted the men, rather than the other -way about. We dont know. This is why camp sites like Commonts and +way about. We don�t know. This is why camp sites like Commont�s and Olorgesailie in Kenya are so important when we do find them. The animal bones at Olorgesailie belonged to various mammals of extremely large size. 
Probably they were taken in pit-traps, but there are a number of @@ -1809,18 +1809,18 @@ animal. Professor F. Clark Howell recently returned from excavating another important open air site at Isimila in Tanganyika. The site yielded the bones of many fossil animals and also thousands of core-bifaces, -flakes, and choppers. But Howells reconstruction of the food-getting -habits of the Isimila people certainly suggests that the word hunting -is too dignified for what they did; scavenging would be much nearer +flakes, and choppers. But Howell�s reconstruction of the food-getting +habits of the Isimila people certainly suggests that the word �hunting� +is too dignified for what they did; �scavenging� would be much nearer the mark. During a great part of this time the climate was warm and pleasant. The second interglacial period (the time between the second and third great alpine glaciations) lasted a long time, and during much of this time -the climate may have been even better than ours is now. We dont know +the climate may have been even better than ours is now. We don�t know that earlier prehistoric men in Europe or Africa lived in caves. They may not have needed to; much of the weather may have been so nice that -they lived in the open. Perhaps they didnt wear clothes, either. +they lived in the open. Perhaps they didn�t wear clothes, either. WHAT THE PEKING CAVE-FINDS TELL US @@ -1832,7 +1832,7 @@ were bones of dangerous animals, members of the wolf, bear, and cat families. Some of the cat bones belonged to beasts larger than tigers. There were also bones of other wild animals: buffalo, camel, deer, elephants, horses, sheep, and even ostriches. Seventy per cent of the -animals Peking man killed were fallow deer. Its much too cold and dry +animals Peking man killed were fallow deer. It�s much too cold and dry in north China for all these animals to live there today. 
So this list helps us
know that the weather was reasonably warm, and that there was enough
rain to grow grass for the grazing animals. The list also helps
@@ -1840,7 +1840,7 @@ the paleontologists to date the find.

Peking man also seems to have eaten plant food, for there are hackberry
seeds in the debris of the cave. His tools were made of sandstone and
-quartz and sometimes of a rather bad flint. As weve already seen, they
+quartz and sometimes of a rather bad flint. As we’ve already seen, they
belong in the chopper-tool tradition. It seems fairly clear that some
of the edges were chipped by right-handed people. There are also many
split pieces of heavy bone. Peking man probably split them so he could
@@ -1850,10 +1850,10 @@ Many of these split bones were the bones of Peking men. Each one of the
skulls had already had the base broken out of it. In no case were any
of the bones resting together in their natural relation to one another.
There is nothing like a burial; all of the bones are scattered. Now
-its true that animals could have scattered bodies that were not cared
+it’s true that animals could have scattered bodies that were not cared
for or buried. But splitting bones lengthwise and carefully removing
the base of a skull call for both the tools and the people to use them.
-Its pretty clear who the people were. Peking man was a cannibal.
+It’s pretty clear who the people were. Peking man was a cannibal.

* * * * *

@@ -1862,8 +1862,8 @@ prehistoric men. In those days life was rough. You evidently had to
watch out not only for dangerous animals but also for your fellow men.
You ate whatever you could catch or find growing. But you had sense
enough to build fires, and you had already formed certain habits for
-making the kinds of stone tools you needed. Thats about all we know.
+making the kinds of stone tools you needed. That’s about all we know.
+But I think we�ll have to admit that cultural beginnings had been made, and that these early people were really _men_. @@ -1876,16 +1876,16 @@ MORE EVIDENCE of Culture While the dating is not yet sure, the material that we get from caves in Europe must go back to about 100,000 years ago; the time of the -classic Neanderthal group followed soon afterwards. We dont know why +classic Neanderthal group followed soon afterwards. We don�t know why there is no earlier material in the caves; apparently they were not used before the last interglacial phase (the period just before the last great glaciation). We know that men of the classic Neanderthal group were living in caves from about 75,000 to 45,000 years ago. New radioactive carbon dates even suggest that some of the traces of -culture well describe in this chapter may have lasted to about 35,000 +culture we�ll describe in this chapter may have lasted to about 35,000 years ago. Probably some of the pre-neanderthaloid types of men had also lived in caves. But we have so far found their bones in caves only -in Palestine and at Fontchevade. +in Palestine and at Font�chevade. THE CAVE LAYERS @@ -1893,7 +1893,7 @@ THE CAVE LAYERS In parts of France, some peasants still live in caves. In prehistoric time, many generations of people lived in them. As a result, many caves have deep layers of debris. The first people moved in and lived -on the rock floor. They threw on the floor whatever they didnt want, +on the rock floor. They threw on the floor whatever they didn�t want, and they tracked in mud; nobody bothered to clean house in those days. Their debris--junk and mud and garbage and what not--became packed into a layer. As time went on, and generations passed, the layer grew @@ -1910,20 +1910,20 @@ earliest to latest. This is the _stratification_ we talked about (p. 
[Illustration: SECTION OF SHELTER ON LOWER TERRACE, LE MOUSTIER] -While we may find a mix-up in caves, its not nearly as bad as the +While we may find a mix-up in caves, it�s not nearly as bad as the mixing up that was done by glaciers. The animal bones and shells, the fireplaces, the bones of men, and the tools the men made all belong -together, if they come from one layer. Thats the reason why the cave +together, if they come from one layer. That�s the reason why the cave of Peking man is so important. It is also the reason why the caves in Europe and the Near East are so important. We can get an idea of which things belong together and which lot came earliest and which latest. In most cases, prehistoric men lived only in the mouths of caves. -They didnt like the dark inner chambers as places to live in. They +They didn�t like the dark inner chambers as places to live in. They preferred rock-shelters, at the bases of overhanging cliffs, if there was enough overhang to give shelter. When the weather was good, they no -doubt lived in the open air as well. Ill go on using the term cave -since its more familiar, but remember that I really mean rock-shelter, +doubt lived in the open air as well. I�ll go on using the term �cave� +since it�s more familiar, but remember that I really mean rock-shelter, as a place in which people actually lived. The most important European cave sites are in Spain, France, and @@ -1933,29 +1933,29 @@ found when the out-of-the-way parts of Europe, Africa, and Asia are studied. -AN INDUSTRY DEFINED +AN �INDUSTRY� DEFINED We have already seen that the earliest European cave materials are -those from the cave of Fontchevade. Movius feels certain that the +those from the cave of Font�chevade. 
Movius feels certain that the
lowest materials here date back well into the third interglacial stage,
-that which lay between the Riss (next to the last) and the Wrm I
+that which lay between the Riss (next to the last) and the Würm I
(first stage of the last) alpine glaciations. This material consists
of an _industry_ of stone tools, apparently all made in the flake
-tradition. This is the first time we have used the word industry.
+tradition. This is the first time we have used the word “industry.”
It is useful to call all of the different tools found together in one
layer and made of _one kind of material_ an industry; that is, the
tools must be found together as men left them. Tools taken from the
glacial gravels (or from windswept desert surfaces or river gravels
-or any geological deposit) are not together in this sense. We might
-say the latter have only geological, not archeological context.
+or any geological deposit) are not “together” in this sense. We might
+say the latter have only “geological,” not “archeological” context.
Archeological context means finding things just as men left them. We
-can tell what tools go together in an industrial sense only if we
+can tell what tools go together in an “industrial” sense only if we
have archeological context.

-Up to now, the only things we could have called industries were the
+Up to now, the only things we could have called “industries” were the
worked stone industry and perhaps the worked (?) bone industry of the
Peking cave. We could add some of the very clear cases of open air
-sites, like Olorgesailie. We couldnt use the term for the stone tools
+sites, like Olorgesailie. We couldn’t use the term for the stone tools
from the glacial gravels, because we do not know which tools belonged
together. But when the cave materials begin to appear in Europe, we can
begin to speak of industries. Most of the European caves of this time
@@ -1964,16 +1964,16 @@ contain industries of flint tools alone.
THE EARLIEST EUROPEAN CAVE LAYERS -Weve just mentioned the industry from what is said to be the oldest +We�ve just mentioned the industry from what is said to be the oldest inhabited cave in Europe; that is, the industry from the deepest layer -of the site at Fontchevade. Apparently it doesnt amount to much. The +of the site at Font�chevade. Apparently it doesn�t amount to much. The tools are made of stone, in the flake tradition, and are very poorly worked. This industry is called _Tayacian_. Its type tool seems to be a smallish flake tool, but there are also larger flakes which seem to have been fashioned for hacking. In fact, the type tool seems to be simply a smaller edition of the Clactonian tool (pictured on p. 45). -None of the Fontchevade tools are really good. There are scrapers, +None of the Font�chevade tools are really good. There are scrapers, and more or less pointed tools, and tools that may have been used for hacking and chopping. Many of the tools from the earlier glacial gravels are better made than those of this first industry we see in @@ -2005,7 +2005,7 @@ core-biface and the flake traditions. The core-biface tools usually make up less than half of all the tools in the industry. However, the name of the biface type of tool is generally given to the whole industry. It is called the _Acheulean_, actually a late form of it, as -Acheulean is also used for earlier core-biface tools taken from the +�Acheulean� is also used for earlier core-biface tools taken from the glacial gravels. In western Europe, the name used is _Upper Acheulean_ or _Micoquian_. The same terms have been borrowed to name layers E and F in the Tabun cave, on Mount Carmel in Palestine. @@ -2029,7 +2029,7 @@ those used for at least one of the flake industries we shall mention presently. There is very little else in these early cave layers. We do not have -a proper industry of bone tools. There are traces of fire, and of +a proper �industry� of bone tools. 
There are traces of fire, and of
animal bones, and a few shells. In Palestine, there are many more
bones of deer than of gazelle in these layers; the deer lives in a
wetter climate than does the gazelle. In the European cave layers, the
@@ -2043,18 +2043,18 @@ bones of fossil men definitely in place with this industry.

FLAKE INDUSTRIES FROM THE CAVES

Two more stone industries--the _Levalloisian_ and the
-_Mousterian_--turn up at approximately the same time in the European
+“_Mousterian_”--turn up at approximately the same time in the European
cave layers. Their tools seem to be mainly in the flake tradition, but
according to some of the authorities their preparation also shows some
combination with the habits by which the core-biface tools were
prepared.

-Now notice that I dont tell you the Levalloisian and the Mousterian
+Now notice that I don’t tell you the Levalloisian and the “Mousterian”
layers are both above the late Acheulean layers. Look at the cave
-section (p. 57) and youll find that some Mousterian of Acheulean
-tradition appears above some typical Mousterian. This means that
+section (p. 57) and you’ll find that some “Mousterian of Acheulean
+tradition” appears above some “typical Mousterian.” This means that
there may be some kinds of Acheulean industries that are later than
-some kinds of Mousterian. The same is true of the Levalloisian.
+some kinds of “Mousterian.” The same is true of the Levalloisian.

There were now several different kinds of habits that men used in
making stone tools. These habits were based on either one or the other
@@ -2072,7 +2072,7 @@ were no patent laws in those days.

The extremely complicated interrelationships of the different habits
used by the tool-makers of this range of time are at last being
-systematically studied. M. Franois Bordes has developed a statistical
+systematically studied. M. François Bordes has developed a statistical
method of great importance for understanding these tool preparation
habits.
@@ -2081,22 +2081,22 @@ THE LEVALLOISIAN AND MOUSTERIAN The easiest Levalloisian tool to spot is a big flake tool. The trick in making it was to fashion carefully a big chunk of stone (called -the Levalloisian tortoise core, because it resembles the shape of +the Levalloisian �tortoise core,� because it resembles the shape of a turtle-shell) and then to whack this in such a way that a large flake flew off. This large thin flake, with sharp cutting edges, is the finished Levalloisian tool. There were various other tools in a Levalloisian industry, but this is the characteristic _Levalloisian_ tool. -There are several typical Mousterian stone tools. Different from -the tools of the Levalloisian type, these were made from disc-like -cores. There are medium-sized flake side scrapers. There are also -some small pointed tools and some small hand axes. The last of these +There are several �typical Mousterian� stone tools. Different from +the tools of the Levalloisian type, these were made from �disc-like +cores.� There are medium-sized flake �side scrapers.� There are also +some small pointed tools and some small �hand axes.� The last of these tool types is often a flake worked on both of the flat sides (that is, bifacially). There are also pieces of flint worked into the form of crude balls. The pointed tools may have been fixed on shafts to make short jabbing spears; the round flint balls may have been used as -bolas. Actually, we dont _know_ what either tool was used for. The +bolas. Actually, we don�t _know_ what either tool was used for. The points and side scrapers are illustrated (pp. 64 and 66). [Illustration: LEVALLOIS FLAKE] @@ -2108,9 +2108,9 @@ Nowadays the archeologists are less and less sure of the importance of any one specific tool type and name. Twenty years ago, they used to speak simply of Acheulean or Levalloisian or Mousterian tools. 
Now, more and more, _all_ of the tools from some one layer in a -cave are called an industry, which is given a mixed name. Thus we -have Levalloiso-Mousterian, and Acheuleo-Levalloisian, and even -Acheuleo-Mousterian (or Mousterian of Acheulean tradition). Bordes +cave are called an �industry,� which is given a mixed name. Thus we +have �Levalloiso-Mousterian,� and �Acheuleo-Levalloisian,� and even +�Acheuleo-Mousterian� (or �Mousterian of Acheulean tradition�). Bordes� systematic work is beginning to clear up some of our confusion. The time of these late Acheuleo-Levalloiso-Mousterioid industries @@ -2120,16 +2120,16 @@ phase of the last great glaciation. It was also the time that the classic group of Neanderthal men was living in Europe. A number of the Neanderthal fossil finds come from these cave layers. Before the different habits of tool preparation were understood it used to be -popular to say Neanderthal man was Mousterian man. I think this is -wrong. What used to be called Mousterian is now known to be a variety +popular to say Neanderthal man was �Mousterian man.� I think this is +wrong. What used to be called �Mousterian� is now known to be a variety of industries with tools of both core-biface and flake habits, and -so mixed that the word Mousterian used alone really doesnt mean +so mixed that the word �Mousterian� used alone really doesn�t mean anything. The Neanderthalers doubtless understood the tool preparation habits by means of which Acheulean, Levalloisian and Mousterian type tools were produced. We also have the more modern-like Mount Carmel people, found in a cave layer of Palestine with tools almost entirely -in the flake tradition, called Levalloiso-Mousterian, and the -Fontchevade-Tayacian (p. 59). +in the flake tradition, called �Levalloiso-Mousterian,� and the +Font�chevade-Tayacian (p. 59). [Illustration: MOUSTERIAN POINT] @@ -2165,7 +2165,7 @@ which seem to have served as anvils or chopping blocks, are fairly common. 
Bits of mineral, used as coloring matter, have also been found. We
-dont know what the color was used for.
+don’t know what the color was used for.

[Illustration: MOUSTERIAN SIDE SCRAPER]

@@ -2230,7 +2230,7 @@ might suggest some notion of hoarding up the spirits or the strength
of bears killed in the hunt. Probably the people lived in small groups,
as hunting and food-gathering seldom provide enough food for large
groups of people. These groups probably had some kind of leader or
-chief. Very likely the rude beginnings of rules for community life
+“chief.” Very likely the rude beginnings of rules for community life
and politics, and even law, were being made. But what these were, we
do not know. We can only guess about such things, as we can only guess
about many others; for example, how the idea of a family must have been
@@ -2246,8 +2246,8 @@ small. The mixtures and blendings of the habits used in making stone
tools must mean that there were also mixtures and blends in many of
the other ideas and beliefs of these small groups. And what this
probably means is that there was no one _culture_ of the time. It is
-certainly unlikely that there were simply three cultures, Acheulean,
-Levalloisian, and Mousterian, as has been thought in the past.
+certainly unlikely that there were simply three cultures, “Acheulean,”
+“Levalloisian,” and “Mousterian,” as has been thought in the past.
Rather there must have been a great variety of loosely related cultures
at about the same stage of advancement. We could say, too, that here we
really begin to see, for the first time, that remarkable ability
@@ -2272,7 +2272,7 @@ related habits for the making of tools. But the men who made them must
have looked much like the men of the West. Their tools were different,
but just as useful.

-As to what the men of the West looked like, Ive already hinted at all
+As to what the men of the West looked like, I’ve already hinted at all
The Neanderthalers were present at the time. Some more modern-like men must have been about, too, since fossils of them have turned up at Mount Carmel in Palestine, and at @@ -2306,7 +2306,7 @@ A NEW TRADITION APPEARS Something new was probably beginning to happen in the European-Mediterranean area about 40,000 years ago, though all the rest of the Old World seems to have been going on as it had been. I -cant be sure of this because the information we are using as a basis +can�t be sure of this because the information we are using as a basis for dates is very inaccurate for the areas outside of Europe and the Mediterranean. @@ -2325,7 +2325,7 @@ drawing shows. It has sharp cutting edges, and makes a very useful knife. The real trick is to be able to make one. It is almost impossible to make a blade out of any stone but flint or a natural volcanic glass called obsidian. And even if you have flint or obsidian, -you first have to work up a special cone-shaped blade-core, from +you first have to work up a special cone-shaped �blade-core,� from which to whack off blades. [Illustration: PLAIN BLADE] @@ -2351,8 +2351,8 @@ found in equally early cave levels in Syria; their popularity there seems to fluctuate a bit. Some more or less parallel-sided flakes are known in the Levalloisian industry in France, but they are probably no earlier than Tabun E. The Tabun blades are part of a local late -Acheulean industry, which is characterized by core-biface hand -axes, but which has many flake tools as well. Professor F. E. +�Acheulean� industry, which is characterized by core-biface �hand +axes,� but which has many flake tools as well. Professor F. E. Zeuner believes that this industry may be more than 120,000 years old; actually its date has not yet been fixed, but it is very old--older than the fossil finds of modern-like men in the same caves. @@ -2371,7 +2371,7 @@ We are not sure just where the earliest _persisting_ habits for the production of blade tools developed. 
Impressed by the very early momentary appearance of blades at Tabun on Mount Carmel, Professor Dorothy A. Garrod first favored the Near East as a center of origin. -She spoke of some as yet unidentified Asiatic centre, which she +She spoke of �some as yet unidentified Asiatic centre,� which she thought might be in the highlands of Iran or just beyond. But more recent work has been done in this area, especially by Professor Coon, and the blade tools do not seem to have an early appearance there. When @@ -2395,21 +2395,21 @@ core (and the striking of the Levalloisian flake from it) might have followed through to the conical core and punch technique for the production of blades. Professor Garrod is much impressed with the speed of change during the later phases of the last glaciation, and its -probable consequences. She speaks of the greater number of industries +probable consequences. She speaks of �the greater number of industries having enough individual character to be classified as distinct ... -since evolution now starts to outstrip diffusion. Her evolution here +since evolution now starts to outstrip diffusion.� Her �evolution� here is of course an industrial evolution rather than a biological one. Certainly the people of Europe had begun to make blade tools during the warm spell after the first phase of the last glaciation. By about 40,000 years ago blades were well established. The bones of the blade -tool makers weve found so far indicate that anatomically modern men +tool makers we�ve found so far indicate that anatomically modern men had now certainly appeared. Unfortunately, only a few fossil men have so far been found from the very beginning of the blade tool range in Europe (or elsewhere). What I certainly shall _not_ tell you is that conquering bands of fine, strong, anatomically modern men, armed with superior blade tools, came sweeping out of the East to exterminate the -lowly Neanderthalers. 
Even if we dont know exactly what happened, Id -lay a good bet it wasnt that simple. +lowly Neanderthalers. Even if we don�t know exactly what happened, I�d +lay a good bet it wasn�t that simple. We do know a good deal about different blade industries in Europe. Almost all of them come from cave layers. There is a great deal of @@ -2418,7 +2418,7 @@ this complication; in fact, it doubtless simplifies it too much. But it may suggest all the complication of industries which is going on at this time. You will note that the upper portion of my much simpler chart (p. 65) covers the same material (in the section -marked Various Blade-Tool Industries). That chart is certainly too +marked �Various Blade-Tool Industries�). That chart is certainly too simplified. You will realize that all this complication comes not only from @@ -2429,7 +2429,7 @@ a good deal of climatic change at this time. The plants and animals that men used for food were changing, too. The great variety of tools and industries we now find reflect these changes and the ability of men to keep up with the times. Now, for example, is the first time we are -sure that there are tools to _make_ other tools. They also show mens +sure that there are tools to _make_ other tools. They also show men�s increasing ability to adapt themselves. @@ -2437,15 +2437,15 @@ SPECIAL TYPES OF BLADE TOOLS The most useful tools that appear at this time were made from blades. - 1. The backed blade. This is a knife made of a flint blade, with - one edge purposely blunted, probably to save the users fingers + 1. The �backed� blade. This is a knife made of a flint blade, with + one edge purposely blunted, probably to save the user�s fingers from being cut. There are several shapes of backed blades (p. 73). [Illustration: TWO BURINS] - 2. The _burin_ or graver. The burin was the original chisel. Its - cutting edge is _transverse_, like a chisels. Some burins are + 2. The _burin_ or �graver.� The burin was the original chisel. 
Its + cutting edge is _transverse_, like a chisel�s. Some burins are made like a screw-driver, save that burins are sharp. Others have edges more like the blade of a chisel or a push plane, with only one bevel. Burins were probably used to make slots in wood @@ -2456,29 +2456,29 @@ The most useful tools that appear at this time were made from blades. [Illustration: TANGED POINT] - 3. The tanged point. These stone points were used to tip arrows or + 3. The �tanged� point. These stone points were used to tip arrows or light spears. They were made from blades, and they had a long tang at the bottom where they were fixed to the shaft. At the place where the tang met the main body of the stone point, there was - a marked shoulder, the beginnings of a barb. Such points had + a marked �shoulder,� the beginnings of a barb. Such points had either one or two shoulders. [Illustration: NOTCHED BLADE] - 4. The notched or strangulated blade. Along with the points for + 4. The �notched� or �strangulated� blade. Along with the points for arrows or light spears must go a tool to prepare the arrow or - spear shaft. Today, such a tool would be called a draw-knife or - a spoke-shave, and this is what the notched blades probably are. + spear shaft. Today, such a tool would be called a �draw-knife� or + a �spoke-shave,� and this is what the notched blades probably are. Our spoke-shaves have sharp straight cutting blades and really - shave. Notched blades of flint probably scraped rather than cut. + �shave.� Notched blades of flint probably scraped rather than cut. - 5. The awl, drill, or borer. These blade tools are worked out + 5. The �awl,� �drill,� or �borer.� These blade tools are worked out to a spike-like point. They must have been used for making holes in wood, bone, shell, skin, or other things. [Illustration: DRILL OR AWL] - 6. The end-scraper on a blade is a tool with one or both ends + 6. 
The �end-scraper on a blade� is a tool with one or both ends worked so as to give a good scraping edge. It could have been used to hollow out wood or bone, scrape hides, remove bark from trees, and a number of other things (p. 78). @@ -2489,11 +2489,11 @@ usually made of blades, but the best examples are so carefully worked on both sides (bifacially) that it is impossible to see the original blade. This tool is - 7. The laurel leaf point. Some of these tools were long and + 7. The �laurel leaf� point. Some of these tools were long and dagger-like, and must have been used as knives or daggers. Others - were small, called willow leaf, and must have been mounted on + were small, called �willow leaf,� and must have been mounted on spear or arrow shafts. Another typical Solutrean tool is the - shouldered point. Both the laurel leaf and shouldered point + �shouldered� point. Both the �laurel leaf� and �shouldered� point types are illustrated (see above and p. 79). [Illustration: END-SCRAPER ON A BLADE] @@ -2507,17 +2507,17 @@ second is a core tool. [Illustration: SHOULDERED POINT] - 8. The keel-shaped round scraper is usually small and quite round, + 8. The �keel-shaped round scraper� is usually small and quite round, and has had chips removed up to a peak in the center. It is called - keel-shaped because it is supposed to look (when upside down) + �keel-shaped� because it is supposed to look (when upside down) like a section through a boat. Actually, it looks more like a tent or an umbrella. Its outer edges are sharp all the way around, and it was probably a general purpose scraping tool (see illustration, p. 81). - 9. The keel-shaped nosed scraper is a much larger and heavier tool + 9. The �keel-shaped nosed scraper� is a much larger and heavier tool than the round scraper. It was made on a core with a flat bottom, - and has one nicely worked end or nose. 
Such tools are usually + and has one nicely worked end or �nose.� Such tools are usually large enough to be easily grasped, and probably were used like push planes (see illustration, p. 81). @@ -2530,7 +2530,7 @@ the most easily recognized blade tools, although they show differences in detail at different times. There are also many other kinds. Not all of these tools appear in any one industry at one time. Thus the different industries shown in the chart (p. 72) each have only some -of the blade tools weve just listed, and also a few flake tools. Some +of the blade tools we�ve just listed, and also a few flake tools. Some industries even have a few core tools. The particular types of blade tools appearing in one cave layer or another, and the frequency of appearance of the different types, tell which industry we have in each @@ -2545,15 +2545,15 @@ to appear. There are knives, pins, needles with eyes, and little double-pointed straight bars of bone that were probably fish-hooks. The fish-line would have been fastened in the center of the bar; when the fish swallowed the bait, the bar would have caught cross-wise in the -fishs mouth. +fish�s mouth. One quite special kind of bone tool is a long flat point for a light spear. It has a deep notch cut up into the breadth of its base, and is -called a split-based bone point (p. 82). We know examples of bone +called a �split-based bone point� (p. 82). We know examples of bone beads from these times, and of bone handles for flint tools. Pierced teeth of some animals were worn as beads or pendants, but I am not sure -that elks teeth were worn this early. There are even spool-shaped -buttons or toggles. +that elks� teeth were worn this early. There are even spool-shaped +�buttons� or toggles. [Illustration: SPLIT-BASED BONE POINT] @@ -2595,12 +2595,12 @@ almost to have served as sketch blocks. The surfaces of these various objects may show animals, or rather abstract floral designs, or geometric designs. 
-[Illustration: VENUS FIGURINE FROM WILLENDORF] +[Illustration: �VENUS� FIGURINE FROM WILLENDORF] Some of the movable art is not done on tools. The most remarkable examples of this class are little figures of women. These women seem to be pregnant, and their most female characteristics are much emphasized. -It is thought that these Venus or Mother-goddess figurines may be +It is thought that these �Venus� or �Mother-goddess� figurines may be meant to show the great forces of nature--fertility and the birth of life. @@ -2616,21 +2616,21 @@ are different styles in the cave art. The really great cave art is pretty well restricted to southern France and Cantabrian (northwestern) Spain. -There are several interesting things about the Franco-Cantabrian cave +There are several interesting things about the �Franco-Cantabrian� cave art. It was done deep down in the darkest and most dangerous parts of the caves, although the men lived only in the openings of caves. If you think what they must have had for lights--crude lamps of hollowed stone have been found, which must have burned some kind of oil or grease, with a matted hair or fiber wick--and of the animals that may have -lurked in the caves, youll understand the part about danger. Then, -too, were sure the pictures these people painted were not simply to be +lurked in the caves, you�ll understand the part about danger. Then, +too, we�re sure the pictures these people painted were not simply to be looked at and admired, for they painted one picture right over other pictures which had been done earlier. Clearly, it was the _act_ of _painting_ that counted. The painter had to go way down into the most mysterious depths of the earth and create an animal in paint. Possibly he believed that by doing this he gained some sort of magic power over the same kind of animal when he hunted it in the open air. 
It certainly -doesnt look as if he cared very much about the picture he painted--as +doesn�t look as if he cared very much about the picture he painted--as a finished product to be admired--for he or somebody else soon went down and painted another animal right over the one he had done. @@ -2683,10 +2683,10 @@ it. Their art is another example of the direction the human mind was taking. And when I say human, I mean it in the fullest sense, for this is the time in which fully modern man has appeared. On page 34, we -spoke of the Cro-Magnon group and of the Combe Capelle-Brnn group of -Caucasoids and of the Grimaldi Negroids, who are no longer believed +spoke of the Cro-Magnon group and of the Combe Capelle-Br�nn group of +Caucasoids and of the Grimaldi �Negroids,� who are no longer believed to be Negroid. I doubt that any one of these groups produced most of -the achievements of the times. Its not yet absolutely sure which +the achievements of the times. It�s not yet absolutely sure which particular group produced the great cave art. The artists were almost certainly a blend of several (no doubt already mixed) groups. The pair of Grimaldians were buried in a grave with a sprinkling of red ochre, @@ -2705,9 +2705,9 @@ also found about the shore of the Mediterranean basin, and it moved into northern Europe as the last glaciation pulled northward. People began making blade tools of very small size. They learned how to chip very slender and tiny blades from a prepared core. Then they made these -little blades into tiny triangles, half-moons (lunates), trapezoids, +little blades into tiny triangles, half-moons (�lunates�), trapezoids, and several other geometric forms. These little tools are called -microliths. They are so small that most of them must have been fixed +�microliths.� They are so small that most of them must have been fixed in handles or shafts. 
[Illustration: MICROLITHS @@ -2726,7 +2726,7 @@ One corner of each little triangle stuck out, and the whole thing made a fine barbed harpoon. In historic times in Egypt, geometric trapezoidal microliths were still in use as arrowheads. They were fastened--broad end out--on the end of an arrow shaft. It seems queer -to give an arrow a point shaped like a T. Actually, the little points +to give an arrow a point shaped like a �T.� Actually, the little points were very sharp, and must have pierced the hides of animals very easily. We also think that the broader cutting edge of the point may have caused more bleeding than a pointed arrowhead would. In hunting @@ -2739,7 +2739,7 @@ is some evidence that they appear early in the Near East. Their use was very common in northwest Africa but this came later. The microlith makers who reached south Russia and central Europe possibly moved up out of the Near East. Or it may have been the other way around; we -simply dont yet know. +simply don�t yet know. Remember that the microliths we are talking about here were made from carefully prepared little blades, and are often geometric in outline. @@ -2749,7 +2749,7 @@ even some flake scrapers, in most microlithic industries. I emphasize this bladelet and the geometric character of the microlithic industries of the western Old World, since there has sometimes been confusion in the matter. Sometimes small flake chips, utilized as minute pointed -tools, have been called microliths. They may be _microlithic_ in size +tools, have been called �microliths.� They may be _microlithic_ in size in terms of the general meaning of the word, but they do not seem to belong to the sub-tradition of the blade tool preparation habits which we have been discussing here. @@ -2763,10 +2763,10 @@ in western Asia too, and early, although Professor Garrod is no longer sure that the whole tradition originated in the Near East. If you look again at my chart (p. 
72) you will note that in western Asia I list some of the names of the western European industries, but with the -qualification -like (for example, Gravettian-like). The western +qualification �-like� (for example, �Gravettian-like�). The western Asiatic blade-tool industries do vaguely recall some aspects of those of western Europe, but we would probably be better off if we used -completely local names for them. The Emiran of my chart is such an +completely local names for them. The �Emiran� of my chart is such an example; its industry includes a long spike-like blade point which has no western European counterpart. @@ -2774,13 +2774,13 @@ When we last spoke of Africa (p. 66), I told you that stone tools there were continuing in the Levalloisian flake tradition, and were becoming smaller. At some time during this process, two new tool types appeared in northern Africa: one was the Aterian point with -a tang (p. 67), and the other was a sort of laurel leaf point, -called the Sbaikian. These two tool types were both produced from +a tang (p. 67), and the other was a sort of �laurel leaf� point, +called the �Sbaikian.� These two tool types were both produced from flakes. The Sbaikian points, especially, are roughly similar to some of the Solutrean points of Europe. It has been suggested that both the Sbaikian and Aterian points may be seen on their way to France through their appearance in the Spanish cave deposits of Parpallo, but there is -also a rival pre-Solutrean in central Europe. We still do not know +also a rival �pre-Solutrean� in central Europe. We still do not know whether there was any contact between the makers of these north African tools and the Solutrean tool-makers. What does seem clear is that the blade-tool tradition itself arrived late in northern Africa. @@ -2788,11 +2788,11 @@ blade-tool tradition itself arrived late in northern Africa. 
NETHER AFRICA -Blade tools and laurel leaf points and some other probably late +Blade tools and �laurel leaf� points and some other probably late stone tool types also appear in central and southern Africa. There are geometric microliths on bladelets and even some coarse pottery in east Africa. There is as yet no good way of telling just where these -items belong in time; in broad geological terms they are late. +items belong in time; in broad geological terms they are �late.� Some people have guessed that they are as early as similar European and Near Eastern examples, but I doubt it. The makers of small-sized Levalloisian flake tools occupied much of Africa until very late in @@ -2823,18 +2823,18 @@ ancestors of the American Indians came from Asia. The stone-tool traditions of Europe, Africa, the Near and Middle East, and central Siberia, did _not_ move into the New World. With only a very few special or late exceptions, there are _no_ core-bifaces, -flakes, or blade tools of the Old World. Such things just havent been +flakes, or blade tools of the Old World. Such things just haven�t been found here. -This is why I say its a shame we dont know more of the end of the +This is why I say it�s a shame we don�t know more of the end of the chopper-tool tradition in the Far East. According to Weidenreich, the Mongoloids were in the Far East long before the end of the last glaciation. If the genetics of the blood group types do demand a non-Mongoloid ancestry for the American Indians, who else may have been in the Far East 25,000 years ago? We know a little about the habits for making stone tools which these first people brought with them, -and these habits dont conform with those of the western Old World. -Wed better keep our eyes open for whatever happened to the end of +and these habits don�t conform with those of the western Old World. 
+We'd better keep our eyes open for whatever happened to the end of the chopper-tool tradition in northern China; already there are hints that it lasted late there. Also we should watch future excavations in eastern Siberia. Perhaps we shall find the chopper-tool tradition @@ -2846,13 +2846,13 @@ THE NEW ERA Perhaps it comes in part from the way I read the evidence and perhaps in part it is only intuition, but I feel that the materials of this chapter suggest a new era in the ways of life. Before about 40,000 -years ago, people simply gathered their food, wandering over large +years ago, people simply "gathered" their food, wandering over large areas to scavenge or to hunt in a simple sort of way. But here we -have seen them settling-in more, perhaps restricting themselves in +have seen them "settling-in" more, perhaps restricting themselves in their wanderings and adapting themselves to a given locality in more intensive ways. This intensification might be suggested by the word -collecting. The ways of life we described in the earlier chapters -were food-gathering ways, but now an era of food-collecting has +"collecting." The ways of life we described in the earlier chapters +were "food-gathering" ways, but now an era of "food-collecting" has begun. We shall see further intensifications of it in the next chapter. @@ -2883,8 +2883,8 @@ The last great glaciation of the Ice Age was a two-part affair, with a sub-phase at the end of the second part. In Europe the last sub-phase of this glaciation commenced somewhere around 15,000 years ago. Then the glaciers began to melt back, for the last time. Remember that -Professor Antevs (p. 19) isnt sure the Ice Age is over yet! This -melting sometimes went by fits and starts, and the weather wasnt +Professor Antevs (p. 19) isn't sure the Ice Age is over yet!
This +melting sometimes went by fits and starts, and the weather wasn�t always changing for the better; but there was at least one time when European weather was even better than it is now. @@ -2927,16 +2927,16 @@ Sweden. Much of this north European material comes from bogs and swamps where it had become water-logged and has kept very well. Thus we have much more complete _assemblages_[4] than for any time earlier. - [4] Assemblage is a useful word when there are different kinds of + [4] �Assemblage� is a useful word when there are different kinds of archeological materials belonging together, from one area and of - one time. An assemblage is made up of a number of industries + one time. An assemblage is made up of a number of �industries� (that is, all the tools in chipped stone, all the tools in bone, all the tools in wood, the traces of houses, etc.) and everything else that manages to survive, such as the art, the burials, the bones of the animals used as food, and the traces of plant foods; in fact, everything that has been left to us and can be used to help reconstruct the lives of the people to - whom it once belonged. Our own present-day assemblage would be + whom it once belonged. Our own present-day �assemblage� would be the sum total of all the objects in our mail-order catalogues, department stores and supply houses of every sort, our churches, our art galleries and other buildings, together with our roads, @@ -2976,7 +2976,7 @@ found. It seems likely that the Maglemosian bog finds are remains of summer camps, and that in winter the people moved to higher and drier regions. -Childe calls them the Forest folk; they probably lived much the +Childe calls them the �Forest folk�; they probably lived much the same sort of life as did our pre-agricultural Indians of the north central states. They hunted small game or deer; they did a great deal of fishing; they collected what plant food they could find. 
In fact, @@ -3010,7 +3010,7 @@ South of the north European belt the hunting-food-collecting peoples were living on as best they could during this time. One interesting group, which seems to have kept to the regions of sandy soil and scrub forest, made great quantities of geometric microliths. These are the -materials called _Tardenoisian_. The materials of the Forest folk of +materials called _Tardenoisian_. The materials of the �Forest folk� of France and central Europe generally are called _Azilian_; Dr. Movius believes the term might best be restricted to the area south of the Loire River. @@ -3032,24 +3032,24 @@ to it than this. Professor Mathiassen of Copenhagen, who knows the archeological remains of this time very well, poses a question. He speaks of the material -as being neither rich nor progressive, in fact rather stagnant, but -he goes on to add that the people had a certain receptiveness and +as being neither rich nor progressive, in fact �rather stagnant,� but +he goes on to add that the people had a certain �receptiveness� and were able to adapt themselves quickly when the next change did come. -My own understanding of the situation is that the Forest folk made +My own understanding of the situation is that the �Forest folk� made nothing as spectacular as had the producers of the earlier Magdalenian assemblage and the Franco-Cantabrian art. On the other hand, they _seem_ to have been making many more different kinds of tools for many more different kinds of tasks than had their Ice Age forerunners. I -emphasize seem because the preservation in the Maglemosian bogs +emphasize �seem� because the preservation in the Maglemosian bogs is very complete; certainly we cannot list anywhere near as many different things for earlier times as we did for the Maglemosians (p. 94). I believe this experimentation with all kinds of new tools and gadgets, this intensification of adaptiveness (p. 
91), this -receptiveness, even if it is still only pointed toward hunting, +�receptiveness,� even if it is still only pointed toward hunting, fishing, and food-collecting, is an important thing. Remember that the only marker we have handy for the _beginning_ of -this tendency toward receptiveness and experimentation is the +this tendency toward �receptiveness� and experimentation is the little microlithic blade tools of various geometric forms. These, we saw, began before the last ice had melted away, and they lasted on in use for a very long time. I wish there were a better marker than @@ -3063,7 +3063,7 @@ CHANGES IN OTHER AREAS? All this last section was about Europe. How about the rest of the world when the last glaciers were melting away? -We simply dont know much about this particular time in other parts +We simply don�t know much about this particular time in other parts of the world except in Europe, the Mediterranean basin and the Middle East. People were certainly continuing to move into the New World by way of Siberia and the Bering Strait about this time. But for the @@ -3075,10 +3075,10 @@ clear information. REAL CHANGE AND PRELUDE IN THE NEAR EAST The appearance of the microliths and the developments made by the -Forest folk of northwestern Europe also mark an end. They show us +�Forest folk� of northwestern Europe also mark an end. They show us the terminal phase of the old food-collecting way of life. It grows increasingly clear that at about the same time that the Maglemosian and -other Forest folk were adapting themselves to hunting, fishing, and +other �Forest folk� were adapting themselves to hunting, fishing, and collecting in new ways to fit the post-glacial environment, something completely new was being made ready in western Asia. @@ -3098,7 +3098,7 @@ simply gathering or collecting it. When their food-production became reasonably effective, people could and did settle down in village-farming communities. 
With the appearance of the little farming villages, a new way of life was actually under way. Professor Childe -has good reason to speak of the food-producing revolution, for it was +has good reason to speak of the �food-producing revolution,� for it was indeed a revolution. @@ -3117,8 +3117,8 @@ before the _how_ and _why_ answers begin to appear. Anthropologically trained archeologists are fascinated with the cultures of men in times of great change. About ten or twelve thousand years ago, the general level of culture in many parts of the world seems to have been ready -for change. In northwestern Europe, we saw that cultures changed -just enough so that they would not have to change. We linked this to +for change. In northwestern Europe, we saw that cultures �changed +just enough so that they would not have to change.� We linked this to environmental changes with the coming of post-glacial times. In western Asia, we archeologists can prove that the food-producing @@ -3155,7 +3155,7 @@ living as the Maglemosians did? These are the questions we still have to face. -CULTURAL RECEPTIVENESS AND PROMISING ENVIRONMENTS +CULTURAL �RECEPTIVENESS� AND PROMISING ENVIRONMENTS Until the archeologists and the natural scientists--botanists, geologists, zoologists, and general ecologists--have spent many more @@ -3163,15 +3163,15 @@ years on the problem, we shall not have full _how_ and _why_ answers. I do think, however, that we are beginning to understand what to look for. We shall have to learn much more of what makes the cultures of men -receptive and experimental. Did change in the environment alone -force it? Was it simply a case of Professor Toynbees challenge and -response? I cannot believe the answer is quite that simple. Were it -so simple, we should want to know why the change hadnt come earlier, +�receptive� and experimental. Did change in the environment alone +force it? 
Was it simply a case of Professor Toynbee's "challenge and +response?" I cannot believe the answer is quite that simple. Were it +so simple, we should want to know why the change hadn't come earlier, along with earlier environmental changes. We shall not know the answer, however, until we have excavated the traces of many more cultures of the time in question. We shall doubtless also have to learn more about, and think imaginatively about, the simpler cultures still left today. -The mechanics of culture in general will be bound to interest us. +The "mechanics" of culture in general will be bound to interest us. It will also be necessary to learn much more of the environments of 10,000 to 12,000 years ago. In which regions of the world were the @@ -3228,7 +3228,7 @@ THE OLD THEORY TOO SIMPLE FOR THE FACTS This theory was set up before we really knew anything in detail about the later prehistory of the Near and Middle East. We now know that -the facts which have been found dont fit the old theory at all well. +the facts which have been found don't fit the old theory at all well. Also, I have yet to find an American meteorologist who feels that we know enough about the changes in the weather pattern to say that it can have been so simple and direct. And, of course, the glacial ice which @@ -3238,7 +3238,7 @@ of great alpine glaciers, and long periods of warm weather in between. If the rain belt moved north as the glaciers melted for the last time, it must have moved in the same direction in earlier times. Thus, the forced neighborliness of men, plants, and animals in river valleys and -oases must also have happened earlier. Why didnt domestication happen +oases must also have happened earlier. Why didn't domestication happen earlier, then? Furthermore, it does not seem to be in the oases and river valleys @@ -3275,20 +3275,20 @@ archeologists, probably through habit, favor an old scheme of Grecized names for the subdivisions: paleolithic, mesolithic, neolithic.
I refuse to use these words myself. They have meant too many different things to too many different people and have tended to hide some pretty -fuzzy thinking. Probably you havent even noticed my own scheme of -subdivision up to now, but Id better tell you in general what it is. +fuzzy thinking. Probably you haven't even noticed my own scheme of +subdivision up to now, but I'd better tell you in general what it is. I think of the earliest great group of archeological materials, from which we can deduce only a food-gathering way of culture, as the -_food-gathering stage_. I say stage rather than age, because it +_food-gathering stage_. I say "stage" rather than "age," because it is not quite over yet; there are still a few primitive people in out-of-the-way parts of the world who remain in the _food-gathering stage_. In fact, Professor Julian Steward would probably prefer to call it a food-gathering _level_ of existence, rather than a stage. This would be perfectly acceptable to me. I also tend to find myself using _collecting_, rather than _gathering_, for the more recent aspects or -era of the stage, as the word collecting appears to have more sense -of purposefulness and specialization than does gathering (see p. +era of the stage, as the word "collecting" appears to have more sense +of purposefulness and specialization than does "gathering" (see p. 91). Now, while I think we could make several possible subdivisions of the @@ -3297,22 +3297,22 @@ believe the only one which means much to us here is the last or _terminal sub-era of food-collecting_ of the whole food-gathering stage. The microliths seem to mark its approach in the northwestern part of the Old World. It is really shown best in the Old World by -the materials of the Forest folk, the cultural adaptation to the +the materials of the "Forest folk," the cultural adaptation to the post-glacial environment in northwestern Europe.
We talked about -the Forest folk at the beginning of this chapter, and I used the +the �Forest folk� at the beginning of this chapter, and I used the Maglemosian assemblage of Denmark as an example. [5] It is difficult to find words which have a sequence or gradation of meaning with respect to both development and a range of time in the past, or with a range of time from somewhere in the past which is perhaps not yet ended. One standard Webster definition - of _stage_ is: One of the steps into which the material - development of man ... is divided. I cannot find any dictionary + of _stage_ is: �One of the steps into which the material + development of man ... is divided.� I cannot find any dictionary definition that suggests which of the words, _stage_ or _era_, has the meaning of a longer span of time. Therefore, I have chosen to let my eras be shorter, and to subdivide my stages - into eras. Webster gives _era_ as: A signal stage of history, - an epoch. When I want to subdivide my eras, I find myself using + into eras. Webster gives _era_ as: �A signal stage of history, + an epoch.� When I want to subdivide my eras, I find myself using _sub-eras_. Thus I speak of the _eras_ within a _stage_ and of the _sub-eras_ within an _era_; that is, I do so when I feel that I really have to, and when the evidence is clear enough to @@ -3328,9 +3328,9 @@ realms of culture. It is rather that for most of prehistoric time the materials left to the archeologists tend to limit our deductions to technology and economics. -Im so soon out of my competence, as conventional ancient history +I�m so soon out of my competence, as conventional ancient history begins, that I shall only suggest the earlier eras of the -food-producing stage to you. This book is about prehistory, and Im not +food-producing stage to you. This book is about prehistory, and I�m not a universal historian. 
@@ -3339,28 +3339,28 @@ THE TWO EARLIEST ERAS OF THE FOOD-PRODUCING STAGE The food-producing stage seems to appear in western Asia with really revolutionary suddenness. It is seen by the relative speed with which the traces of new crafts appear in the earliest village-farming -community sites weve dug. It is seen by the spread and multiplication +community sites we�ve dug. It is seen by the spread and multiplication of these sites themselves, and the remarkable growth in human -population we deduce from this increase in sites. Well look at some +population we deduce from this increase in sites. We�ll look at some of these sites and the archeological traces they yield in the next chapter. When such village sites begin to appear, I believe we are in the _era of the primary village-farming community_. I also believe this is the second era of the food-producing stage. The first era of the food-producing stage, I believe, was an _era of -incipient cultivation and animal domestication_. I keep saying I -believe because the actual evidence for this earlier era is so slight +incipient cultivation and animal domestication_. I keep saying �I +believe� because the actual evidence for this earlier era is so slight that one has to set it up mainly by playing a hunch for it. The reason for playing the hunch goes about as follows. One thing we seem to be able to see, in the food-collecting era in general, is a tendency for people to begin to settle down. This settling down seemed to become further intensified in the terminal -era. How this is connected with Professor Mathiassens receptiveness +era. How this is connected with Professor Mathiassen�s �receptiveness� and the tendency to be experimental, we do not exactly know. The evidence from the New World comes into play here as well as that from the Old World. 
With this settling down in one place, the people of the -terminal era--especially the Forest folk whom we know best--began +terminal era--especially the �Forest folk� whom we know best--began making a great variety of new things. I remarked about this earlier in the chapter. Dr. Robert M. Adams is of the opinion that this atmosphere of experimentation with new tools--with new ways of collecting food--is @@ -3368,9 +3368,9 @@ the kind of atmosphere in which one might expect trials at planting and at animal domestication to have been made. We first begin to find traces of more permanent life in outdoor camp sites, although caves were still inhabited at the beginning of the terminal era. It is not -surprising at all that the Forest folk had already domesticated the +surprising at all that the �Forest folk� had already domesticated the dog. In this sense, the whole era of food-collecting was becoming ready -and almost incipient for cultivation and animal domestication. +and almost �incipient� for cultivation and animal domestication. Northwestern Europe was not the place for really effective beginnings in agriculture and animal domestication. These would have had to take @@ -3425,13 +3425,13 @@ zone which surrounds the drainage basin of the Tigris and Euphrates Rivers at elevations of from approximately 2,000 to 5,000 feet. The lower alluvial land of the Tigris-Euphrates basin itself has very little rainfall. Some years ago Professor James Henry Breasted called -the alluvial lands of the Tigris-Euphrates a part of the fertile -crescent. These alluvial lands are very fertile if irrigated. Breasted +the alluvial lands of the Tigris-Euphrates a part of the �fertile +crescent.� These alluvial lands are very fertile if irrigated. Breasted was most interested in the oriental civilizations of conventional ancient history, and irrigation had been discovered before they appeared. 
-The country of hilly flanks above Breasteds crescent receives from +The country of hilly flanks above Breasted�s crescent receives from 10 to 20 or more inches of winter rainfall each year, which is about what Kansas has. Above the hilly-flanks zone tower the peaks and ridges of the Lebanon-Amanus chain bordering the coast-line from Palestine @@ -3440,7 +3440,7 @@ range of the Iraq-Iran borderland. This rugged mountain frame for our hilly-flanks zone rises to some magnificent alpine scenery, with peaks of from ten to fifteen thousand feet in elevation. There are several gaps in the Mediterranean coastal portion of the frame, through which -the winters rain-bearing winds from the sea may break so as to carry +the winter�s rain-bearing winds from the sea may break so as to carry rain to the foothills of the Taurus and the Zagros. The picture I hope you will have from this description is that of an @@ -3482,7 +3482,7 @@ hilly-flanks zone in their wild state. With a single exception--that of the dog--the earliest positive evidence of domestication includes the two forms of wheat, the barley, and the goat. The evidence comes from within the hilly-flanks zone. -However, it comes from a settled village proper, Jarmo (which Ill +However, it comes from a settled village proper, Jarmo (which I�ll describe in the next chapter), and is thus from the era of the primary village-farming community. We are still without positive evidence of domesticated grain and animals in the first era of the food-producing @@ -3534,9 +3534,9 @@ and the spread of ideas of people who had passed on into one of the more developed eras. In many cases, the terminal era of food-collecting was ended by the incoming of the food-producing peoples themselves. 
For example, the practices of food-production were carried into Europe -by the actual movement of some numbers of peoples (we dont know how +by the actual movement of some numbers of peoples (we don�t know how many) who had reached at least the level of the primary village-farming -community. The Forest folk learned food-production from them. There +community. The �Forest folk� learned food-production from them. There was never an era of incipient cultivation and domestication proper in Europe, if my hunch is right. @@ -3547,16 +3547,16 @@ The way I see it, two things were required in order that an era of incipient cultivation and domestication could begin. First, there had to be the natural environment of a nuclear area, with its whole group of plants and animals capable of domestication. This is the aspect of -the matter which weve said is directly given by nature. But it is +the matter which we�ve said is directly given by nature. But it is quite possible that such an environment with such a group of plants and animals in it may have existed well before ten thousand years ago in the Near East. It is also quite possible that the same promising condition may have existed in regions which never developed into nuclear areas proper. Here, again, we come back to the cultural factor. -I think it was that atmosphere of experimentation weve talked about -once or twice before. I cant define it for you, other than to say that +I think it was that �atmosphere of experimentation� we�ve talked about +once or twice before. I can�t define it for you, other than to say that by the end of the Ice Age, the general level of many cultures was ready -for change. Ask me how and why this was so, and Ill tell you we dont +for change. Ask me how and why this was so, and I�ll tell you we don�t know yet, and that if we did understand this kind of question, there would be no need for me to go on being a prehistorian! 
@@ -3590,7 +3590,7 @@ such collections for the modern wild forms of animals and plants from some of our nuclear areas. In the nuclear area in the Near East, some of the wild animals, at least, have already become extinct. There are no longer wild cattle or wild horses in western Asia. We know they were -there from the finds weve made in caves of late Ice Age times, and +there from the finds we�ve made in caves of late Ice Age times, and from some slightly later sites. @@ -3601,7 +3601,7 @@ incipient era of cultivation and animal domestication. I am closing this chapter with descriptions of two of the best Near Eastern examples I know of. You may not be satisfied that what I am able to describe makes a full-bodied era of development at all. Remember, however, that -Ive told you Im largely playing a kind of a hunch, and also that the +I�ve told you I�m largely playing a kind of a hunch, and also that the archeological materials of this era will always be extremely difficult to interpret. At the beginning of any new way of life, there will be a great tendency for people to make-do, at first, with tools and habits @@ -3613,7 +3613,7 @@ THE NATUFIAN, AN ASSEMBLAGE OF THE INCIPIENT ERA The assemblage called the Natufian comes from the upper layers of a number of caves in Palestine. Traces of its flint industry have also -turned up in Syria and Lebanon. We dont know just how old it is. I +turned up in Syria and Lebanon. We don�t know just how old it is. I guess that it probably falls within five hundred years either way of about 5000 B.C. @@ -3662,7 +3662,7 @@ pendants. There were also beads and pendants of pierced teeth and shell. A number of Natufian burials have been found in the caves; some burials were grouped together in one grave. 
The people who were buried within the Mount Carmel cave were laid on their backs in an extended position, -while those on the terrace seem to have been flexed (placed in their +while those on the terrace seem to have been �flexed� (placed in their graves in a curled-up position). This may mean no more than that it was easier to dig a long hole in cave dirt than in the hard-packed dirt of the terrace. The people often had some kind of object buried with them, @@ -3679,7 +3679,7 @@ beads. GROUND STONE BONE] -The animal bones of the Natufian layers show beasts of a modern type, +The animal bones of the Natufian layers show beasts of a �modern� type, but with some differences from those of present-day Palestine. The bones of the gazelle far outnumber those of the deer; since gazelles like a much drier climate than deer, Palestine must then have had much @@ -3692,9 +3692,9 @@ Maglemosian of northern Europe. More recently, it has been reported that a domesticated goat is also part of the Natufian finds. The study of the human bones from the Natufian burials is not yet -complete. Until Professor McCowns study becomes available, we may note -Professor Coons assessment that these people were of a basically -Mediterranean type. +complete. Until Professor McCown�s study becomes available, we may note +Professor Coon�s assessment that these people were of a �basically +Mediterranean type.� THE KARIM SHAHIR ASSEMBLAGE @@ -3704,11 +3704,11 @@ of a temporary open site or encampment. It lies on the top of a bluff in the Kurdish hill-country of northeastern Iraq. It was dug by Dr. Bruce Howe of the expedition I directed in 1950-51 for the Oriental Institute and the American Schools of Oriental Research. In 1954-55, -our expedition located another site, Mlefaat, with general resemblance +our expedition located another site, M�lefaat, with general resemblance to Karim Shahir, but about a hundred miles north of it. In 1956, Dr. 
Ralph Solecki located still another Karim Shahir type of site called
Zawi Chemi Shanidar. The Zawi Chemi site has a radiocarbon date of 8900
- 300 B.C.
+± 300 B.C.

Karim Shahir has evidence of only one very shallow level of occupation.
It was probably not lived on very long, although the people who lived
@@ -3717,7 +3717,7 @@ layer yielded great numbers of fist-sized cracked pieces of limestone,
which had been carried up from the bed of a stream at the bottom of the
bluff. We think these cracked stones had something to do with a kind of
architecture, but we were unable to find positive traces of hut plans.
-At Mlefaat and Zawi Chemi, there were traces of rounded hut plans.
+At M’lefaat and Zawi Chemi, there were traces of rounded hut plans.

As in the Natufian, the great bulk of small objects of the Karim Shahir
assemblage was in chipped flint. A large proportion of the flint tools
@@ -3737,7 +3737,7 @@ clay figurines which seemed to be of animal form.
     UNBAKED CLAY
     SHELL
     BONE
-     ARCHITECTURE]
+     “ARCHITECTURE”]

Karim Shahir did not yield direct evidence of the kind of vegetable
food its people ate. The animal bones showed a considerable
@@ -3746,7 +3746,7 @@ domestication--sheep, goat, cattle, horse, dog--as compared with animal
bones from the earlier cave sites of the area, which have a high
proportion of bones of wild forms like deer and gazelle. But we do not
know that any of the Karim Shahir animals were actually domesticated.
-Some of them may have been, in an incipient way, but we have no means
+Some of them may have been, in an “incipient” way, but we have no means
at the moment that will tell us from the bones alone.


@@ -3761,7 +3761,7 @@ goat, and the general animal situation at Karim Shahir to hint at an
incipient approach to food-production. At Karim Shahir, there was the
tendency to settle down out in the open; this is echoed by the new
reports of open air Natufian sites.
The large number of cracked stones
-certainly indicates that it was worth the peoples while to have some
+certainly indicates that it was worth the peoples’ while to have some
kind of structure, even if the site as a whole was short-lived.

It is a part of my hunch that these things all point toward
@@ -3771,13 +3771,13 @@ which we shall look at next, are fully food-producing, the Natufian and
Karim Shahir folk had not yet arrived. I think they were part of a
general build-up to full scale food-production. They were possibly
controlling a few animals of several kinds and perhaps one or two
-plants, without realizing the full possibilities of this control as a
+plants, without realizing the full possibilities of this “control” as a
new way of life. This is why I think of the Karim Shahir and Natufian
folk as being at a level, or in an era, of incipient cultivation and
domestication.

But we shall have to do a great deal more excavation in this range of time
-before well get the kind of positive information we need.
+before we’ll get the kind of positive information we need.


SUMMARY

@@ -3798,7 +3798,7 @@ history.

We know the earliest village-farming communities appeared in western
Asia, in a nuclear area. We do not yet know why the Near Eastern
-experiment came first, or why it didnt happen earlier in some other
+experiment came first, or why it didn’t happen earlier in some other
nuclear area. Apparently, the level of culture and the promise of the
natural environment were ready first in western Asia. The next sites we
look at will show a simple but effective food-production already
@@ -3835,7 +3835,7 @@ contrast between food-collecting and food-producing as ways of life.


THE DIFFERENCE BETWEEN FOOD-COLLECTORS AND FOOD-PRODUCERS

-Childe used the word revolution because of the radical change that
+Childe used the word “revolution” because of the radical change that
took place in the habits and customs of man.
Food-collectors--that is, hunters, fishers, berry- and nut-gatherers--had to live in small groups or bands, for they had to be ready to move wherever their food supply @@ -3851,7 +3851,7 @@ for clothing beyond the tools that were probably used to dress the skins of animals; no time to think of much of anything but food and protection and disposal of the dead when death did come: an existence which takes nature as it finds it, which does little or nothing to -modify nature--all in all, a savages existence, and a very tough one. +modify nature--all in all, a savage�s existence, and a very tough one. A man who spends his whole life following animals just to kill them to eat, or moving from one berry patch to another, is really living just like an animal himself. @@ -3859,10 +3859,10 @@ like an animal himself. THE FOOD-PRODUCING ECONOMY -Against this picture let me try to draw another--that of mans life -after food-production had begun. His meat was stored on the hoof, +Against this picture let me try to draw another--that of man�s life +after food-production had begun. His meat was stored �on the hoof,� his grain in silos or great pottery jars. He lived in a house: it was -worth his while to build one, because he couldnt move far from his +worth his while to build one, because he couldn�t move far from his fields and flocks. In his neighborhood enough food could be grown and enough animals bred so that many people were kept busy. They all lived close to their flocks and fields, in a village. The village was @@ -3872,7 +3872,7 @@ Children and old men could shepherd the animals by day or help with the lighter work in the fields. After the crops had been harvested the younger men might go hunting and some of them would fish, but the food they brought in was only an addition to the food in the village; the -villagers wouldnt starve, even if the hunters and fishermen came home +villagers wouldn�t starve, even if the hunters and fishermen came home empty-handed. 
There was more time to do different things, too. They began to modify @@ -3885,23 +3885,23 @@ people in the village who were becoming full-time craftsmen. Other things were changing, too. The villagers must have had to agree on new rules for living together. The head man of the village had problems different from those of the chief of the small -food-collectors band. If somebodys flock of sheep spoiled a wheat +food-collectors� band. If somebody�s flock of sheep spoiled a wheat field, the owner wanted payment for the grain he lost. The chief of the hunters was never bothered with such questions. Even the gods had changed. The spirits and the magic that had been used by hunters -werent of any use to the villagers. They needed gods who would watch +weren�t of any use to the villagers. They needed gods who would watch over the fields and the flocks, and they eventually began to erect buildings where their gods might dwell, and where the men who knew most about the gods might live. -WAS FOOD-PRODUCTION A REVOLUTION? +WAS FOOD-PRODUCTION A �REVOLUTION�? If you can see the difference between these two pictures--between life in the food-collecting stage and life after food-production -had begun--youll see why Professor Childe speaks of a revolution. -By revolution, he doesnt mean that it happened over night or that -it happened only once. We dont know exactly how long it took. Some +had begun--you�ll see why Professor Childe speaks of a revolution. +By revolution, he doesn�t mean that it happened over night or that +it happened only once. We don�t know exactly how long it took. Some people think that all these changes may have occurred in less than 500 years, but I doubt that. The incipient era was probably an affair of some duration. Once the level of the village-farming community had @@ -3915,7 +3915,7 @@ been achieved with truly revolutionary suddenness. GAPS IN OUR KNOWLEDGE OF THE NEAR EAST -If youll look again at the chart (p. 
111) youll see that I have +If you�ll look again at the chart (p. 111) you�ll see that I have very few sites and assemblages to name in the incipient era of cultivation and domestication, and not many in the earlier part of the primary village-farming level either. Thanks in no small part @@ -3926,20 +3926,20 @@ yard-stick here. But I am far from being able to show you a series of Sears Roebuck catalogues, even century by century, for any part of the nuclear area. There is still a great deal of earth to move, and a great mass of material to recover and interpret before we even begin to -understand how and why. +understand �how� and �why.� Perhaps here, because this kind of archeology is really my specialty, -youll excuse it if I become personal for a moment. I very much look +you�ll excuse it if I become personal for a moment. I very much look forward to having further part in closing some of the gaps in knowledge -of the Near East. This is not, as Ive told you, the spectacular +of the Near East. This is not, as I�ve told you, the spectacular range of Near Eastern archeology. There are no royal tombs, no gold, no great buildings or sculpture, no writing, in fact nothing to excite the normal museum at all. Nevertheless it is a range which, idea-wise, gives the archeologist tremendous satisfaction. The country of the hilly flanks is an exciting combination of green grasslands and mountainous ridges. The Kurds, who inhabit the part of the area -in which Ive worked most recently, are an extremely interesting and -hospitable people. Archeologists dont become rich, but Ill forego +in which I�ve worked most recently, are an extremely interesting and +hospitable people. Archeologists don�t become rich, but I�ll forego the Cadillac for any bright spring morning in the Kurdish hills, on a good site with a happy crew of workmen and an interested and efficient staff. 
It is probably impossible to convey the full feeling which life @@ -3965,15 +3965,15 @@ like the use of pottery borrowed from the more developed era of the same time in the nuclear area. The same general explanation doubtless holds true for certain materials in Egypt, along the upper Nile and in the Kharga oasis: these materials, called Sebilian III, the Khartoum -neolithic, and the Khargan microlithic, are from surface sites, +�neolithic,� and the Khargan microlithic, are from surface sites, not from caves. The chart (p. 111) shows where I would place these materials in era and time. [Illustration: THE HILLY FLANKS OF THE CRESCENT AND EARLY SITES OF THE NEAR EAST] -Both Mlefaat and Dr. Soleckis Zawi Chemi Shanidar site appear to have -been slightly more settled in than was Karim Shahir itself. But I do +Both M�lefaat and Dr. Solecki�s Zawi Chemi Shanidar site appear to have +been slightly more �settled in� than was Karim Shahir itself. But I do not think they belong to the era of farming-villages proper. The first site of this era, in the hills of Iraqi Kurdistan, is Jarmo, on which we have spent three seasons of work. Following Jarmo comes a variety of @@ -3989,9 +3989,9 @@ times when their various cultures flourished, there must have been many little villages which shared the same general assemblage. We are only now beginning to locate them again. Thus, if I speak of Jarmo, or Jericho, or Sialk as single examples of their particular kinds of -assemblages, I dont mean that they were unique at all. I think I could +assemblages, I don�t mean that they were unique at all. I think I could take you to the sites of at least three more Jarmos, within twenty -miles of the original one. They are there, but they simply havent yet +miles of the original one. They are there, but they simply haven�t yet been excavated. 
In 1956, a Danish
expedition discovered material of Jarmo type at Shimshara, only
two dozen miles northeast of Jarmo, and
below an assemblage of Hassunan type (which I shall describe presently).


@@ -4000,15 +4000,15 @@ below an assemblage of Hassunan type (which I shall describe presently).

THE GAP BETWEEN KARIM SHAHIR AND JARMO

As we see the matter now, there is probably still a gap in the
-available archeological record between the Karim Shahir-Mlefaat-Zawi
+available archeological record between the Karim Shahir-M’lefaat-Zawi
Chemi group (of the incipient era) and that of Jarmo (of the
village-farming era). Although some items of the Jarmo type materials
do reflect the beginnings of traditions set in the Karim Shahir group
(see p. 120), there is not a clear continuity. Moreover--to the
degree that we may trust a few radiocarbon dates--there would appear
to be around two thousand years of difference in time. The single
-available Zawi Chemi date is 8900 300 B.C.; the most reasonable
-group of dates from Jarmo average to about 6750 200 B.C. I am
+available Zawi Chemi “date” is 8900 ± 300 B.C.; the most reasonable
+group of “dates” from Jarmo average to about 6750 ± 200 B.C. I am
uncertain about this two thousand years--I do not think it can have
been so long.

@@ -4021,7 +4021,7 @@ JARMO, IN THE KURDISH HILLS, IRAQ

The site of Jarmo has a depth of deposit of about twenty-seven feet,
and approximately a dozen layers of architectural renovation and
-change. Nevertheless it is a one period site: its assemblage remains
+change. Nevertheless it is a “one period” site: its assemblage remains
essentially the same throughout, although one or two new items are
added in later levels. It covers about four acres of the top of a
bluff, below which runs a small stream. Jarmo lies in the hill country
@@ -4078,7 +4078,7 @@ human beings in clay; one type of human figurine they favored was that
of a markedly pregnant woman, probably the expression of some sort of
fertility spirit.
They provided their house floors with baked-in-place depressions, either as basins or hearths, and later with domed ovens of -clay. As weve noted, the houses themselves were of clay or mud; one +clay. As we�ve noted, the houses themselves were of clay or mud; one could almost say they were built up like a house-sized pot. Then, finally, the idea of making portable pottery itself appeared, although I very much doubt that the people of the Jarmo village discovered the @@ -4095,11 +4095,11 @@ over three hundred miles to the north. Already a bulk carrying trade had been established--the forerunner of commerce--and the routes were set by which, in later times, the metal trade was to move. -There are now twelve radioactive carbon dates from Jarmo. The most -reasonable cluster of determinations averages to about 6750 200 -B.C., although there is a completely unreasonable range of dates +There are now twelve radioactive carbon �dates� from Jarmo. The most +reasonable cluster of determinations averages to about 6750 � 200 +B.C., although there is a completely unreasonable range of �dates� running from 3250 to 9250 B.C.! _If_ I am right in what I take to be -reasonable, the first flush of the food-producing revolution had been +�reasonable,� the first flush of the food-producing revolution had been achieved almost nine thousand years ago. @@ -4117,7 +4117,7 @@ it, but the Hassunan sites seem to cluster at slightly lower elevations than those we have been talking about so far. The catalogue of the Hassuna assemblage is of course more full and -elaborate than that of Jarmo. The Iraqi governments archeologists +elaborate than that of Jarmo. The Iraqi government�s archeologists who dug Hassuna itself, exposed evidence of increasing architectural know-how. The walls of houses were still formed of puddled mud; sun-dried bricks appear only in later periods. There were now several @@ -4130,16 +4130,16 @@ largely disappeared by Hassunan times. 
The flint work of the Hassunan catalogue is, by and large, a wretched affair. We might guess that the kinaesthetic concentration of the Hassuna craftsmen now went into other categories; that is, they suddenly discovered they might have more fun -working with the newer materials. Its a shame, for example, that none +working with the newer materials. It�s a shame, for example, that none of their weaving is preserved for us. The two available radiocarbon determinations from Hassunan contexts -stand at about 5100 and 5600 B.C. 250 years. +stand at about 5100 and 5600 B.C. � 250 years. OTHER EARLY VILLAGE SITES IN THE NUCLEAR AREA -Ill now name and very briefly describe a few of the other early +I�ll now name and very briefly describe a few of the other early village assemblages either in or adjacent to the hilly flanks of the crescent. Unfortunately, we do not have radioactive carbon dates for many of these materials. We may guess that some particular assemblage, @@ -4177,7 +4177,7 @@ ecological niche, some seven hundred feet below sea level; it is geographically within the hilly-flanks zone but environmentally not part of it. -Several radiocarbon dates for Jericho fall within the range of those +Several radiocarbon �dates� for Jericho fall within the range of those I find reasonable for Jarmo, and their internal statistical consistency is far better than that for the Jarmo determinations. It is not yet clear exactly what this means. @@ -4226,7 +4226,7 @@ how things were made are different; the Sialk assemblage represents still another cultural pattern. I suspect it appeared a bit later in time than did that of Hassuna. There is an important new item in the Sialk catalogue. The Sialk people made small drills or pins of -hammered copper. Thus the metallurgists specialized craft had made its +hammered copper. Thus the metallurgist�s specialized craft had made its appearance. 
There is at least one very early Iranian site on the inward slopes @@ -4246,7 +4246,7 @@ shore of the Fayum lake. The Fayum materials come mainly from grain bins or silos. Another site, Merimde, in the western part of the Nile delta, shows the remains of a true village, but it may be slightly later than the settlement of the Fayum. There are radioactive carbon -dates for the Fayum materials at about 4275 B.C. 320 years, which +�dates� for the Fayum materials at about 4275 B.C. � 320 years, which is almost fifteen hundred years later than the determinations suggested for the Hassunan or Syro-Cilician assemblages. I suspect that this is a somewhat over-extended indication of the time it took for the @@ -4260,13 +4260,13 @@ the mound called Shaheinab. The Shaheinab catalogue roughly corresponds to that of the Fayum; the distance between the two places, as the Nile flows, is roughly 1,500 miles. Thus it took almost a thousand years for the new way of life to be carried as far south into Africa as Khartoum; -the two Shaheinab dates average about 3300 B.C. 400 years. +the two Shaheinab �dates� average about 3300 B.C. � 400 years. If the movement was up the Nile (southward), as these dates suggest, then I suspect that the earliest available village material of middle Egypt, the so-called Tasian, is also later than that of the Fayum. The Tasian materials come from a few graves near a village called Deir -Tasa, and I have an uncomfortable feeling that the Tasian assemblage +Tasa, and I have an uncomfortable feeling that the Tasian �assemblage� may be mainly an artificial selection of poor examples of objects which belong in the following range of time. @@ -4280,7 +4280,7 @@ spread outward in space from the nuclear area, as time went on. There is good archeological evidence that both these processes took place. 
For the hill country of northeastern Iraq, in the nuclear area, we have already noticed how the succession (still with gaps) from Karim -Shahir, through Mlefaat and Jarmo, to Hassuna can be charted (see +Shahir, through M�lefaat and Jarmo, to Hassuna can be charted (see chart, p. 111). In the next chapter, we shall continue this charting and description of what happened in Iraq upward through time. We also watched traces of the new way of life move through space up the Nile @@ -4299,7 +4299,7 @@ appearance of the village-farming community there--is still an open one. In the last chapter, we noted the probability of an independent nuclear area in southeastern Asia. Professor Carl Sauer strongly champions the great importance of this area as _the_ original center -of agricultural pursuits, as a kind of cradle of all incipient eras +of agricultural pursuits, as a kind of �cradle� of all incipient eras of the Old World at least. While there is certainly not the slightest archeological evidence to allow us to go that far, we may easily expect that an early southeast Asian development would have been felt in @@ -4311,13 +4311,13 @@ way of life moved well beyond Khartoum in Africa. THE SPREAD OF THE VILLAGE-FARMING COMMUNITY WAY OF LIFE INTO EUROPE -How about Europe? I wont give you many details. You can easily imagine +How about Europe? I won�t give you many details. You can easily imagine that the late prehistoric prelude to European history is a complicated affair. We all know very well how complicated an area Europe is now, with its welter of different languages and cultures. Remember, however, that a great deal of archeology has been done on the late prehistory of Europe, and very little on that of further Asia and Africa. If we knew -as much about these areas as we do of Europe, I expect wed find them +as much about these areas as we do of Europe, I expect we�d find them just as complicated. 
This much is clear for Europe, as far as the spread of the @@ -4329,21 +4329,21 @@ in western Asia. I do not, of course, mean that there were traveling salesmen who carried these ideas and things to Europe with a commercial gleam in their eyes. The process took time, and the ideas and things must have been passed on from one group of people to the next. There -was also some actual movement of peoples, but we dont know the size of +was also some actual movement of peoples, but we don�t know the size of the groups that moved. -The story of the colonization of Europe by the first farmers is +The story of the �colonization� of Europe by the first farmers is thus one of (1) the movement from the eastern Mediterranean lands of some people who were farmers; (2) the spread of ideas and things beyond the Near East itself and beyond the paths along which the -colonists moved; and (3) the adaptations of the ideas and things -by the indigenous Forest folk, about whose receptiveness Professor +�colonists� moved; and (3) the adaptations of the ideas and things +by the indigenous �Forest folk�, about whose �receptiveness� Professor Mathiassen speaks (p. 97). It is important to note that the resulting cultures in the new European environment were European, not Near -Eastern. The late Professor Childe remarked that the peoples of the +Eastern. The late Professor Childe remarked that �the peoples of the West were not slavish imitators; they adapted the gifts from the East ... into a new and organic whole capable of developing on its own -original lines. +original lines.� THE WAYS TO EUROPE @@ -4389,19 +4389,19 @@ Hill, the earliest known trace of village-farming communities in England, is about 2500 B.C. I would expect about 5500 B.C. to be a safe date to give for the well-developed early village communities of Syro-Cilicia. We suspect that the spread throughout Europe did not -proceed at an even rate. Professor Piggott writes that at a date +proceed at an even rate. 
Professor Piggott writes that �at a date probably about 2600 B.C., simple agricultural communities were being established in Spain and southern France, and from the latter region a spread northwards can be traced ... from points on the French seaboard of the [English] Channel ... there were emigrations of a certain number of these tribes by boat, across to the chalk lands of Wessex and Sussex [in England], probably not more than three or four generations later -than the formation of the south French colonies. +than the formation of the south French colonies.� New radiocarbon determinations are becoming available all the time--already several suggest that the food-producing way of life had reached the lower Rhine and Holland by 4000 B.C. But not all -prehistorians accept these dates, so I do not show them on my map +prehistorians accept these �dates,� so I do not show them on my map (p. 139). @@ -4427,7 +4427,7 @@ concentric sets of banks and ditches. Traces of oblong timber houses have been found, but not within the enclosures. The second type of structure is mine-shafts, dug down into the chalk beds where good flint for the making of axes or hoes could be found. The third type -of structure is long simple mounds or unchambered barrows, in one +of structure is long simple mounds or �unchambered barrows,� in one end of which burials were made. It has been commonly believed that the Windmill Hill assemblage belonged entirely to the cultural tradition which moved up through France to the Channel. Professor Piggott is now @@ -4443,12 +4443,12 @@ consists mainly of tombs and the contents of tombs, with only very rare settlement sites. The tombs were of some size and received the bodies of many people. The tombs themselves were built of stone, heaped over with earth; the stones enclosed a passage to a central chamber -(passage graves), or to a simple long gallery, along the sides of -which the bodies were laid (gallery graves). 
The general type of -construction is called megalithic (= great stone), and the whole +(�passage graves�), or to a simple long gallery, along the sides of +which the bodies were laid (�gallery graves�). The general type of +construction is called �megalithic� (= great stone), and the whole earth-mounded structure is often called a _barrow_. Since many have -proper chambers, in one sense or another, we used the term unchambered -barrow above to distinguish those of the Windmill Hill type from these +proper chambers, in one sense or another, we used the term �unchambered +barrow� above to distinguish those of the Windmill Hill type from these megalithic structures. There is some evidence for sacrifice, libations, and ceremonial fires, and it is clear that some form of community ritual was focused on the megalithic tombs. @@ -4466,7 +4466,7 @@ The third early British group of antiquities of this general time It is not so certain that the people who made this assemblage, called Peterborough, were actually farmers. While they may on occasion have practiced a simple agriculture, many items of their assemblage link -them closely with that of the Forest folk of earlier times in +them closely with that of the �Forest folk� of earlier times in England and in the Baltic countries. Their pottery is decorated with impressions of cords and is quite different from that of Windmill Hill and the megalithic builders. In addition, the distribution of their @@ -4479,7 +4479,7 @@ to acquire the raw material for stone axes. A probably slightly later culture, whose traces are best known from Skara Brae on Orkney, also had its roots in those cultures of the -Baltic area which fused out of the meeting of the Forest folk and +Baltic area which fused out of the meeting of the �Forest folk� and the peoples who took the eastern way into Europe. Skara Brae is very well preserved, having been built of thin stone slabs about which dune-sand drifted after the village died. 
The individual houses, the @@ -4498,14 +4498,14 @@ details which I have omitted in order to shorten the story. I believe some of the difficulty we have in understanding the establishment of the first farming communities in Europe is with -the word colonization. We have a natural tendency to think of -colonization as it has happened within the last few centuries. In the +the word �colonization.� We have a natural tendency to think of +�colonization� as it has happened within the last few centuries. In the case of the colonization of the Americas, for example, the colonists came relatively quickly, and in increasingly vast numbers. They had vastly superior technical, political, and war-making skills, compared with those of the Indians. There was not much mixing with the Indians. The case in Europe five or six thousand years ago must have been very -different. I wonder if it is even proper to call people colonists +different. I wonder if it is even proper to call people �colonists� who move some miles to a new region, settle down and farm it for some years, then move on again, generation after generation? The ideas and the things which these new people carried were only _potentially_ @@ -4521,12 +4521,12 @@ migrants were moving by boat, long distances may have been covered in a short time. Remember, however, we seem to have about three thousand years between the early Syro-Cilician villages and Windmill Hill. -Let me repeat Professor Childe again. The peoples of the West were +Let me repeat Professor Childe again. �The peoples of the West were not slavish imitators: they adapted the gifts from the East ... into a new and organic whole capable of developing on its own original -lines. Childe is of course completely conscious of the fact that his -peoples of the West were in part the descendants of migrants who came -originally from the East, bringing their gifts with them. 
This +lines.� Childe is of course completely conscious of the fact that his +�peoples of the West� were in part the descendants of migrants who came +originally from the �East,� bringing their �gifts� with them. This was the late prehistoric achievement of Europe--to take new ideas and things and some migrant peoples and, by mixing them with the old in its own environments, to forge a new and unique series of cultures. @@ -4553,14 +4553,14 @@ things first happened there and also because I know it best. There is another interesting thing, too. We have seen that the first experiment in village-farming took place in the Near East. So did -the first experiment in civilization. Both experiments took. The +the first experiment in civilization. Both experiments �took.� The traditions we live by today are based, ultimately, on those ancient beginnings in food-production and civilization in the Near East. -WHAT CIVILIZATION MEANS +WHAT �CIVILIZATION� MEANS -I shall not try to define civilization for you; rather, I shall +I shall not try to define �civilization� for you; rather, I shall tell you what the word brings to my mind. To me civilization means urbanization: the fact that there are cities. It means a formal political set-up--that there are kings or governing bodies that the @@ -4606,7 +4606,7 @@ of Mexico, the Mayas of Yucatan and Guatemala, and the Incas of the Andes were civilized. -WHY DIDNT CIVILIZATION COME TO ALL FOOD-PRODUCERS? +WHY DIDN�T CIVILIZATION COME TO ALL FOOD-PRODUCERS? Once you have food-production, even at the well-advanced level of the village-farming community, what else has to happen before you @@ -4625,13 +4625,13 @@ early civilization, is still an open and very interesting question. WHERE CIVILIZATION FIRST APPEARED IN THE NEAR EAST You remember that our earliest village-farming communities lay along -the hilly flanks of a great crescent. (See map on p. 125.) 
-Professor Breasteds fertile crescent emphasized the rich river +the hilly flanks of a great �crescent.� (See map on p. 125.) +Professor Breasted�s �fertile crescent� emphasized the rich river valleys of the Nile and the Tigris-Euphrates Rivers. Our hilly-flanks area of the crescent zone arches up from Egypt through Palestine and Syria, along southern Turkey into northern Iraq, and down along the southwestern fringe of Iran. The earliest food-producing villages we -know already existed in this area by about 6750 B.C. ( 200 years). +know already existed in this area by about 6750 B.C. (� 200 years). Now notice that this hilly-flanks zone does not include southern Mesopotamia, the alluvial land of the lower Tigris and Euphrates in @@ -4639,7 +4639,7 @@ Iraq, or the Nile Valley proper. The earliest known villages of classic Mesopotamia and Egypt seem to appear fifteen hundred or more years after those of the hilly-flanks zone. For example, the early Fayum village which lies near a lake west of the Nile Valley proper (see p. -135) has a radiocarbon date of 4275 B.C. 320 years. It was in the +135) has a radiocarbon date of 4275 B.C. � 320 years. It was in the river lands, however, that the immediate beginnings of civilization were made. @@ -4657,8 +4657,8 @@ THE HILLY-FLANKS ZONE VERSUS THE RIVER LANDS Why did these two civilizations spring up in these two river lands which apparently were not even part of the area where the -village-farming community began? Why didnt we have the first -civilizations in Palestine, Syria, north Iraq, or Iran, where were +village-farming community began? Why didn�t we have the first +civilizations in Palestine, Syria, north Iraq, or Iran, where we�re sure food-production had had a long time to develop? I think the probable answer gives a clue to the ways in which civilization began in Egypt and Mesopotamia. @@ -4669,7 +4669,7 @@ and Syria. 
There are pleasant mountain slopes, streams running out to the sea, and rain, at least in the winter months. The rain belt and the foothills of the Turkish mountains also extend to northern Iraq and on to the Iranian plateau. The Iranian plateau has its mountain valleys, -streams, and some rain. These hilly flanks of the crescent, through +streams, and some rain. These hilly flanks of the �crescent,� through most of its arc, are almost made-to-order for beginning farmers. The grassy slopes of the higher hills would be pasture for their herds and flocks. As soon as the earliest experiments with agriculture and @@ -4720,10 +4720,10 @@ Obviously, we can no longer find the first dikes or reservoirs of the Nile Valley, or the first canals or ditches of Mesopotamia. The same land has been lived on far too long for any traces of the first attempts to be left; or, especially in Egypt, it has been covered by -the yearly deposits of silt, dropped by the river floods. But were +the yearly deposits of silt, dropped by the river floods. But we�re pretty sure the first food-producers of Egypt and southern Mesopotamia must have made such dikes, canals, and ditches. In the first place, -there cant have been enough rain for them to grow things otherwise. +there can�t have been enough rain for them to grow things otherwise. In the second place, the patterns for such projects seem to have been pretty well set by historic times. @@ -4733,10 +4733,10 @@ CONTROL OF THE RIVERS THE BUSINESS OF EVERYONE Here, then, is a _part_ of the reason why civilization grew in Egypt and Mesopotamia first--not in Palestine, Syria, or Iran. In the latter areas, people could manage to produce their food as individuals. It -wasnt too hard; there were rain and some streams, and good pasturage +wasn�t too hard; there were rain and some streams, and good pasturage for the animals even if a crop or two went wrong. 
In Egypt and Mesopotamia, people had to put in a much greater amount of work, and -this work couldnt be individual work. Whole villages or groups of +this work couldn�t be individual work. Whole villages or groups of people had to turn out to fix dikes or dig ditches. The dikes had to be repaired and the ditches carefully cleared of silt each year, or they would become useless. @@ -4745,7 +4745,7 @@ There also had to be hard and fast rules. The person who lived nearest the ditch or the reservoir must not be allowed to take all the water and leave none for his neighbors. It was not only a business of learning to control the rivers and of making their waters do the -farmers work. It also meant controlling men. But once these men had +farmer�s work. It also meant controlling men. But once these men had managed both kinds of controls, what a wonderful yield they had! The soil was already fertile, and the silt which came in the floods and ditches kept adding fertile soil. @@ -4756,7 +4756,7 @@ THE GERM OF CIVILIZATION IN EGYPT AND MESOPOTAMIA This learning to work together for the common good was the real germ of the Egyptian and the Mesopotamian civilizations. The bare elements of civilization were already there: the need for a governing hand and for -laws to see that the communities work was done and that the water was +laws to see that the communities� work was done and that the water was justly shared. You may object that there is a sort of chicken and egg paradox in this idea. How could the people set up the rules until they had managed to get a way to live, and how could they manage to get a @@ -4781,12 +4781,12 @@ My explanation has been pointed particularly at Egypt and Mesopotamia. I have already told you that the irrigation and water-control part of it does not apply to the development of the Aztecs or the Mayas, or perhaps anybody else. But I think that a fair part of the story of -Egypt and Mesopotamia must be as Ive just told you. 
+Egypt and Mesopotamia must be as I�ve just told you. I am particularly anxious that you do _not_ understand me to mean that irrigation _caused_ civilization. I am sure it was not that simple at all. For, in fact, a complex and highly engineered irrigation system -proper did not come until later times. Lets say rather that the simple +proper did not come until later times. Let�s say rather that the simple beginnings of irrigation allowed and in fact encouraged a great number of things in the technological, political, social, and moral realms of culture. We do not yet understand what all these things were or how @@ -4842,7 +4842,7 @@ the mound which later became the holy Sumerian city of Eridu, Iraqi archeologists uncovered a handsome painted pottery. Pottery of the same type had been noticed earlier by German archeologists on the surface of a small mound, awash in the spring floods, near the remains of the -Biblical city of Erich (Sumerian = Uruk; Arabic = Warka). This Eridu +Biblical city of Erich (Sumerian = Uruk; Arabic = Warka). This �Eridu� pottery, which is about all we have of the assemblage of the people who once produced it, may be seen as a blend of the Samarran and Halafian painted pottery styles. This may over-simplify the case, but as yet we @@ -4864,7 +4864,7 @@ seems to move into place before the Halaf manifestation is finished, and to blend with it. The Ubaidian assemblage in the south is by far the more spectacular. The development of the temple has been traced at Eridu from a simple little structure to a monumental building some -62 feet long, with a pilaster-decorated faade and an altar in its +62 feet long, with a pilaster-decorated fa�ade and an altar in its central chamber. 
There is painted Ubaidian pottery, but the style is hurried and somewhat careless and gives the _impression_ of having been a cheap mass-production means of decoration when compared with the @@ -4879,7 +4879,7 @@ turtle-like faces are another item in the southern Ubaidian assemblage. There is a large Ubaid cemetery at Eridu, much of it still awaiting excavation. The few skeletons so far tentatively studied reveal a -completely modern type of Mediterraneanoid; the individuals whom the +completely modern type of �Mediterraneanoid�; the individuals whom the skeletons represent would undoubtedly blend perfectly into the modern population of southern Iraq. What the Ubaidian assemblage says to us is that these people had already adapted themselves and their culture to @@ -4925,7 +4925,7 @@ woven stuffs must have been the mediums of exchange. Over what area did the trading net-work of Ubaid extend? We start with the idea that the Ubaidian assemblage is most richly developed in the south. We assume, I think, correctly, that it represents a cultural flowering of the south. -On the basis of the pottery of the still elusive Eridu immigrants +On the basis of the pottery of the still elusive �Eridu� immigrants who had first followed the rivers into alluvial Mesopotamia, we get the notion that the characteristic painted pottery style of Ubaid was developed in the southland. If this reconstruction is correct @@ -4935,7 +4935,7 @@ assemblage of (and from the southern point of view, _fairly_ pure) Ubaidian material in northern Iraq. The pottery appears all along the Iranian flanks, even well east of the head of the Persian Gulf, and ends in a later and spectacular flourish in an extremely handsome -painted style called the Susa style. Ubaidian pottery has been noted +painted style called the �Susa� style. Ubaidian pottery has been noted up the valleys of both of the great rivers, well north of the Iraqi and Syrian borders on the southern flanks of the Anatolian plateau. 
It reaches the Mediterranean Sea and the valley of the Orontes in @@ -4965,10 +4965,10 @@ Mesopotamia. Next, much to our annoyance, we have what is almost a temporary black-out. According to the system of terminology I favor, our next -assemblage after that of Ubaid is called the _Warka_ phase, from +�assemblage� after that of Ubaid is called the _Warka_ phase, from the Arabic name for the site of Uruk or Erich. We know it only from six or seven levels in a narrow test-pit at Warka, and from an even -smaller hole at another site. This assemblage, so far, is known only +smaller hole at another site. This �assemblage,� so far, is known only by its pottery, some of which still bears Ubaidian style painting. The characteristic Warkan pottery is unpainted, with smoothed red or gray surfaces and peculiar shapes. Unquestionably, there must be a great @@ -4979,7 +4979,7 @@ have to excavate it! THE DAWN OF CIVILIZATION After our exasperation with the almost unknown Warka interlude, -following the brilliant false dawn of Ubaid, we move next to an +following the brilliant �false dawn� of Ubaid, we move next to an assemblage which yields traces of a preponderance of those elements which we noted (p. 144) as meaning civilization. This assemblage is that called _Proto-Literate_; it already contains writing. On @@ -4988,8 +4988,8 @@ history--and no longer prehistory--the assemblage is named for the historical implications of its content, and no longer after the name of the site where it was first found. Since some of the older books used site-names for this assemblage, I will tell you that the Proto-Literate -includes the latter half of what used to be called the Uruk period -_plus_ all of what used to be called the Jemdet Nasr period. It shows +includes the latter half of what used to be called the �Uruk period� +_plus_ all of what used to be called the �Jemdet Nasr period.� It shows a consistent development from beginning to end. 
I shall, in fact, leave much of the description and the historic @@ -5033,18 +5033,18 @@ mental block seems to have been removed. Clay tablets bearing pictographic signs are the Proto-Literate forerunners of cuneiform writing. The earliest examples are not well -understood but they seem to be devices for making accounts and -for remembering accounts. Different from the later case in Egypt, +understood but they seem to be �devices for making accounts and +for remembering accounts.� Different from the later case in Egypt, where writing appears fully formed in the earliest examples, the development from simple pictographic signs to proper cuneiform writing may be traced, step by step, in Mesopotamia. It is most probable that the development of writing was connected with the temple and -the need for keeping account of the temples possessions. Professor +the need for keeping account of the temple�s possessions. Professor Jacobsen sees writing as a means for overcoming space, time, and the -increasing complications of human affairs: Literacy, which began +increasing complications of human affairs: �Literacy, which began with ... civilization, enhanced mightily those very tendencies in its development which characterize it as a civilization and mark it off as -such from other types of culture. +such from other types of culture.� [Illustration: RELIEF ON A PROTO-LITERATE STONE VASE, WARKA @@ -5098,7 +5098,7 @@ civilized way of life. I suppose you could say that the difference in the approach is that as a prehistorian I have been looking forward or upward in time, while the -historians look backward to glimpse what Ive been describing here. My +historians look backward to glimpse what I�ve been describing here. My base-line was half a million years ago with a being who had little more than the capacity to make tools and fire to distinguish him from the animals about him. 
Thus my point of view and that of the conventional @@ -5114,17 +5114,17 @@ End of PREHISTORY [Illustration] -Youll doubtless easily recall your general course in ancient history: +You�ll doubtless easily recall your general course in ancient history: how the Sumerian dynasties of Mesopotamia were supplanted by those of Babylonia, how the Hittite kingdom appeared in Anatolian Turkey, and about the three great phases of Egyptian history. The literate kingdom of Crete arose, and by 1500 B.C. there were splendid fortified Mycenean towns on the mainland of Greece. This was the time--about the whole eastern end of the Mediterranean--of what Professor Breasted called the -first great internationalism, with flourishing trade, international +�first great internationalism,� with flourishing trade, international treaties, and royal marriages between Egyptians, Babylonians, and -Hittites. By 1200 B.C., the whole thing had fragmented: the peoples of -the sea were restless in their isles, and the great ancient centers in +Hittites. By 1200 B.C., the whole thing had fragmented: �the peoples of +the sea were restless in their isles,� and the great ancient centers in Egypt, Mesopotamia, and Anatolia were eclipsed. Numerous smaller states arose--Assyria, Phoenicia, Israel--and the Trojan war was fought. Finally Assyria became the paramount power of all the Near East, @@ -5135,7 +5135,7 @@ but casting them with its own tradition into a new mould, arose in mainland Greece. I once shocked my Classical colleagues to the core by referring to -Greece as a second degree derived civilization, but there is much +Greece as �a second degree derived civilization,� but there is much truth in this. The principles of bronze- and then of iron-working, of the alphabet, and of many other elements in Greek culture were borrowed from western Asia. Our debt to the Greeks is too well known for me even @@ -5146,7 +5146,7 @@ Greece fell in its turn to Rome, and in 55 B.C. Caesar invaded Britain. 
I last spoke of Britain on page 142; I had chosen it as my single example for telling you something of how the earliest farming communities were established in Europe. Now I will continue with -Britains later prehistory, so you may sense something of the end of +Britain�s later prehistory, so you may sense something of the end of prehistory itself. Remember that Britain is simply a single example we select; the same thing could be done for all the other countries of Europe, and will be possible also, some day, for further Asia and @@ -5186,20 +5186,20 @@ few Battle-axe folk elements, including, in fact, stone battle-axes, reached England with the earliest Beaker folk,[6] coming from the Rhineland. - [6] The British authors use the term Beaker folk to mean both + [6] The British authors use the term �Beaker folk� to mean both archeological assemblage and human physical type. They speak - of a ... tall, heavy-boned, rugged, and round-headed strain + of a �... tall, heavy-boned, rugged, and round-headed� strain which they take to have developed, apparently in the Rhineland, by a mixture of the original (Spanish?) beaker-makers and the northeast European battle-axe makers. However, since the science of physical anthropology is very much in flux at the moment, and since I am not able to assess the evidence for these - physical types, I _do not_ use the term folk in this book with + physical types, I _do not_ use the term �folk� in this book with its usual meaning of standardized physical type. When I use - folk here, I mean simply _the makers of a given archeological + �folk� here, I mean simply _the makers of a given archeological assemblage_. The difficulty only comes when assemblages are named for some item in them; it is too clumsy to make an - adjective of the item and refer to a beakerian assemblage. + adjective of the item and refer to a �beakerian� assemblage. The Beaker folk settled earliest in the agriculturally fertile south and east. 
There seem to have been several phases of Beaker folk @@ -5211,7 +5211,7 @@ folk are known. They buried their dead singly, sometimes in conspicuous individual barrows with the dead warrior in his full trappings. The spectacular element in the assemblage of the Beaker folk is a group of large circular monuments with ditches and with uprights of wood or -stone. These henges became truly monumental several hundred years +stone. These �henges� became truly monumental several hundred years later; while they were occasionally dedicated with a burial, they were not primarily tombs. The effect of the invasion of the Beaker folk seems to cut across the whole fabric of life in Britain. @@ -5221,7 +5221,7 @@ seems to cut across the whole fabric of life in Britain. There was, however, a second major element in British life at this time. It shows itself in the less well understood traces of a group again called after one of the items in their catalogue, the Food-vessel -folk. There are many burials in these food-vessel pots in northern +folk. There are many burials in these �food-vessel� pots in northern England, Scotland, and Ireland, and the pottery itself seems to link back to that of the Peterborough assemblage. Like the earlier Peterborough people in the highland zone before them, the makers of @@ -5238,8 +5238,8 @@ MORE INVASIONS About 1500 B.C., the situation became further complicated by the arrival of new people in the region of southern England anciently called Wessex. The traces suggest the Brittany coast of France as a -source, and the people seem at first to have been a small but heroic -group of aristocrats. Their heroes are buried with wealth and +source, and the people seem at first to have been a small but �heroic� +group of aristocrats. Their �heroes� are buried with wealth and ceremony, surrounded by their axes and daggers of bronze, their gold ornaments, and amber and jet beads. 
These rich finds show that the trade-linkage these warriors patronized spread from the Baltic sources @@ -5265,10 +5265,10 @@ which must have been necessary before such a great monument could have been built. -THIS ENGLAND +�THIS ENGLAND� The range from 1900 to about 1400 B.C. includes the time of development -of the archeological features usually called the Early Bronze Age +of the archeological features usually called the �Early Bronze Age� in Britain. In fact, traces of the Wessex warriors persisted down to about 1200 B.C. The main regions of the island were populated, and the adjustments to the highland and lowland zones were distinct and well @@ -5279,7 +5279,7 @@ trading role, separated from the European continent but conveniently adjacent to it. The tin of Cornwall--so important in the production of good bronze--as well as the copper of the west and of Ireland, taken with the gold of Ireland and the general excellence of Irish -metal work, assured Britain a traders place in the then known world. +metal work, assured Britain a trader�s place in the then known world. Contacts with the eastern Mediterranean may have been by sea, with Cornish tin as the attraction, or may have been made by the Food-vessel middlemen on their trips to the Baltic coast. There they would have @@ -5292,9 +5292,9 @@ relative isolation gave some peace and also gave time for a leveling and further fusion of culture. The separate cultural traditions began to have more in common. The growing of barley, the herding of sheep and cattle, and the production of woolen garments were already features -common to all Britains inhabitants save a few in the remote highlands, +common to all Britain�s inhabitants save a few in the remote highlands, the far north, and the distant islands not yet fully touched by -food-production. The personality of Britain was being formed. +food-production. The �personality of Britain� was being formed. 
CREMATION BURIALS BEGIN @@ -5325,9 +5325,9 @@ which we shall mention below. The British cremation-burial-in-urns folk survived a long time in the highland zone. In the general British scheme, they make up what is -called the Middle Bronze Age, but in the highland zone they last +called the �Middle Bronze Age,� but in the highland zone they last until after 900 B.C. and are considered to be a specialized highland -Late Bronze Age. In the highland zone, these later cremation-burial +�Late Bronze Age.� In the highland zone, these later cremation-burial folk seem to have continued the older Food-vessel tradition of being middlemen in the metal market. @@ -5379,12 +5379,12 @@ to get a picture of estate or tribal boundaries which included village communities; we find a variety of tools in bronze, and even whetstones which show that iron has been honed on them (although the scarce iron has not been found). Let me give you the picture in Professor S. -Piggotts words: The ... Late Bronze Age of southern England was but +Piggott�s words: �The ... Late Bronze Age of southern England was but the forerunner of the earliest Iron Age in the same region, not only in the techniques of agriculture, but almost certainly in terms of ethnic kinship ... we can with some assurance talk of the Celts ... the great early Celtic expansion of the Continent is recognized to be that of the -Urnfield people. +Urnfield people.� Thus, certainly by 500 B.C., there were people in Britain, some of whose descendants we may recognize today in name or language in remote @@ -5399,11 +5399,11 @@ efficient set of tools than does bronze. Iron tools seem first to have been made in quantity in Hittite Anatolia about 1500 B.C. In continental Europe, the earliest, so-called Hallstatt, iron-using cultures appeared in Germany soon after 750 B.C. 
Somewhat later, -Greek and especially Etruscan exports of _objets dart_--which moved +Greek and especially Etruscan exports of _objets d’art_--which moved with a flourishing trans-Alpine wine trade--influenced the Hallstatt iron-working tradition. Still later new classical motifs, together with older Hallstatt, oriental, and northern nomad motifs, gave rise to a -new style in metal decoration which characterizes the so-called La Tne +new style in metal decoration which characterizes the so-called La Tène phase. A few iron users reached Britain a little before 400 B.C. Not long @@ -5422,7 +5422,7 @@ HILL-FORTS AND FARMS The earliest iron-users seem to have entrenched themselves temporarily within hill-top forts, mainly in the south. Gradually, they moved inland, establishing _individual_ farm sites with extensive systems -of rectangular fields. We recognize these fields by the lynchets or +of rectangular fields. We recognize these fields by the “lynchets” or lines of soil-creep which plowing left on the slopes of hills. New crops appeared; there were now bread wheat, oats, and rye, as well as barley. @@ -5434,7 +5434,7 @@ various outbuildings and pits for the storage of grain. Weaving was done on the farm, but not blacksmithing, which must have been a specialized trade. Save for the lack of firearms, the place might almost be taken for a farmstead on the American frontier in the early -1800s. +1800’s. Toward 250 B.C. there seems to have been a hasty attempt to repair the hill-forts and to build new ones, evidently in response to signs of @@ -5446,9 +5446,9 @@ THE SECOND PHASE Perhaps the hill-forts were not entirely effective or perhaps a compromise was reached. In any case, the newcomers from the Marne district did establish themselves, first in the southeast and then to -the north and west. They brought iron with decoration of the La Tne +the north and west. They brought iron with decoration of the La Tène type and also the two-wheeled chariot. 
Like the Wessex warriors of -over a thousand years earlier, they made heroes graves, with their +over a thousand years earlier, they made “heroes’” graves, with their warriors buried in the war-chariots and dressed in full trappings. [Illustration: CELTIC BUCKLE] @@ -5457,7 +5457,7 @@ The metal work of these Marnian newcomers is excellent. The peculiar Celtic art style, based originally on the classic tendril motif, is colorful and virile, and fits with Greek and Roman descriptions of Celtic love of color in dress. There is a strong trace of these -newcomers northward in Yorkshire, linked by Ptolemys description to +newcomers northward in Yorkshire, linked by Ptolemy’s description to the Parisii, doubtless part of the Celtic tribe which originally gave its name to Paris on the Seine. Near Glastonbury, in Somerset, two villages in swamps have been excavated. They seem to date toward the @@ -5469,7 +5469,7 @@ villagers. In Scotland, which yields its first iron tools at a date of about 100 B.C., and in northern Ireland even slightly earlier, the effects of the -two phases of newcomers tend especially to blend. Hill-forts, brochs +two phases of newcomers tend especially to blend. Hill-forts, “brochs” (stone-built round towers) and a variety of other strange structures seem to appear as the new ideas develop in the comparative isolation of northern Britain. @@ -5493,27 +5493,27 @@ at last, we can even begin to speak of dynasties and individuals. Some time before 55 B.C., the Catuvellauni, originally from the Marne district in France, had possessed themselves of a large part of southeastern England. They evidently sailed up the Thames and built a -town of over a hundred acres in area. Here ruled Cassivellaunus, the -first man in England whose name we know, and whose town Caesar sacked. +town of over a hundred acres in area. Here ruled Cassivellaunus, “the +first man in England whose name we know,” and whose town Caesar sacked. The town sprang up elsewhere again, however. 
THE END OF PREHISTORY Prehistory, strictly speaking, is now over in southern Britain. -Claudius effective invasion took place in 43 A.D.; by 83 A.D., a raid +Claudius’ effective invasion took place in 43 A.D.; by 83 A.D., a raid had been made as far north as Aberdeen in Scotland. But by 127 A.D., Hadrian had completed his wall from the Solway to the Tyne, and the Romans settled behind it. In Scotland, Romanization can have affected -the countryside very little. Professor Piggott adds that ... it is +the countryside very little. Professor Piggott adds that “... it is when the pressure of Romanization is relaxed by the break-up of the Dark Ages that we see again the Celtic metal-smiths handling their material with the same consummate skill as they had before the Roman Conquest, and with traditional styles that had not even then forgotten -their Marnian and Belgic heritage. +their Marnian and Belgic heritage.” In fact, many centuries go by, in Britain as well as in the rest of -Europe, before the archeologists task is complete and the historian on +Europe, before the archeologist’s task is complete and the historian on his own is able to describe the ways of men in the past. @@ -5524,7 +5524,7 @@ you will have noticed how often I had to refer to the European continent itself. Britain, beyond the English Channel for all of her later prehistory, had a much simpler course of events than did most of the rest of Europe in later prehistoric times. This holds, in spite -of all the invasions and reverberations from the continent. Most +of all the “invasions” and “reverberations” from the continent. Most of Europe was the scene of an even more complicated ebb and flow of cultural change, save in some of its more remote mountain valleys and peninsulas. @@ -5536,7 +5536,7 @@ accounts and some good general accounts of part of the range from about 3000 B.C. to A.D. 1. 
I suspect that the difficulty of making a good book that covers all of its later prehistory is another aspect of what makes Europe so very complicated a continent today. The prehistoric -foundations for Europes very complicated set of civilizations, +foundations for Europe’s very complicated set of civilizations, cultures, and sub-cultures--which begin to appear as history proceeds--were in themselves very complicated. @@ -5552,8 +5552,8 @@ of their journeys. But by the same token, they had had time en route to take on their characteristic European aspects. Some time ago, Sir Cyril Fox wrote a famous book called _The -Personality of Britain_, sub-titled Its Influence on Inhabitant and -Invader in Prehistoric and Early Historic Times. We have not gone +Personality of Britain_, sub-titled “Its Influence on Inhabitant and +Invader in Prehistoric and Early Historic Times.” We have not gone into the post-Roman early historic period here; there are still the Anglo-Saxons and Normans to account for as well as the effects of the Romans. But what I have tried to do was to begin the story of @@ -5570,7 +5570,7 @@ Summary In the pages you have read so far, you have been brought through the -earliest 99 per cent of the story of mans life on this planet. I have +earliest 99 per cent of the story of man’s life on this planet. I have left only 1 per cent of the story for the historians to tell. @@ -5601,7 +5601,7 @@ But I think there may have been a few. Certainly the pace of the first act accelerated with the swing from simple gathering to more intensified collecting. The great cave art of France and Spain was probably an expression of a climax. Even the ideas of burying the dead -and of the Venus figurines must also point to levels of human thought +and of the “Venus” figurines must also point to levels of human thought and activity that were over and above pure food-getting. @@ -5629,7 +5629,7 @@ five thousand years after the second act began. 
But it could never have happened in the first act at all. There is another curious thing about the first act. Many of the players -didnt know it was over and they kept on with their roles long after +didn’t know it was over and they kept on with their roles long after the second act had begun. On the edges of the stage there are today some players who are still going on with the first act. The Eskimos, and the native Australians, and certain tribes in the Amazon jungle are @@ -5680,20 +5680,20 @@ act may have lessons for us and give depth to our thinking. I know there are at least _some_ lessons, even in the present incomplete state of our knowledge. The players who began the second act--that of food-production--separately, in different parts of the world, were not -all of one pure race nor did they have pure cultural traditions. +all of one “pure race” nor did they have “pure” cultural traditions. Some apparently quite mixed Mediterraneans got off to the first start on the second act and brought it to its first two climaxes as well. Peoples of quite different physical type achieved the first climaxes in China and in the New World. In our British example of how the late prehistory of Europe worked, we -listed a continuous series of invasions and reverberations. After +listed a continuous series of “invasions” and “reverberations.” After each of these came fusion. Even though the Channel protected Britain from some of the extreme complications of the mixture and fusion of continental Europe, you can see how silly it would be to refer to a -pure British race or a pure British culture. We speak of the United -States as a melting pot. But this is nothing new. Actually, Britain -and all the rest of the world have been melting pots at one time or +“pure” British race or a “pure” British culture. We speak of the United +States as a “melting pot.” But this is nothing new. Actually, Britain +and all the rest of the world have been “melting pots” at one time or another. 
By the time the written records of Mesopotamia and Egypt begin to turn @@ -5703,12 +5703,12 @@ itself, we are thrown back on prehistoric archeology. And this is as true for China, India, Middle America, and the Andes, as it is for the Near East. -There are lessons to be learned from all of mans past, not simply +There are lessons to be learned from all of man’s past, not simply lessons of how to fight battles or win peace conferences, but of how human society evolves from one stage to another. Many of these lessons can only be looked for in the prehistoric past. So far, we have only made a beginning. There is much still to do, and many gaps in the story -are yet to be filled. The prehistorians job is to find the evidence, +are yet to be filled. The prehistorian’s job is to find the evidence, to fill the gaps, and to discover the lessons men have learned in the past. As I see it, this is not only an exciting but a very practical goal for which to strive. @@ -5745,7 +5745,7 @@ paperbound books.) GEOCHRONOLOGY AND THE ICE AGE -(Two general books. Some Pleistocene geologists disagree with Zeuners +(Two general books. Some Pleistocene geologists disagree with Zeuner’s interpretation of the dating evidence, but their points of view appear in professional journals, in articles too cumbersome to list here.) @@ -5815,7 +5815,7 @@ GENERAL PREHISTORY Press. Movius, Hallam L., Jr. - Old World Prehistory: Paleolithic in _Anthropology Today_. + “Old World Prehistory: Paleolithic” in _Anthropology Today_. Kroeber, A. L., ed. 1953. University of Chicago Press. Oakley, Kenneth P. @@ -5826,7 +5826,7 @@ GENERAL PREHISTORY _British Prehistory._ 1949. Oxford University Press. Pittioni, Richard - _Die Urgeschichtlichen Grundlagen der Europischen Kultur._ + _Die Urgeschichtlichen Grundlagen der Europäischen Kultur._ 1949. Deuticke. (A single book which does attempt to cover the whole range of European prehistory to ca. 1 A.D.) 
@@ -5834,7 +5834,7 @@ GENERAL PREHISTORY THE NEAR EAST Adams, Robert M. - Developmental Stages in Ancient Mesopotamia, _in_ Steward, + “Developmental Stages in Ancient Mesopotamia,” _in_ Steward, Julian, _et al_, _Irrigation Civilizations: A Comparative Study_. 1955. Pan American Union. @@ -6000,7 +6000,7 @@ Index Bolas, 54 - Bordes, Franois, 62 + Bordes, François, 62 Borer, 77 @@ -6028,7 +6028,7 @@ Index killed by stampede, 86 Burials, 66, 86; - in henges, 164; + in “henges,” 164; in urns, 168 Burins, 75 @@ -6085,7 +6085,7 @@ Index Combe Capelle, 30 - Combe Capelle-Brnn group, 34 + Combe Capelle-Brünn group, 34 Commont, Victor, 51 @@ -6097,7 +6097,7 @@ Index Corrals for cattle, 140 - Cradle of mankind, 136 + “Cradle of mankind,” 136 Cremation, 167 @@ -6123,7 +6123,7 @@ Index Domestication, of animals, 100, 105, 107; of plants, 100 - Dragon teeth fossils in China, 28 + “Dragon teeth” fossils in China, 28 Drill, 77 @@ -6176,9 +6176,9 @@ Index Fayum, 135; radiocarbon date, 146 - Fertile Crescent, 107, 146 + “Fertile Crescent,” 107, 146 - Figurines, Venus, 84; + Figurines, “Venus,” 84; at Jarmo, 128; at Ubaid, 153 @@ -6197,7 +6197,7 @@ Index Flint industry, 127 - Fontchevade, 32, 56, 58 + Fontéchevade, 32, 56, 58 Food-collecting, 104, 121; end of, 104 @@ -6223,7 +6223,7 @@ Index Food-vessel folk, 164 - Forest folk, 97, 98, 104, 110 + “Forest folk,” 97, 98, 104, 110 Fox, Sir Cyril, 174 @@ -6379,7 +6379,7 @@ Index Land bridges in Mediterranean, 19 - La Tne phase, 170 + La Tène phase, 170 Laurel leaf point, 78, 89 @@ -6404,7 +6404,7 @@ Index Mammoth, 93; in cave art, 85 - Man-apes, 26 + “Man-apes,” 26 Mango, 107 @@ -6435,7 +6435,7 @@ Index Microliths, 87; at Jarmo, 130; - lunates, 87; + “lunates,” 87; trapezoids, 87; triangles, 87 @@ -6443,7 +6443,7 @@ Index Mine-shafts, 140 - Mlefaat, 126, 127 + M’lefaat, 126, 127 Mongoloids, 29, 90 @@ -6453,9 +6453,9 @@ Index Mount Carmel, 11, 33, 52, 59, 64, 69, 113, 114 - Mousterian man, 64 + “Mousterian man,” 64 - Mousterian tools, 
61, 62; + “Mousterian” tools, 61, 62; of Acheulean tradition, 62 Movius, H. L., 47 @@ -6471,7 +6471,7 @@ Index Near East, beginnings of civilization in, 20, 144; cave sites, 58; climate in Ice Age, 99; - Fertile Crescent, 107, 146; + “Fertile Crescent,” 107, 146; food-production in, 99; Natufian assemblage in, 113-115; stone tools, 114 @@ -6539,7 +6539,7 @@ Index Pig, wild, 108 - Piltdown man, 29 + “Piltdown man,” 29 Pins, 80 @@ -6578,7 +6578,7 @@ Index Race, 35; biological, 36; - pure, 16 + “pure,” 16 Radioactivity, 9, 10 @@ -6795,7 +6795,7 @@ Index Writing, 158; cuneiform, 158 - Wrm I glaciation, 58 + Würm I glaciation, 58 Zebu cattle, domestication of, 107 @@ -6810,7 +6810,7 @@ Index -Transcribers note: +Transcriber’s note: Punctuation, hyphenation, and spelling were made consistent when a predominant preference was found in this book; otherwise they were not diff --git a/ciphers/rabin_miller.py b/ciphers/rabin_miller.py index f71fb03c0051..502417497a8c 100644 --- a/ciphers/rabin_miller.py +++ b/ciphers/rabin_miller.py @@ -1,8 +1,11 @@ from __future__ import print_function -# Primality Testing with the Rabin-Miller Algorithm import random + +# Primality Testing with the Rabin-Miller Algorithm + + def rabinMiller(num): s = num - 1 t = 0 @@ -24,6 +27,7 @@ def rabinMiller(num): v = (v ** 2) % num return True + def isPrime(num): if (num < 2): return False @@ -52,12 +56,14 @@ def isPrime(num): return rabinMiller(num) -def generateLargePrime(keysize = 1024): + +def generateLargePrime(keysize=1024): while True: num = random.randrange(2 ** (keysize - 1), 2 ** (keysize)) if isPrime(num): return num + if __name__ == '__main__': num = generateLargePrime() print(('Prime number:', num)) diff --git a/ciphers/rot13.py b/ciphers/rot13.py index 2abf981e9d7d..ccc739d70f90 100644 --- a/ciphers/rot13.py +++ b/ciphers/rot13.py @@ -1,4 +1,6 @@ from __future__ import print_function + + def dencrypt(s, n): out = '' for c in s: diff --git a/ciphers/rsa_cipher.py b/ciphers/rsa_cipher.py 
index d81f1ffc1a1e..81031e3a5778 100644 --- a/ciphers/rsa_cipher.py +++ b/ciphers/rsa_cipher.py @@ -1,9 +1,13 @@ from __future__ import print_function -import sys, rsa_key_generator as rkg, os + +import os +import rsa_key_generator as rkg +import sys DEFAULT_BLOCK_SIZE = 128 BYTE_SIZE = 256 + def main(): filename = 'encrypted_file.txt' response = input(r'Encrypte\Decrypt [e\d]: ') @@ -16,7 +20,7 @@ def main(): if mode == 'encrypt': if not os.path.exists('rsa_pubkey.txt'): rkg.makeKeyFiles('rsa', 1024) - + message = input('\nEnter message: ') pubKeyFilename = 'rsa_pubkey.txt' print('Encrypting and writing to %s...' % (filename)) @@ -119,5 +123,6 @@ def readFromFileAndDecrypt(messageFilename, keyFilename): return decryptMessage(encryptedBlocks, messageLength, (n, d), blockSize) + if __name__ == '__main__': main() diff --git a/ciphers/rsa_key_generator.py b/ciphers/rsa_key_generator.py index 541e90d6e884..f0eec78e66ae 100644 --- a/ciphers/rsa_key_generator.py +++ b/ciphers/rsa_key_generator.py @@ -1,12 +1,19 @@ from __future__ import print_function -import random, sys, os -import rabin_miller as rabinMiller, cryptomath_module as cryptoMath + +import os +import random +import sys + +import cryptomath_module as cryptoMath +import rabin_miller as rabinMiller + def main(): print('Making key files...') makeKeyFiles('rsa', 1024) print('Key files generation successful.') + def generateKey(keySize): print('Generating prime p...') p = rabinMiller.generateLargePrime(keySize) @@ -27,6 +34,7 @@ def generateKey(keySize): privateKey = (n, d) return (publicKey, privateKey) + def makeKeyFiles(name, keySize): if os.path.exists('%s_pubkey.txt' % (name)) or os.path.exists('%s_privkey.txt' % (name)): print('\nWARNING:') @@ -42,5 +50,6 @@ def makeKeyFiles(name, keySize): with open('%s_privkey.txt' % name, 'w') as fo: fo.write('%s,%s,%s' % (keySize, privateKey[0], privateKey[1])) + if __name__ == '__main__': main() diff --git a/ciphers/simple_substitution_cipher.py 
b/ciphers/simple_substitution_cipher.py index 1bdd7dc04a57..b6fcc33819c8 100644 --- a/ciphers/simple_substitution_cipher.py +++ b/ciphers/simple_substitution_cipher.py @@ -1,8 +1,11 @@ from __future__ import print_function -import sys, random + +import random +import sys LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + def main(): message = input('Enter message: ') key = 'LFWOAYUISVKMNXPBDCRJTQEGHZ' @@ -18,7 +21,8 @@ def main(): translated = decryptMessage(key, message) print('\n%sion: \n%s' % (mode.title(), translated)) - + + def checkValidKey(key): keyList = list(key) lettersList = list(LETTERS) @@ -28,6 +32,7 @@ def checkValidKey(key): if keyList != lettersList: sys.exit('Error in the key or symbol set.') + def encryptMessage(key, message): """ >>> encryptMessage('LFWOAYUISVKMNXPBDCRJTQEGHZ', 'Harshil Darji') @@ -35,6 +40,7 @@ def encryptMessage(key, message): """ return translateMessage(key, message, 'encrypt') + def decryptMessage(key, message): """ >>> decryptMessage('LFWOAYUISVKMNXPBDCRJTQEGHZ', 'Ilcrism Olcvs') @@ -42,6 +48,7 @@ def decryptMessage(key, message): """ return translateMessage(key, message, 'decrypt') + def translateMessage(key, message, mode): translated = '' charsA = LETTERS @@ -49,7 +56,7 @@ def translateMessage(key, message, mode): if mode == 'decrypt': charsA, charsB = charsB, charsA - + for symbol in message: if symbol.upper() in charsA: symIndex = charsA.find(symbol.upper()) @@ -62,10 +69,12 @@ def translateMessage(key, message, mode): return translated + def getRandomKey(): key = list(LETTERS) random.shuffle(key) return ''.join(key) + if __name__ == '__main__': main() diff --git a/ciphers/trafid_cipher.py b/ciphers/trafid_cipher.py index 0453272f26a0..852aed4ea965 100644 --- a/ciphers/trafid_cipher.py +++ b/ciphers/trafid_cipher.py @@ -1,86 +1,91 @@ -#https://en.wikipedia.org/wiki/Trifid_cipher - -def __encryptPart(messagePart, character2Number): - one, two, three = "", "", "" - tmp = [] - - for character in messagePart: - 
tmp.append(character2Number[character]) - - for each in tmp: - one += each[0] - two += each[1] - three += each[2] - - return one+two+three - -def __decryptPart(messagePart, character2Number): - tmp, thisPart = "", "" - result = [] - - for character in messagePart: - thisPart += character2Number[character] - - for digit in thisPart: - tmp += digit - if len(tmp) == len(messagePart): - result.append(tmp) - tmp = "" - - return result[0], result[1], result[2] - -def __prepare(message, alphabet): - #Validate message and alphabet, set to upper and remove spaces - alphabet = alphabet.replace(" ", "").upper() - message = message.replace(" ", "").upper() - - #Check length and characters - if len(alphabet) != 27: - raise KeyError("Length of alphabet has to be 27.") - for each in message: - if each not in alphabet: - raise ValueError("Each message character has to be included in alphabet!") - - #Generate dictionares - numbers = ("111","112","113","121","122","123","131","132","133","211","212","213","221","222","223","231","232","233","311","312","313","321","322","323","331","332","333") - character2Number = {} - number2Character = {} - for letter, number in zip(alphabet, numbers): - character2Number[letter] = number - number2Character[number] = letter - - return message, alphabet, character2Number, number2Character - -def encryptMessage(message, alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ.", period=5): - message, alphabet, character2Number, number2Character = __prepare(message, alphabet) - encrypted, encrypted_numeric = "", "" - - for i in range(0, len(message)+1, period): - encrypted_numeric += __encryptPart(message[i:i+period], character2Number) - - for i in range(0, len(encrypted_numeric), 3): - encrypted += number2Character[encrypted_numeric[i:i+3]] - - return encrypted - -def decryptMessage(message, alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ.", period=5): - message, alphabet, character2Number, number2Character = __prepare(message, alphabet) - decrypted_numeric = [] - decrypted = 
"" - - for i in range(0, len(message)+1, period): - a,b,c = __decryptPart(message[i:i+period], character2Number) - - for j in range(0, len(a)): - decrypted_numeric.append(a[j]+b[j]+c[j]) - - for each in decrypted_numeric: - decrypted += number2Character[each] - - return decrypted - -if __name__ == '__main__': - msg = "DEFEND THE EAST WALL OF THE CASTLE." - encrypted = encryptMessage(msg,"EPSDUCVWYM.ZLKXNBTFGORIJHAQ") - decrypted = decryptMessage(encrypted, "EPSDUCVWYM.ZLKXNBTFGORIJHAQ") - print ("Encrypted: {}\nDecrypted: {}".format(encrypted, decrypted)) \ No newline at end of file +# https://en.wikipedia.org/wiki/Trifid_cipher + +def __encryptPart(messagePart, character2Number): + one, two, three = "", "", "" + tmp = [] + + for character in messagePart: + tmp.append(character2Number[character]) + + for each in tmp: + one += each[0] + two += each[1] + three += each[2] + + return one + two + three + + +def __decryptPart(messagePart, character2Number): + tmp, thisPart = "", "" + result = [] + + for character in messagePart: + thisPart += character2Number[character] + + for digit in thisPart: + tmp += digit + if len(tmp) == len(messagePart): + result.append(tmp) + tmp = "" + + return result[0], result[1], result[2] + + +def __prepare(message, alphabet): + # Validate message and alphabet, set to upper and remove spaces + alphabet = alphabet.replace(" ", "").upper() + message = message.replace(" ", "").upper() + + # Check length and characters + if len(alphabet) != 27: + raise KeyError("Length of alphabet has to be 27.") + for each in message: + if each not in alphabet: + raise ValueError("Each message character has to be included in alphabet!") + + # Generate dictionares + numbers = ("111", "112", "113", "121", "122", "123", "131", "132", "133", "211", "212", "213", "221", "222", "223", "231", "232", "233", "311", "312", "313", "321", "322", "323", "331", "332", "333") + character2Number = {} + number2Character = {} + for letter, number in zip(alphabet, numbers): + 
character2Number[letter] = number + number2Character[number] = letter + + return message, alphabet, character2Number, number2Character + + +def encryptMessage(message, alphabet="ABCDEFGHIJKLMNOPQRSTUVWXYZ.", period=5): + message, alphabet, character2Number, number2Character = __prepare(message, alphabet) + encrypted, encrypted_numeric = "", "" + + for i in range(0, len(message) + 1, period): + encrypted_numeric += __encryptPart(message[i:i + period], character2Number) + + for i in range(0, len(encrypted_numeric), 3): + encrypted += number2Character[encrypted_numeric[i:i + 3]] + + return encrypted + + +def decryptMessage(message, alphabet="ABCDEFGHIJKLMNOPQRSTUVWXYZ.", period=5): + message, alphabet, character2Number, number2Character = __prepare(message, alphabet) + decrypted_numeric = [] + decrypted = "" + + for i in range(0, len(message) + 1, period): + a, b, c = __decryptPart(message[i:i + period], character2Number) + + for j in range(0, len(a)): + decrypted_numeric.append(a[j] + b[j] + c[j]) + + for each in decrypted_numeric: + decrypted += number2Character[each] + + return decrypted + + +if __name__ == '__main__': + msg = "DEFEND THE EAST WALL OF THE CASTLE." + encrypted = encryptMessage(msg, "EPSDUCVWYM.ZLKXNBTFGORIJHAQ") + decrypted = decryptMessage(encrypted, "EPSDUCVWYM.ZLKXNBTFGORIJHAQ") + print("Encrypted: {}\nDecrypted: {}".format(encrypted, decrypted)) diff --git a/ciphers/transposition_cipher.py b/ciphers/transposition_cipher.py index dbb358315d22..95220b0f4e0b 100644 --- a/ciphers/transposition_cipher.py +++ b/ciphers/transposition_cipher.py @@ -1,6 +1,8 @@ from __future__ import print_function + import math + def main(): message = input('Enter message: ') key = int(input('Enter key [2-%s]: ' % (len(message) - 1))) @@ -12,7 +14,8 @@ def main(): text = decryptMessage(key, message) # Append pipe symbol (vertical bar) to identify spaces at the end. 
- print('Output:\n%s' %(text + '|')) + print('Output:\n%s' % (text + '|')) + def encryptMessage(key, message): """ @@ -27,6 +30,7 @@ def encryptMessage(key, message): pointer += key return ''.join(cipherText) + def decryptMessage(key, message): """ >>> decryptMessage(6, 'Hlia rDsahrij') @@ -36,7 +40,8 @@ def decryptMessage(key, message): numRows = key numShadedBoxes = (numCols * numRows) - len(message) plainText = [""] * numCols - col = 0; row = 0; + col = 0; + row = 0; for symbol in message: plainText[col] += symbol @@ -48,7 +53,9 @@ def decryptMessage(key, message): return "".join(plainText) + if __name__ == '__main__': import doctest + doctest.testmod() main() diff --git a/ciphers/transposition_cipher_encrypt_decrypt_file.py b/ciphers/transposition_cipher_encrypt_decrypt_file.py index a186cf81cde7..7c21d1e8460f 100644 --- a/ciphers/transposition_cipher_encrypt_decrypt_file.py +++ b/ciphers/transposition_cipher_encrypt_decrypt_file.py @@ -1,7 +1,12 @@ from __future__ import print_function -import time, os, sys + +import os +import sys +import time + import transposition_cipher as transCipher + def main(): inputFile = 'Prehistoric Men.txt' outputFile = 'Output.txt' @@ -16,7 +21,7 @@ def main(): response = input('> ') if not response.lower().startswith('y'): sys.exit() - + startTime = time.time() if mode.lower().startswith('e'): with open(inputFile) as f: @@ -25,13 +30,14 @@ def main(): elif mode.lower().startswith('d'): with open(outputFile) as f: content = f.read() - translated =transCipher .decryptMessage(key, content) + translated = transCipher.decryptMessage(key, content) with open(outputFile, 'w') as outputObj: outputObj.write(translated) - + totalTime = round(time.time() - startTime, 2) print(('Done (', totalTime, 'seconds )')) - + + if __name__ == '__main__': main() diff --git a/ciphers/vigenere_cipher.py b/ciphers/vigenere_cipher.py index 5d5be0792835..b33b963bde35 100644 --- a/ciphers/vigenere_cipher.py +++ b/ciphers/vigenere_cipher.py @@ -1,6 +1,8 @@ 
from __future__ import print_function + LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + def main(): message = input('Enter message: ') key = input('Enter key [alphanumeric]: ') @@ -16,6 +18,7 @@ def main(): print('\n%sed message:' % mode.title()) print(translated) + def encryptMessage(key, message): ''' >>> encryptMessage('HDarji', 'This is Harshil Darji from Dharmaj.') @@ -23,6 +26,7 @@ def encryptMessage(key, message): ''' return translateMessage(key, message, 'encrypt') + def decryptMessage(key, message): ''' >>> decryptMessage('HDarji', 'Akij ra Odrjqqs Gaisq muod Mphumrs.') @@ -30,6 +34,7 @@ def decryptMessage(key, message): ''' return translateMessage(key, message, 'decrypt') + def translateMessage(key, message, mode): translated = [] keyIndex = 0 @@ -57,5 +62,6 @@ def translateMessage(key, message, mode): translated.append(symbol) return ''.join(translated) + if __name__ == '__main__': main() diff --git a/ciphers/xor_cipher.py b/ciphers/xor_cipher.py index 727fac3b0703..1a9770af5517 100644 --- a/ciphers/xor_cipher.py +++ b/ciphers/xor_cipher.py @@ -16,172 +16,166 @@ - encrypt_file : boolean - decrypt_file : boolean """ -class XORCipher(object): - - def __init__(self, key = 0): - """ - simple constructor that receives a key or uses - default key = 0 - """ - - #private field - self.__key = key - - def encrypt(self, content, key): - """ - input: 'content' of type string and 'key' of type int - output: encrypted string 'content' as a list of chars - if key not passed the method uses the key by the constructor. 
- otherwise key = 1 - """ - # precondition - assert (isinstance(key,int) and isinstance(content,str)) - key = key or self.__key or 1 - - # make sure key can be any size - while (key > 255): - key -= 255 - - # This will be returned - ans = [] +class XORCipher(object): - for ch in content: - ans.append(chr(ord(ch) ^ key)) + def __init__(self, key=0): + """ + simple constructor that receives a key or uses + default key = 0 + """ - return ans + # private field + self.__key = key - def decrypt(self,content,key): - """ - input: 'content' of type list and 'key' of type int - output: decrypted string 'content' as a list of chars - if key not passed the method uses the key by the constructor. - otherwise key = 1 - """ + def encrypt(self, content, key): + """ + input: 'content' of type string and 'key' of type int + output: encrypted string 'content' as a list of chars + if key not passed the method uses the key by the constructor. + otherwise key = 1 + """ - # precondition - assert (isinstance(key,int) and isinstance(content,list)) + # precondition + assert (isinstance(key, int) and isinstance(content, str)) - key = key or self.__key or 1 + key = key or self.__key or 1 - # make sure key can be any size - while (key > 255): - key -= 255 + # make sure key can be any size + while (key > 255): + key -= 255 - # This will be returned - ans = [] + # This will be returned + ans = [] - for ch in content: - ans.append(chr(ord(ch) ^ key)) + for ch in content: + ans.append(chr(ord(ch) ^ key)) - return ans + return ans + def decrypt(self, content, key): + """ + input: 'content' of type list and 'key' of type int + output: decrypted string 'content' as a list of chars + if key not passed the method uses the key by the constructor. + otherwise key = 1 + """ - def encrypt_string(self,content, key = 0): - """ - input: 'content' of type string and 'key' of type int - output: encrypted string 'content' - if key not passed the method uses the key by the constructor. 
- otherwise key = 1 - """ + # precondition + assert (isinstance(key, int) and isinstance(content, list)) - # precondition - assert (isinstance(key,int) and isinstance(content,str)) + key = key or self.__key or 1 - key = key or self.__key or 1 + # make sure key can be any size + while (key > 255): + key -= 255 - # make sure key can be any size - while (key > 255): - key -= 255 + # This will be returned + ans = [] - # This will be returned - ans = "" + for ch in content: + ans.append(chr(ord(ch) ^ key)) - for ch in content: - ans += chr(ord(ch) ^ key) + return ans - return ans + def encrypt_string(self, content, key=0): + """ + input: 'content' of type string and 'key' of type int + output: encrypted string 'content' + if key not passed the method uses the key by the constructor. + otherwise key = 1 + """ - def decrypt_string(self,content,key = 0): - """ - input: 'content' of type string and 'key' of type int - output: decrypted string 'content' - if key not passed the method uses the key by the constructor. - otherwise key = 1 - """ + # precondition + assert (isinstance(key, int) and isinstance(content, str)) - # precondition - assert (isinstance(key,int) and isinstance(content,str)) + key = key or self.__key or 1 - key = key or self.__key or 1 + # make sure key can be any size + while (key > 255): + key -= 255 - # make sure key can be any size - while (key > 255): - key -= 255 + # This will be returned + ans = "" - # This will be returned - ans = "" - - for ch in content: - ans += chr(ord(ch) ^ key) + for ch in content: + ans += chr(ord(ch) ^ key) - return ans + return ans + def decrypt_string(self, content, key=0): + """ + input: 'content' of type string and 'key' of type int + output: decrypted string 'content' + if key not passed the method uses the key by the constructor. 
+ otherwise key = 1 + """ - def encrypt_file(self, file, key = 0): - """ - input: filename (str) and a key (int) - output: returns true if encrypt process was - successful otherwise false - if key not passed the method uses the key by the constructor. - otherwise key = 1 - """ + # precondition + assert (isinstance(key, int) and isinstance(content, str)) - #precondition - assert (isinstance(file,str) and isinstance(key,int)) + key = key or self.__key or 1 - try: - with open(file,"r") as fin: - with open("encrypt.out","w+") as fout: + # make sure key can be any size + while (key > 255): + key -= 255 - # actual encrypt-process - for line in fin: - fout.write(self.encrypt_string(line,key)) + # This will be returned + ans = "" - except: - return False + for ch in content: + ans += chr(ord(ch) ^ key) - return True + return ans + def encrypt_file(self, file, key=0): + """ + input: filename (str) and a key (int) + output: returns true if encrypt process was + successful otherwise false + if key not passed the method uses the key by the constructor. + otherwise key = 1 + """ - def decrypt_file(self,file, key): - """ - input: filename (str) and a key (int) - output: returns true if decrypt process was - successful otherwise false - if key not passed the method uses the key by the constructor. 
- otherwise key = 1 - """ + # precondition + assert (isinstance(file, str) and isinstance(key, int)) - #precondition - assert (isinstance(file,str) and isinstance(key,int)) + try: + with open(file, "r") as fin: + with open("encrypt.out", "w+") as fout: + # actual encrypt-process + for line in fin: + fout.write(self.encrypt_string(line, key)) - try: - with open(file,"r") as fin: - with open("decrypt.out","w+") as fout: + except: + return False - # actual encrypt-process - for line in fin: - fout.write(self.decrypt_string(line,key)) + return True - except: - return False + def decrypt_file(self, file, key): + """ + input: filename (str) and a key (int) + output: returns true if decrypt process was + successful otherwise false + if key not passed the method uses the key by the constructor. + otherwise key = 1 + """ - return True + # precondition + assert (isinstance(file, str) and isinstance(key, int)) + try: + with open(file, "r") as fin: + with open("decrypt.out", "w+") as fout: + # actual encrypt-process + for line in fin: + fout.write(self.decrypt_string(line, key)) + except: + return False + return True # Tests # crypt = XORCipher() @@ -206,4 +200,4 @@ def decrypt_file(self,file, key): # if (crypt.decrypt_file("encrypt.out",key)): # print "decrypt successful" # else: -# print "decrypt unsuccessful" \ No newline at end of file +# print "decrypt unsuccessful" diff --git a/compression/huffman.py b/compression/huffman.py index 7417551ba209..5593746b6d48 100644 --- a/compression/huffman.py +++ b/compression/huffman.py @@ -1,5 +1,6 @@ import sys + class Letter: def __init__(self, letter, freq): self.letter = letter @@ -31,6 +32,7 @@ def parse_file(file_path): chars[c] = chars[c] + 1 if c in chars.keys() else 1 return sorted([Letter(c, f) for c, f in chars.items()], key=lambda l: l.freq) + def build_tree(letters): """ Run through the list of Letters and build the min heap @@ -45,6 +47,7 @@ def build_tree(letters): letters.sort(key=lambda l: l.freq) return letters[0] + 
def traverse_tree(root, bitstring): """ Recursively traverse the Huffman Tree to set each @@ -58,6 +61,7 @@ def traverse_tree(root, bitstring): letters += traverse_tree(root.right, bitstring + "1") return letters + def huffman(file_path): """ Parse the file, build the tree, then run through the file @@ -77,6 +81,7 @@ def huffman(file_path): print(le.bitstring, end=" ") print() + if __name__ == "__main__": # pass the file path to the huffman function huffman(sys.argv[1]) diff --git a/concurrent_test/D8ger.py b/concurrent_test/D8ger.py new file mode 100644 index 000000000000..4231f000cb1b --- /dev/null +++ b/concurrent_test/D8ger.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import argparse +import random +import time + +import requests +import logging + + +def user_agent() -> list: + opera_1 = {'User-Agent': 'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50'} + opera_2 = {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50'} + opera_3 = {'User-Agent': 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10'} + firefox_1 = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0'} + firefox_2 = {'User-Agent': 'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10'} + safari = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2'} + chrome_1 = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36'} + chrome_2 = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'} + chrome_3 = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16'} + taobao = {'User-Agent': 
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11'} + liebao_1 = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER'} + liebao_2 = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)'} + qq_1 = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)'} + qq_2 = {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)'} + qq_3 = {'User-Agent': 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'} + sougou_1 = {'User-Agent': 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0'} + sougou_2 = {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)'} + maxthon = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.3.4000 Chrome/30.0.1599.101 Safari/537.36'} + uc = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36'} + iphone = {'User-Agent': 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'} + ipod = {'User-Agent': 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 
Safari/6533.18.5'} + ipad_1 = {'User-Agent': 'Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5'} + ipad_2 = {'User-Agent': 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'} + android_1 = {'User-Agent': 'Mozilla/5.0 (Linux; U; Android 2.2.1; zh-cn; HTC_Wildfire_A3333 Build/FRG83D) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'} + android_2 = {'User-Agent': 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'} + pad_moto_xoom = {'User-Agent': 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13'} + black_berry = {'User-Agent': 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+'} + hp_touch_pad = {'User-Agent': 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0'} + nokia_n97 = {'User-Agent': 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124'} + windows_phone_mango = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)'} + first_list = [opera_1, opera_2, opera_3, firefox_1, firefox_2, safari] + second_list = [chrome_1, chrome_2, chrome_3, taobao, liebao_1, liebao_2] + third_list = [qq_1, qq_2, qq_3, sougou_1, sougou_2, maxthon, uc, iphone] + fourth_list = [ipod, ipad_1, ipad_2, android_1, android_2, pad_moto_xoom] + fifth_list = [black_berry, hp_touch_pad, nokia_n97, windows_phone_mango] + first_list.extend(second_list) + first_list.extend(third_list) + 
first_list.extend(fourth_list) + first_list.extend(fifth_list) + return first_list + + +def build_logs(): + # 设置log名称 + log_name = "v5.log" + # 定义logger + logger = logging.getLogger() + # 设置级别为debug + logger.setLevel(level=logging.DEBUG) + # 设置 logging文件名称 + handler = logging.FileHandler(log_name) + # 设置级别为debug + handler.setLevel(logging.DEBUG) + # 设置log的格式 + formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + # 将格式压进logger + handler.setFormatter(formatter) + console = logging.StreamHandler() + console.setLevel(logging.DEBUG) + # 写入logger + logger.addHandler(handler) + logger.addHandler(console) + # 将logger返回 + return logger + + +def execute_download(): + parser = argparse.ArgumentParser(description="帝八嫂的小秘密") + parser.add_argument("-l", "--loop", type=int, default=200, help="下载次数默认200") + parser.add_argument("-d", "--delay", type=int, default=0, help="延时默认0秒") + args = parser.parse_args() + loop = args.loop + if loop < 0: + loop = 200 + delay = args.delay + if delay < 0: + delay = 0 + # 日志 + logger = build_logs() + logger.debug("参数设置结果: 下载次数=[{}], 延时=[{}]s".format(loop, delay)) + tasks = list(range(1, loop + 1)) + download_url = "https://plugins.jetbrains.com/plugin/download?rel=true&updateId=92649" + # 添加头部,伪装浏览器,字典格式 + agent_list = user_agent() + gama = len(agent_list) + failed = 0 + for i in tasks: + logger.debug("执行第[{}]次下载任务".format(i)) + # 随机获取浏览器代理 + seed = random.randint(0, 1000) + index = seed % gama + headers = agent_list[index] + file_name = "D8{}.zip".format(i) + try: + logger.debug("第[{}]次下载任务: [随机数={}], [索引={}],\n[浏览器代理={}]".format(i, seed, index, headers)) + jet = requests.get(download_url, headers=headers, timeout=600) + # 下载文件 + with open(file_name, "wb") as d8ger_writer: + d8ger_writer.write(jet.content) + except Exception as e: + # 服务端关闭连接, 防火墙超时关闭连接, 或其他异常 + logger.error("第[{}]次下载任务出现异常, 原因: {}".format(i, e)) + failed += 1 + # 继续下一次 + continue + # 延时5秒执行 + logger.debug("文件[{}]下载完成".format(file_name)) + if delay > 0: + 
time.sleep(delay) + logger.debug("失败[{}]次, 成功下载[{}]次".format(failed, loop - failed)) + + +def main(): + execute_download() + + +if __name__ == '__main__': + main() diff --git a/concurrent_test/D8gerRich.py b/concurrent_test/D8gerRich.py new file mode 100755 index 000000000000..cf1853194387 --- /dev/null +++ b/concurrent_test/D8gerRich.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from concurrent.futures import ThreadPoolExecutor +from functools import partial +import os.path +import sys +from typing import Iterable +from urllib.request import urlopen +from rich import print +from rich.console import Console +import logging +from rich.logging import RichHandler + +from rich.progress import ( + BarColumn, + DownloadColumn, + TextColumn, + TransferSpeedColumn, + TimeRemainingColumn, + Progress, + TaskID, +) + +progress = Progress( + TextColumn("[bold blue]{task.fields[filename]}", justify="right"), + BarColumn(bar_width=None), + "[progress.percentage]{task.percentage:>3.1f}%", + "•", + DownloadColumn(), + "•", + TransferSpeedColumn(), + "•", + TimeRemainingColumn(), +) + + +def copy_url(task_id: TaskID, url: str, path: str) -> None: + """Copy data from a url to a local file.""" + response = urlopen(url) + # This will break if the response doesn't contain content length + progress.update(task_id, total=int(response.info()["Content-length"])) + with open(path, "wb") as dest_file: + progress.start_task(task_id) + for data in iter(partial(response.read, 32768), b""): + dest_file.write(data) + progress.update(task_id, advance=len(data)) + + +def download(url: str, loop: int, workers: int, dest_dir: str): + """Download multuple files to the given directory.""" + with progress: + with ThreadPoolExecutor(max_workers=workers) as pool: + for i in range(loop): + filename = "D8{}.zip".format(i+1) + dest_path = os.path.join(dest_dir, filename) + task_id = progress.add_task("download", filename=filename, start=False) + pool.submit(copy_url, task_id, url, 
dest_path) + + +def execute_download(): + url = "https://plugins.jetbrains.com/plugin/download?rel=true&updateId=94618" + loop = 10 + workers = 2 + console = Console() + console.print(":smiley: :vampire: :pile_of_poo: :thumbs_up: :raccoon:") + logging.basicConfig(level="DEBUG", format="%(message)s", datefmt="[%Y-%m-%d %X]", handlers=[RichHandler()]) + log = logging.getLogger("rich") + try: + if sys.argv[1]: + loop = int(sys.argv[1]) + if sys.argv[2]: + workers = int(sys.argv[2]) + except Exception as e: + # just log + log.warning("you didn't set any parameters!") + if loop > 64: + loop = 64 + if loop < 0: + loop = 10 + if workers > 8: + workers = 8 + if workers < 0: + workers = 2 + console.print("parameter [bold magenta]loop[/bold magenta] = [bold green]{}[/bold green], parameter [bold magenta]workers[/bold magenta] = [bold green]{}[/bold green]".format(loop, workers)) + download(url, loop, workers, "./") + + +def main(): + execute_download() + + +if __name__ == '__main__': + main() diff --git a/concurrent_test/concurrent_test.py b/concurrent_test/concurrent_test.py new file mode 100644 index 000000000000..cb656866377c --- /dev/null +++ b/concurrent_test/concurrent_test.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import datetime +import json +import ssl +from concurrent.futures.thread import ThreadPoolExecutor + +import requests + +import numpy as np +import argparse + +# 屏蔽HTTPS证书校验, 忽略安全警告 +requests.packages.urllib3.disable_warnings() +context = ssl._create_unverified_context() + + +def init_param() -> list: + """ + 初始化参数, 读取shell命令参数, 自动登录 + 依次返回httpie_view方式, 线程池, 登录cookie + :rtype: list + """ + parser = argparse.ArgumentParser(description="并发执行接口") + parser.add_argument("-w", "--workers", type=int, choices=choice_nums(1, 65, 1), default=1, help="并发执行线程数, 取值范围[1, 64]") + parser.add_argument("-l", "--loops", type=int, default=1, help="循环执行次数") + args = parser.parse_args() + loops = args.loops + if loops < 1: + loops = 1 + print("参数设置结果: 
执行次数=[{}], 并发线程数=[{}]".format(loops, args.workers)) + init_executor = ThreadPoolExecutor(max_workers=args.workers) + cookie = auto_login() + return [loops, init_executor, cookie] + + +def choice_nums(start: int, end: int, delta: int) -> list: + """ + 返回指定的数组序列 + :rtype: list + """ + return np.arange(start, end, delta).tolist() + + +def execute_http(i): + """ + 执行excuteUrl.json接口 + :param i 仅用于计数虚拟参数 + :return: + """ + with open("./excuteUrl.json", 'r') as request_data: + request_json = json.load(request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_headers['Cookie'] = init_cookie + request_body = handle_json_str_value(request_json['body']) + response_body = { + "status": -1, + "msg": "接口执行失败", + "data": "请检查接口是否返回JSON格式的相应数据, 以及抛出未经处理的特殊异常" + } + executeStartTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + try: + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + except Exception as e: + print(e) + executeEndTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + return "executeStartTime=[{}], executeEndTime=[{}]\n响应结果:\n{}".format(executeStartTime, executeEndTime, response_body) + + +def auto_login(): + """ + 自动登录, 获取登录Cookie + """ + with open("./ssoLogin.json", 'r') as sso_login_request_data: + request_json = json.load(sso_login_request_data) + url = request_json['url'] + method = request_json['method'] + request_headers = handle_json_str_value(request_json['headers']) + request_body = handle_json_str_value(request_json['body']) + # request_headers = {"Content-Type": "application/json", "HT-app": "6"} + response = requests.request(method, url, headers=request_headers, json=request_body, timeout=3, verify=False) + response_headers = response.headers + # 处理Cookie, 多个Cookie之间使用';'分隔, 
否则校验cookie时出现"domain."在高版本中tomcat中报错 + # https://blog.csdn.net/w57685321/article/details/84943176 + cookie = response_headers.get("set-Cookie").replace(", _r", "; _r").replace(", _a", "; _a") + # JSON标准格式 + response_body = json.dumps(response.json(), ensure_ascii=False, indent=4) + print("登录响应Cookie结果: \n{}\n登录响应BODY结果: {}".format(cookie, response_body)) + return cookie + + +def handle_json_str_value(json): + """ + 将json的值都变为字符串处理 + :param json: + :return: + """ + for (k, v) in json.items(): + json[k] = str(v) + return json + + +def main(): + # 全局变量 + global execute_num + global init_cookie + global executor + # 初始化参数 + initial_param_list = init_param() + execute_num = initial_param_list[0] + executor = initial_param_list[1] + init_cookie = initial_param_list[2] + nums = list(range(0, execute_num)) + for result in executor.map(execute_http, nums): + print(result) + + +if __name__ == '__main__': + main() diff --git a/concurrent_test/easy_test.py b/concurrent_test/easy_test.py new file mode 100644 index 000000000000..8a431990dd4b --- /dev/null +++ b/concurrent_test/easy_test.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import argparse +import datetime +import ssl +from concurrent.futures.thread import ThreadPoolExecutor + +import numpy as np +import requests + +# 屏蔽HTTPS证书校验, 忽略安全警告 +requests.packages.urllib3.disable_warnings() +context = ssl._create_unverified_context() + + +def init_param() -> list: + """ + 初始化参数, 读取shell命令参数, 自动登录 + 依次返回httpie_view方式, 线程池, 登录cookie + :rtype: list + """ + parser = argparse.ArgumentParser(description="并发执行接口") + parser.add_argument("url", type=str, help="接口请求地址") + parser.add_argument("-w", "--workers", type=int, choices=choice_nums(1, 65, 1), default=1, help="并发执行线程数, 取值范围[1, 64]") + parser.add_argument("-l", "--loops", type=int, default=1, help="循环执行次数") + args = parser.parse_args() + loops = args.loops + if loops < 1: + loops = 1 + print("参数设置结果: 请求url=[{}], 执行次数=[{}], 并发线程数=[{}]".format(args.url, loops, 
args.workers)) + init_executor = ThreadPoolExecutor(max_workers=args.workers) + return [loops, init_executor, args.url] + + +def choice_nums(start: int, end: int, delta: int) -> list: + """ + 返回指定的数组序列 + :rtype: list + """ + return np.arange(start, end, delta).tolist() + + +def execute_http(i): + """ + 执行excuteUrl.json接口 + :param i 仅用于计数虚拟参数 + :return: + """ + request_headers = {} + request_body = {} + response_text = "无响应文本" + executeStartTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + try: + response = requests.request("POST", url, headers=request_headers, json=request_body, timeout=3, verify=False) + # JSON标准格式 + response_text = response.text + except Exception as e: + print(e) + executeEndTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + return "executeStartTime=[{}], executeEndTime=[{}]\n响应结果:\n{}".format(executeStartTime, executeEndTime, response_text) + + +def handle_json_str_value(json): + """ + 将json的值都变为字符串处理 + :param json: + :return: + """ + for (k, v) in json.items(): + json[k] = str(v) + return json + + +def main(): + # 全局变量 + global execute_num + global url + global executor + # 初始化参数 + initial_param_list = init_param() + execute_num = initial_param_list[0] + executor = initial_param_list[1] + url = initial_param_list[2] + nums = list(range(0, execute_num)) + for result in executor.map(execute_http, nums): + print(result) + + +if __name__ == '__main__': + main() diff --git a/concurrent_test/excuteUrl.json b/concurrent_test/excuteUrl.json new file mode 100644 index 000000000000..84b0201df4a3 --- /dev/null +++ b/concurrent_test/excuteUrl.json @@ -0,0 +1,11 @@ +{ + "url": "https://localhost:8119/account/sentinel", + "method": "POST", + "headers" : { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "subAccountId": 11231095 + } +} diff --git a/concurrent_test/ssoLogin.json b/concurrent_test/ssoLogin.json new file mode 100644 index 000000000000..6ded12f95279 --- /dev/null +++ 
b/concurrent_test/ssoLogin.json @@ -0,0 +1,14 @@ +{ + "url": "https://sso.testa.huitong.com/api/v100/ssonew/login", + "method": "POST", + "headers": { + "Content-Type": "application/json", + "HT-app": 6 + }, + "body": { + "phone": "18999999999", + "smsAuthCode": "123456", + "loginType": 0, + "pwd": "ht123456." + } +} diff --git a/data_structures/arrays.py b/data_structures/arrays.py index feb061013556..f958585fbfb4 100644 --- a/data_structures/arrays.py +++ b/data_structures/arrays.py @@ -1,3 +1,3 @@ arr = [10, 20, 30, 40] -arr[1] = 30 # set element 1 (20) of array to 30 +arr[1] = 30 # set element 1 (20) of array to 30 print(arr) diff --git a/data_structures/avl.py b/data_structures/avl.py index d01e8f825368..4f8c966ab902 100644 --- a/data_structures/avl.py +++ b/data_structures/avl.py @@ -102,9 +102,9 @@ def rebalance(self, node): left_child = n.left if left_child is not None: h_right = (left_child.right.height - if (left_child.right is not None) else 0) + if (left_child.right is not None) else 0) h_left = (left_child.left.height - if (left_child.left is not None) else 0) + if (left_child.left is not None) else 0) if (h_left > h_right): self.rotate_left(n) break @@ -115,9 +115,9 @@ def rebalance(self, node): right_child = n.right if right_child is not None: h_right = (right_child.right.height - if (right_child.right is not None) else 0) + if (right_child.right is not None) else 0) h_left = (right_child.left.height - if (right_child.left is not None) else 0) + if (right_child.left is not None) else 0) if (h_left > h_right): self.double_rotate_left(n) break @@ -133,7 +133,6 @@ def rotate_left(self, node): node.parent.right.height = node.parent.height + 1 node.parent.left = node.right - def rotate_right(self, node): aux = node.parent.label node.parent.label = node.label @@ -169,6 +168,7 @@ def preorder(self, curr_node): def getRoot(self): return self.root + t = AVL() t.insert(1) t.insert(2) diff --git a/data_structures/binary tree/AVL_tree.py b/data_structures/binary 
tree/AVL_tree.py index ff44963d1690..eafcbaa99040 100644 --- a/data_structures/binary tree/AVL_tree.py +++ b/data_structures/binary tree/AVL_tree.py @@ -4,66 +4,83 @@ ''' import math import random + + class my_queue: def __init__(self): self.data = [] self.head = 0 self.tail = 0 + def isEmpty(self): return self.head == self.tail - def push(self,data): + + def push(self, data): self.data.append(data) self.tail = self.tail + 1 + def pop(self): ret = self.data[self.head] self.head = self.head + 1 return ret + def count(self): return self.tail - self.head + def print(self): print(self.data) print("**************") print(self.data[self.head:self.tail]) - + + class my_node: - def __init__(self,data): + def __init__(self, data): self.data = data self.left = None self.right = None self.height = 1 + def getdata(self): return self.data + def getleft(self): return self.left + def getright(self): return self.right + def getheight(self): return self.height - def setdata(self,data): + + def setdata(self, data): self.data = data return - def setleft(self,node): + + def setleft(self, node): self.left = node return - def setright(self,node): + + def setright(self, node): self.right = node return - def setheight(self,height): + + def setheight(self, height): self.height = height return + def getheight(node): if node is None: return 0 return node.getheight() -def my_max(a,b): + +def my_max(a, b): if a > b: return a return b - def leftrotation(node): r''' A B @@ -76,30 +93,32 @@ def leftrotation(node): UB = unbalanced node ''' - print("left rotation node:",node.getdata()) + print("left rotation node:", node.getdata()) ret = node.getleft() node.setleft(ret.getright()) ret.setright(node) - h1 = my_max(getheight(node.getright()),getheight(node.getleft())) + 1 + h1 = my_max(getheight(node.getright()), getheight(node.getleft())) + 1 node.setheight(h1) - h2 = my_max(getheight(ret.getright()),getheight(ret.getleft())) + 1 + h2 = my_max(getheight(ret.getright()), getheight(ret.getleft())) + 1 
ret.setheight(h2) return ret + def rightrotation(node): ''' a mirror symmetry rotation of the leftrotation ''' - print("right rotation node:",node.getdata()) + print("right rotation node:", node.getdata()) ret = node.getright() node.setright(ret.getleft()) ret.setleft(node) - h1 = my_max(getheight(node.getright()),getheight(node.getleft())) + 1 + h1 = my_max(getheight(node.getright()), getheight(node.getleft())) + 1 node.setheight(h1) - h2 = my_max(getheight(ret.getright()),getheight(ret.getleft())) + 1 + h2 = my_max(getheight(ret.getright()), getheight(ret.getleft())) + 1 ret.setheight(h2) return ret + def rlrotation(node): r''' A A Br @@ -114,47 +133,52 @@ def rlrotation(node): node.setleft(rightrotation(node.getleft())) return leftrotation(node) + def lrrotation(node): node.setright(leftrotation(node.getright())) return rightrotation(node) -def insert_node(node,data): +def insert_node(node, data): if node is None: return my_node(data) if data < node.getdata(): - node.setleft(insert_node(node.getleft(),data)) - if getheight(node.getleft()) - getheight(node.getright()) == 2: #an unbalance detected - if data < node.getleft().getdata(): #new node is the left child of the left child + node.setleft(insert_node(node.getleft(), data)) + if getheight(node.getleft()) - getheight(node.getright()) == 2: # an unbalance detected + if data < node.getleft().getdata(): # new node is the left child of the left child node = leftrotation(node) else: - node = rlrotation(node) #new node is the right child of the left child + node = rlrotation(node) # new node is the right child of the left child else: - node.setright(insert_node(node.getright(),data)) + node.setright(insert_node(node.getright(), data)) if getheight(node.getright()) - getheight(node.getleft()) == 2: if data < node.getright().getdata(): node = lrrotation(node) else: node = rightrotation(node) - h1 = my_max(getheight(node.getright()),getheight(node.getleft())) + 1 + h1 = my_max(getheight(node.getright()), 
getheight(node.getleft())) + 1 node.setheight(h1) return node + def getRightMost(root): while root.getright() is not None: root = root.getright() return root.getdata() + + def getLeftMost(root): while root.getleft() is not None: root = root.getleft() return root.getdata() -def del_node(root,data): + +def del_node(root, data): if root.getdata() == data: if root.getleft() is not None and root.getright() is not None: temp_data = getLeftMost(root.getright()) root.setdata(temp_data) - root.setright(del_node(root.getright(),temp_data)) + root.setright(del_node(root.getright(), temp_data)) elif root.getleft() is not None: root = root.getleft() else: @@ -164,12 +188,12 @@ def del_node(root,data): print("No such data") return root else: - root.setleft(del_node(root.getleft(),data)) + root.setleft(del_node(root.getleft(), data)) elif root.getdata() < data: if root.getright() is None: return root else: - root.setright(del_node(root.getright(),data)) + root.setright(del_node(root.getright(), data)) if root is None: return root if getheight(root.getright()) - getheight(root.getleft()) == 2: @@ -182,27 +206,31 @@ def del_node(root,data): root = leftrotation(root) else: root = rlrotation(root) - height = my_max(getheight(root.getright()),getheight(root.getleft())) + 1 + height = my_max(getheight(root.getright()), getheight(root.getleft())) + 1 root.setheight(height) return root + class AVLtree: def __init__(self): self.root = None + def getheight(self): -# print("yyy") + # print("yyy") return getheight(self.root) - def insert(self,data): - print("insert:"+str(data)) - self.root = insert_node(self.root,data) - - def del_node(self,data): - print("delete:"+str(data)) + + def insert(self, data): + print("insert:" + str(data)) + self.root = insert_node(self.root, data) + + def del_node(self, data): + print("delete:" + str(data)) if self.root is None: print("Tree is empty!") return - self.root = del_node(self.root,data) - def traversale(self): #a level traversale, gives a more 
intuitive look on the tree + self.root = del_node(self.root, data) + + def traversale(self): # a level traversale, gives a more intuitive look on the tree q = my_queue() q.push(self.root) layer = self.getheight() @@ -211,21 +239,21 @@ def traversale(self): #a level traversale, gives a more intuitive look on the tr cnt = 0 while not q.isEmpty(): node = q.pop() - space = " "*int(math.pow(2,layer-1)) - print(space,end = "") + space = " " * int(math.pow(2, layer - 1)) + print(space, end="") if node is None: - print("*",end = "") + print("*", end="") q.push(None) q.push(None) else: - print(node.getdata(),end = "") + print(node.getdata(), end="") q.push(node.getleft()) q.push(node.getright()) - print(space,end = "") + print(space, end="") cnt = cnt + 1 for i in range(100): - if cnt == math.pow(2,i) - 1: - layer = layer -1 + if cnt == math.pow(2, i) - 1: + layer = layer - 1 if layer == 0: print() print("*************************************") @@ -235,11 +263,13 @@ def traversale(self): #a level traversale, gives a more intuitive look on the tr print() print("*************************************") return - + def test(self): getheight(None) print("****") self.getheight() + + if __name__ == "__main__": t = AVLtree() t.traversale() @@ -248,7 +278,7 @@ def test(self): for i in l: t.insert(i) t.traversale() - + random.shuffle(l) for i in l: t.del_node(i) diff --git a/data_structures/binary tree/binary_search_tree.py b/data_structures/binary tree/binary_search_tree.py index cef5b55f245d..183cfcc74eac 100644 --- a/data_structures/binary tree/binary_search_tree.py +++ b/data_structures/binary tree/binary_search_tree.py @@ -2,13 +2,15 @@ A binary search Tree ''' from __future__ import print_function + + class Node: def __init__(self, label, parent): self.label = label self.left = None self.right = None - #Added in order to delete a node easier + # Added in order to delete a node easier self.parent = parent def getLabel(self): @@ -35,6 +37,7 @@ def getParent(self): def 
setParent(self, parent): self.parent = parent + class BinarySearchTree: def __init__(self): @@ -47,90 +50,90 @@ def insert(self, label): if self.empty(): self.root = new_node else: - #If Tree is not empty + # If Tree is not empty curr_node = self.root - #While we don't get to a leaf + # While we don't get to a leaf while curr_node is not None: - #We keep reference of the parent node + # We keep reference of the parent node parent_node = curr_node - #If node label is less than current node + # If node label is less than current node if new_node.getLabel() < curr_node.getLabel(): - #We go left + # We go left curr_node = curr_node.getLeft() else: - #Else we go right + # Else we go right curr_node = curr_node.getRight() - #We insert the new node in a leaf + # We insert the new node in a leaf if new_node.getLabel() < parent_node.getLabel(): parent_node.setLeft(new_node) else: parent_node.setRight(new_node) - #Set parent to the new node - new_node.setParent(parent_node) - + # Set parent to the new node + new_node.setParent(parent_node) + def delete(self, label): if (not self.empty()): - #Look for the node with that label + # Look for the node with that label node = self.getNode(label) - #If the node exists - if(node is not None): - #If it has no children - if(node.getLeft() is None and node.getRight() is None): + # If the node exists + if (node is not None): + # If it has no children + if (node.getLeft() is None and node.getRight() is None): self.__reassignNodes(node, None) node = None - #Has only right children - elif(node.getLeft() is None and node.getRight() is not None): + # Has only right children + elif (node.getLeft() is None and node.getRight() is not None): self.__reassignNodes(node, node.getRight()) - #Has only left children - elif(node.getLeft() is not None and node.getRight() is None): + # Has only left children + elif (node.getLeft() is not None and node.getRight() is None): self.__reassignNodes(node, node.getLeft()) - #Has two children + # Has two children 
else: - #Gets the max value of the left branch + # Gets the max value of the left branch tmpNode = self.getMax(node.getLeft()) - #Deletes the tmpNode + # Deletes the tmpNode self.delete(tmpNode.getLabel()) - #Assigns the value to the node to delete and keesp tree structure + # Assigns the value to the node to delete and keesp tree structure node.setLabel(tmpNode.getLabel()) - + def getNode(self, label): curr_node = None - #If the tree is not empty - if(not self.empty()): - #Get tree root + # If the tree is not empty + if (not self.empty()): + # Get tree root curr_node = self.getRoot() - #While we don't find the node we look for - #I am using lazy evaluation here to avoid NoneType Attribute error + # While we don't find the node we look for + # I am using lazy evaluation here to avoid NoneType Attribute error while curr_node is not None and curr_node.getLabel() is not label: - #If node label is less than current node + # If node label is less than current node if label < curr_node.getLabel(): - #We go left + # We go left curr_node = curr_node.getLeft() else: - #Else we go right + # Else we go right curr_node = curr_node.getRight() return curr_node - def getMax(self, root = None): - if(root is not None): + def getMax(self, root=None): + if (root is not None): curr_node = root else: - #We go deep on the right branch + # We go deep on the right branch curr_node = self.getRoot() - if(not self.empty()): - while(curr_node.getRight() is not None): + if (not self.empty()): + while (curr_node.getRight() is not None): curr_node = curr_node.getRight() return curr_node - def getMin(self, root = None): - if(root is not None): + def getMin(self, root=None): + if (root is not None): curr_node = root else: - #We go deep on the left branch + # We go deep on the left branch curr_node = self.getRoot() - if(not self.empty()): + if (not self.empty()): curr_node = self.getRoot() - while(curr_node.getLeft() is not None): + while (curr_node.getLeft() is not None): curr_node = 
curr_node.getLeft() return curr_node @@ -151,34 +154,34 @@ def getRoot(self): return self.root def __isRightChildren(self, node): - if(node == node.getParent().getRight()): + if (node == node.getParent().getRight()): return True return False def __reassignNodes(self, node, newChildren): - if(newChildren is not None): + if (newChildren is not None): newChildren.setParent(node.getParent()) - if(node.getParent() is not None): - #If it is the Right Children - if(self.__isRightChildren(node)): + if (node.getParent() is not None): + # If it is the Right Children + if (self.__isRightChildren(node)): node.getParent().setRight(newChildren) else: - #Else it is the left children + # Else it is the left children node.getParent().setLeft(newChildren) - #This function traversal the tree. By default it returns an - #In order traversal list. You can pass a function to traversal - #The tree as needed by client code - def traversalTree(self, traversalFunction = None, root = None): - if(traversalFunction is None): - #Returns a list of nodes in preOrder by default + # This function traversal the tree. By default it returns an + # In order traversal list. 
You can pass a function to traversal + # The tree as needed by client code + def traversalTree(self, traversalFunction=None, root=None): + if (traversalFunction is None): + # Returns a list of nodes in preOrder by default return self.__InOrderTraversal(self.root) else: - #Returns a list of nodes in the order that the users wants to + # Returns a list of nodes in the order that the users wants to return traversalFunction(self.root) - #Returns an string of all the nodes labels in the list - #In Order Traversal + # Returns an string of all the nodes labels in the list + # In Order Traversal def __str__(self): list = self.__InOrderTraversal(self.root) str = "" @@ -186,6 +189,7 @@ def __str__(self): str = str + " " + x.getLabel().__str__() return str + def InPreOrder(curr_node): nodeList = [] if curr_node is not None: @@ -194,6 +198,7 @@ def InPreOrder(curr_node): nodeList = nodeList + InPreOrder(curr_node.getRight()) return nodeList + def testBinarySearchTree(): r''' Example @@ -224,23 +229,23 @@ def testBinarySearchTree(): t.insert(4) t.insert(7) - #Prints all the elements of the list in order traversal + # Prints all the elements of the list in order traversal print(t.__str__()) - if(t.getNode(6) is not None): + if (t.getNode(6) is not None): print("The label 6 exists") else: print("The label 6 doesn't exist") - if(t.getNode(-1) is not None): + if (t.getNode(-1) is not None): print("The label -1 exists") else: print("The label -1 doesn't exist") - - if(not t.empty()): + + if (not t.empty()): print(("Max Value: ", t.getMax().getLabel())) print(("Min Value: ", t.getMin().getLabel())) - + t.delete(13) t.delete(10) t.delete(8) @@ -248,11 +253,12 @@ def testBinarySearchTree(): t.delete(6) t.delete(14) - #Gets all the elements of the tree In pre order - #And it prints them + # Gets all the elements of the tree In pre order + # And it prints them list = t.traversalTree(InPreOrder, t.root) for x in list: print(x) + if __name__ == "__main__": testBinarySearchTree() diff --git 
a/data_structures/binary tree/fenwick_tree.py b/data_structures/binary tree/fenwick_tree.py index f429161c8c36..9253210c9875 100644 --- a/data_structures/binary tree/fenwick_tree.py +++ b/data_structures/binary tree/fenwick_tree.py @@ -1,29 +1,32 @@ from __future__ import print_function + + class FenwickTree: - def __init__(self, SIZE): # create fenwick tree with size SIZE + def __init__(self, SIZE): # create fenwick tree with size SIZE self.Size = SIZE - self.ft = [0 for i in range (0,SIZE)] + self.ft = [0 for i in range(0, SIZE)] - def update(self, i, val): # update data (adding) in index i in O(lg N) + def update(self, i, val): # update data (adding) in index i in O(lg N) while (i < self.Size): self.ft[i] += val i += i & (-i) - def query(self, i): # query cumulative data from index 0 to i in O(lg N) + def query(self, i): # query cumulative data from index 0 to i in O(lg N) ret = 0 while (i > 0): ret += self.ft[i] i -= i & (-i) return ret - + + if __name__ == '__main__': f = FenwickTree(100) - f.update(1,20) - f.update(4,4) - print (f.query(1)) - print (f.query(3)) - print (f.query(4)) - f.update(2,-5) - print (f.query(1)) - print (f.query(3)) + f.update(1, 20) + f.update(4, 4) + print(f.query(1)) + print(f.query(3)) + print(f.query(4)) + f.update(2, -5) + print(f.query(1)) + print(f.query(3)) diff --git a/data_structures/binary tree/lazy_segment_tree.py b/data_structures/binary tree/lazy_segment_tree.py index 9b14b24e81fa..0bb0b0edc1af 100644 --- a/data_structures/binary tree/lazy_segment_tree.py +++ b/data_structures/binary tree/lazy_segment_tree.py @@ -1,58 +1,60 @@ from __future__ import print_function + import math + class SegmentTree: - + def __init__(self, N): self.N = N - self.st = [0 for i in range(0,4*N)] # approximate the overall size of segment tree with array N - self.lazy = [0 for i in range(0,4*N)] # create array to store lazy update - self.flag = [0 for i in range(0,4*N)] # flag for lazy update - + self.st = [0 for i in range(0, 4 * N)] # 
approximate the overall size of segment tree with array N + self.lazy = [0 for i in range(0, 4 * N)] # create array to store lazy update + self.flag = [0 for i in range(0, 4 * N)] # flag for lazy update + def left(self, idx): - return idx*2 + return idx * 2 def right(self, idx): - return idx*2 + 1 + return idx * 2 + 1 def build(self, idx, l, r, A): - if l==r: - self.st[idx] = A[l-1] - else : - mid = (l+r)//2 - self.build(self.left(idx),l,mid, A) - self.build(self.right(idx),mid+1,r, A) - self.st[idx] = max(self.st[self.left(idx)] , self.st[self.right(idx)]) + if l == r: + self.st[idx] = A[l - 1] + else: + mid = (l + r) // 2 + self.build(self.left(idx), l, mid, A) + self.build(self.right(idx), mid + 1, r, A) + self.st[idx] = max(self.st[self.left(idx)], self.st[self.right(idx)]) # update with O(lg N) (Normal segment tree without lazy update will take O(Nlg N) for each update) - def update(self, idx, l, r, a, b, val): # update(1, 1, N, a, b, v) for update val v to [a,b] + def update(self, idx, l, r, a, b, val): # update(1, 1, N, a, b, v) for update val v to [a,b] if self.flag[idx] == True: self.st[idx] = self.lazy[idx] self.flag[idx] = False - if l!=r: + if l != r: self.lazy[self.left(idx)] = self.lazy[idx] self.lazy[self.right(idx)] = self.lazy[idx] self.flag[self.left(idx)] = True self.flag[self.right(idx)] = True - + if r < a or l > b: return True - if l >= a and r <= b : + if l >= a and r <= b: self.st[idx] = val - if l!=r: + if l != r: self.lazy[self.left(idx)] = val self.lazy[self.right(idx)] = val self.flag[self.left(idx)] = True self.flag[self.right(idx)] = True return True - mid = (l+r)//2 - self.update(self.left(idx),l,mid,a,b,val) - self.update(self.right(idx),mid+1,r,a,b,val) - self.st[idx] = max(self.st[self.left(idx)] , self.st[self.right(idx)]) + mid = (l + r) // 2 + self.update(self.left(idx), l, mid, a, b, val) + self.update(self.right(idx), mid + 1, r, a, b, val) + self.st[idx] = max(self.st[self.left(idx)], self.st[self.right(idx)]) return True # 
query with O(lg N) - def query(self, idx, l, r, a, b): #query(1, 1, N, a, b) for query max of [a,b] + def query(self, idx, l, r, a, b): # query(1, 1, N, a, b) for query max of [a,b] if self.flag[idx] == True: self.st[idx] = self.lazy[idx] self.flag[idx] = False @@ -65,27 +67,27 @@ def query(self, idx, l, r, a, b): #query(1, 1, N, a, b) for query max of [a,b] return -math.inf if l >= a and r <= b: return self.st[idx] - mid = (l+r)//2 - q1 = self.query(self.left(idx),l,mid,a,b) - q2 = self.query(self.right(idx),mid+1,r,a,b) - return max(q1,q2) + mid = (l + r) // 2 + q1 = self.query(self.left(idx), l, mid, a, b) + q2 = self.query(self.right(idx), mid + 1, r, a, b) + return max(q1, q2) def showData(self): showList = [] - for i in range(1,N+1): + for i in range(1, N + 1): showList += [self.query(1, 1, self.N, i, i)] - print (showList) - + print(showList) + if __name__ == '__main__': - A = [1,2,-4,7,3,-5,6,11,-20,9,14,15,5,2,-8] + A = [1, 2, -4, 7, 3, -5, 6, 11, -20, 9, 14, 15, 5, 2, -8] N = 15 segt = SegmentTree(N) - segt.build(1,1,N,A) - print (segt.query(1,1,N,4,6)) - print (segt.query(1,1,N,7,11)) - print (segt.query(1,1,N,7,12)) - segt.update(1,1,N,1,3,111) - print (segt.query(1,1,N,1,15)) - segt.update(1,1,N,7,8,235) + segt.build(1, 1, N, A) + print(segt.query(1, 1, N, 4, 6)) + print(segt.query(1, 1, N, 7, 11)) + print(segt.query(1, 1, N, 7, 12)) + segt.update(1, 1, N, 1, 3, 111) + print(segt.query(1, 1, N, 1, 15)) + segt.update(1, 1, N, 7, 8, 235) segt.showData() diff --git a/data_structures/binary tree/segment_tree.py b/data_structures/binary tree/segment_tree.py index 001bf999f391..cb68749936a4 100644 --- a/data_structures/binary tree/segment_tree.py +++ b/data_structures/binary tree/segment_tree.py @@ -1,13 +1,15 @@ from __future__ import print_function + import math + class SegmentTree: - + def __init__(self, A): self.N = len(A) - self.st = [0] * (4 * self.N) # approximate the overall size of segment tree with array N + self.st = [0] * (4 * self.N) # 
approximate the overall size of segment tree with array N self.build(1, 0, self.N - 1) - + def left(self, idx): return idx * 2 @@ -21,51 +23,51 @@ def build(self, idx, l, r): mid = (l + r) // 2 self.build(self.left(idx), l, mid) self.build(self.right(idx), mid + 1, r) - self.st[idx] = max(self.st[self.left(idx)] , self.st[self.right(idx)]) - + self.st[idx] = max(self.st[self.left(idx)], self.st[self.right(idx)]) + def update(self, a, b, val): return self.update_recursive(1, 0, self.N - 1, a - 1, b - 1, val) - - def update_recursive(self, idx, l, r, a, b, val): # update(1, 1, N, a, b, v) for update val v to [a,b] + + def update_recursive(self, idx, l, r, a, b, val): # update(1, 1, N, a, b, v) for update val v to [a,b] if r < a or l > b: return True - if l == r : + if l == r: self.st[idx] = val return True - mid = (l+r)//2 + mid = (l + r) // 2 self.update_recursive(self.left(idx), l, mid, a, b, val) - self.update_recursive(self.right(idx), mid+1, r, a, b, val) - self.st[idx] = max(self.st[self.left(idx)] , self.st[self.right(idx)]) + self.update_recursive(self.right(idx), mid + 1, r, a, b, val) + self.st[idx] = max(self.st[self.left(idx)], self.st[self.right(idx)]) return True def query(self, a, b): return self.query_recursive(1, 0, self.N - 1, a - 1, b - 1) - def query_recursive(self, idx, l, r, a, b): #query(1, 1, N, a, b) for query max of [a,b] + def query_recursive(self, idx, l, r, a, b): # query(1, 1, N, a, b) for query max of [a,b] if r < a or l > b: return -math.inf if l >= a and r <= b: return self.st[idx] - mid = (l+r)//2 + mid = (l + r) // 2 q1 = self.query_recursive(self.left(idx), l, mid, a, b) q2 = self.query_recursive(self.right(idx), mid + 1, r, a, b) return max(q1, q2) def showData(self): showList = [] - for i in range(1,N+1): + for i in range(1, N + 1): showList += [self.query(i, i)] - print (showList) - + print(showList) + if __name__ == '__main__': - A = [1,2,-4,7,3,-5,6,11,-20,9,14,15,5,2,-8] + A = [1, 2, -4, 7, 3, -5, 6, 11, -20, 9, 14, 15, 5, 2, 
-8] N = 15 segt = SegmentTree(A) - print (segt.query(4, 6)) - print (segt.query(7, 11)) - print (segt.query(7, 12)) - segt.update(1,3,111) - print (segt.query(1, 15)) - segt.update(7,8,235) + print(segt.query(4, 6)) + print(segt.query(7, 11)) + print(segt.query(7, 12)) + segt.update(1, 3, 111) + print(segt.query(1, 15)) + segt.update(7, 8, 235) segt.showData() diff --git a/data_structures/binary tree/treap.py b/data_structures/binary tree/treap.py index 0399ff67030a..5d34abc3c931 100644 --- a/data_structures/binary tree/treap.py +++ b/data_structures/binary tree/treap.py @@ -7,6 +7,7 @@ class Node: Treap's node Treap is a binary tree by key and heap by priority """ + def __init__(self, key: int): self.key = key self.prior = random() diff --git a/data_structures/hashing/__init__.py b/data_structures/hashing/__init__.py index b96ddd478458..034faa2b5fa9 100644 --- a/data_structures/hashing/__init__.py +++ b/data_structures/hashing/__init__.py @@ -1,5 +1,6 @@ from .hash_table import HashTable + class QuadraticProbing(HashTable): def __init__(self): diff --git a/data_structures/hashing/double_hash.py b/data_structures/hashing/double_hash.py index 60098cda0ce1..d7cd8d2e2db6 100644 --- a/data_structures/hashing/double_hash.py +++ b/data_structures/hashing/double_hash.py @@ -1,20 +1,22 @@ #!/usr/bin/env python3 -from .hash_table import HashTable from number_theory.prime_numbers import next_prime, check_prime +from .hash_table import HashTable + class DoubleHash(HashTable): """ Hash Table example with open addressing and Double Hash """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def __hash_function_2(self, value, data): next_prime_gt = next_prime(value % self.size_table) \ - if not check_prime(value % self.size_table) else value % self.size_table #gt = bigger than + if not check_prime(value % self.size_table) else value % self.size_table # gt = bigger than return next_prime_gt - (data % next_prime_gt) def __hash_double_function(self, key, data, 
increment): @@ -27,7 +29,9 @@ def _colision_resolution(self, key, data=None): while self.values[new_key] is not None and self.values[new_key] != key: new_key = self.__hash_double_function(key, data, i) if \ self.balanced_factor() >= self.lim_charge else None - if new_key is None: break - else: i += 1 + if new_key is None: + break + else: + i += 1 return new_key diff --git a/data_structures/hashing/hash_table.py b/data_structures/hashing/hash_table.py index f0de128d1ad1..ff624dbdf323 100644 --- a/data_structures/hashing/hash_table.py +++ b/data_structures/hashing/hash_table.py @@ -61,7 +61,7 @@ def rehashing(self): survivor_values = [value for value in self.values if value is not None] self.size_table = next_prime(self.size_table, factor=2) self._keys.clear() - self.values = [None] * self.size_table #hell's pointers D: don't DRY ;/ + self.values = [None] * self.size_table # hell's pointers D: don't DRY ;/ map(self.insert_data, survivor_values) def insert_data(self, data): @@ -80,5 +80,3 @@ def insert_data(self, data): else: self.rehashing() self.insert_data(data) - - diff --git a/data_structures/hashing/hash_table_with_linked_list.py b/data_structures/hashing/hash_table_with_linked_list.py index 9689e4fc9fcf..6e5ed2828779 100644 --- a/data_structures/hashing/hash_table_with_linked_list.py +++ b/data_structures/hashing/hash_table_with_linked_list.py @@ -1,24 +1,23 @@ -from .hash_table import HashTable from collections import deque +from .hash_table import HashTable + class HashTableWithLinkedList(HashTable): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def _set_value(self, key, data): - self.values[key] = deque([]) if self.values[key] is None else self.values[key] + self.values[key] = deque([]) if self.values[key] is None else self.values[key] self.values[key].appendleft(data) self._keys[key] = self.values[key] def balanced_factor(self): - return sum([self.charge_factor - len(slot) for slot in self.values])\ + return sum([self.charge_factor 
- len(slot) for slot in self.values]) \ / self.size_table * self.charge_factor - + def _colision_resolution(self, key, data=None): if not (len(self.values[key]) == self.charge_factor and self.values.count(None) == 0): return key return super()._colision_resolution(key, data) - - diff --git a/data_structures/hashing/number_theory/prime_numbers.py b/data_structures/hashing/number_theory/prime_numbers.py index 8a521bc45758..778cda8a2843 100644 --- a/data_structures/hashing/number_theory/prime_numbers.py +++ b/data_structures/hashing/number_theory/prime_numbers.py @@ -5,25 +5,25 @@ def check_prime(number): - """ - it's not the best solution - """ - special_non_primes = [0,1,2] - if number in special_non_primes[:2]: - return 2 - elif number == special_non_primes[-1]: - return 3 - - return all([number % i for i in range(2, number)]) + """ + it's not the best solution + """ + special_non_primes = [0, 1, 2] + if number in special_non_primes[:2]: + return 2 + elif number == special_non_primes[-1]: + return 3 + + return all([number % i for i in range(2, number)]) def next_prime(value, factor=1, **kwargs): value = factor * value first_value_val = value - + while not check_prime(value): value += 1 if not ("desc" in kwargs.keys() and kwargs["desc"] is True) else -1 - + if value == first_value_val: return next_prime(value + 1, **kwargs) return value diff --git a/data_structures/hashing/quadratic_probing.py b/data_structures/hashing/quadratic_probing.py index f7a9ac1ae347..dd0af607cc66 100644 --- a/data_structures/hashing/quadratic_probing.py +++ b/data_structures/hashing/quadratic_probing.py @@ -7,17 +7,18 @@ class QuadraticProbing(HashTable): """ Basic Hash Table example with open addressing using Quadratic Probing """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def _colision_resolution(self, key, data=None): i = 1 - new_key = self.hash_function(key + i*i) + new_key = self.hash_function(key + i * i) while self.values[new_key] is not None \ and 
self.values[new_key] != key: i += 1 - new_key = self.hash_function(key + i*i) if not \ + new_key = self.hash_function(key + i * i) if not \ self.balanced_factor() >= self.lim_charge else None if new_key is None: diff --git a/data_structures/heap/heap.py b/data_structures/heap/heap.py index 39778f725c3a..8431116d6b24 100644 --- a/data_structures/heap/heap.py +++ b/data_structures/heap/heap.py @@ -3,89 +3,90 @@ from __future__ import print_function, division try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 -#This heap class start from here. + +# This heap class start from here. class Heap: - def __init__(self): #Default constructor of heap class. - self.h = [] - self.currsize = 0 - - def leftChild(self,i): - if 2*i+1 < self.currsize: - return 2*i+1 - return None - - def rightChild(self,i): - if 2*i+2 < self.currsize: - return 2*i+2 - return None - - def maxHeapify(self,node): - if node < self.currsize: - m = node - lc = self.leftChild(node) - rc = self.rightChild(node) - if lc is not None and self.h[lc] > self.h[m]: - m = lc - if rc is not None and self.h[rc] > self.h[m]: - m = rc - if m!=node: - temp = self.h[node] - self.h[node] = self.h[m] - self.h[m] = temp - self.maxHeapify(m) - - def buildHeap(self,a): #This function is used to build the heap from the data container 'a'. - self.currsize = len(a) - self.h = list(a) - for i in range(self.currsize//2,-1,-1): - self.maxHeapify(i) - - def getMax(self): #This function is used to get maximum value from the heap. - if self.currsize >= 1: - me = self.h[0] - temp = self.h[0] - self.h[0] = self.h[self.currsize-1] - self.h[self.currsize-1] = temp - self.currsize -= 1 - self.maxHeapify(0) - return me - return None - - def heapSort(self): #This function is used to sort the heap. 
- size = self.currsize - while self.currsize-1 >= 0: - temp = self.h[0] - self.h[0] = self.h[self.currsize-1] - self.h[self.currsize-1] = temp - self.currsize -= 1 - self.maxHeapify(0) - self.currsize = size - - def insert(self,data): #This function is used to insert data in the heap. - self.h.append(data) - curr = self.currsize - self.currsize+=1 - while self.h[curr] > self.h[curr/2]: - temp = self.h[curr/2] - self.h[curr/2] = self.h[curr] - self.h[curr] = temp - curr = curr/2 - - def display(self): #This function is used to print the heap. - print(self.h) + def __init__(self): # Default constructor of heap class. + self.h = [] + self.currsize = 0 -def main(): - l = list(map(int, raw_input().split())) - h = Heap() - h.buildHeap(l) - h.heapSort() - h.display() + def leftChild(self, i): + if 2 * i + 1 < self.currsize: + return 2 * i + 1 + return None + + def rightChild(self, i): + if 2 * i + 2 < self.currsize: + return 2 * i + 2 + return None + + def maxHeapify(self, node): + if node < self.currsize: + m = node + lc = self.leftChild(node) + rc = self.rightChild(node) + if lc is not None and self.h[lc] > self.h[m]: + m = lc + if rc is not None and self.h[rc] > self.h[m]: + m = rc + if m != node: + temp = self.h[node] + self.h[node] = self.h[m] + self.h[m] = temp + self.maxHeapify(m) + + def buildHeap(self, a): # This function is used to build the heap from the data container 'a'. + self.currsize = len(a) + self.h = list(a) + for i in range(self.currsize // 2, -1, -1): + self.maxHeapify(i) + + def getMax(self): # This function is used to get maximum value from the heap. + if self.currsize >= 1: + me = self.h[0] + temp = self.h[0] + self.h[0] = self.h[self.currsize - 1] + self.h[self.currsize - 1] = temp + self.currsize -= 1 + self.maxHeapify(0) + return me + return None -if __name__=='__main__': - main() + def heapSort(self): # This function is used to sort the heap. 
+ size = self.currsize + while self.currsize - 1 >= 0: + temp = self.h[0] + self.h[0] = self.h[self.currsize - 1] + self.h[self.currsize - 1] = temp + self.currsize -= 1 + self.maxHeapify(0) + self.currsize = size + + def insert(self, data): # This function is used to insert data in the heap. + self.h.append(data) + curr = self.currsize + self.currsize += 1 + while self.h[curr] > self.h[curr / 2]: + temp = self.h[curr / 2] + self.h[curr / 2] = self.h[curr] + self.h[curr] = temp + curr = curr / 2 + + def display(self): # This function is used to print the heap. + print(self.h) + + +def main(): + l = list(map(int, raw_input().split())) + h = Heap() + h.buildHeap(l) + h.heapSort() + h.display() +if __name__ == '__main__': + main() diff --git a/data_structures/linked_list/__init__.py b/data_structures/linked_list/__init__.py index 6d50f23c1f1a..a050adba42b2 100644 --- a/data_structures/linked_list/__init__.py +++ b/data_structures/linked_list/__init__.py @@ -3,6 +3,7 @@ def __init__(self, item, next): self.item = item self.next = next + class LinkedList: def __init__(self): self.head = None diff --git a/data_structures/linked_list/doubly_linked_list.py b/data_structures/linked_list/doubly_linked_list.py index 75b1f889dfc2..b00b4f52c82b 100644 --- a/data_structures/linked_list/doubly_linked_list.py +++ b/data_structures/linked_list/doubly_linked_list.py @@ -7,71 +7,74 @@ from __future__ import print_function -class LinkedList: #making main class named linked list +class LinkedList: # making main class named linked list def __init__(self): self.head = None self.tail = None - + def insertHead(self, x): - newLink = Link(x) #Create a new link with a value attached to it - if(self.isEmpty() == True): #Set the first element added to be the tail + newLink = Link(x) # Create a new link with a value attached to it + if (self.isEmpty() == True): # Set the first element added to be the tail self.tail = newLink else: - self.head.previous = newLink # newLink <-- currenthead(head) - 
newLink.next = self.head # newLink <--> currenthead(head) - self.head = newLink # newLink(head) <--> oldhead - + self.head.previous = newLink # newLink <-- currenthead(head) + newLink.next = self.head # newLink <--> currenthead(head) + self.head = newLink # newLink(head) <--> oldhead + def deleteHead(self): temp = self.head - self.head = self.head.next # oldHead <--> 2ndElement(head) - self.head.previous = None # oldHead --> 2ndElement(head) nothing pointing at it so the old head will be removed - if(self.head is None): - self.tail = None #if empty linked list + self.head = self.head.next # oldHead <--> 2ndElement(head) + self.head.previous = None # oldHead --> 2ndElement(head) nothing pointing at it so the old head will be removed + if (self.head is None): + self.tail = None # if empty linked list return temp - + def insertTail(self, x): newLink = Link(x) - newLink.next = None # currentTail(tail) newLink --> - self.tail.next = newLink # currentTail(tail) --> newLink --> - newLink.previous = self.tail #currentTail(tail) <--> newLink --> - self.tail = newLink # oldTail <--> newLink(tail) --> - + newLink.next = None # currentTail(tail) newLink --> + self.tail.next = newLink # currentTail(tail) --> newLink --> + newLink.previous = self.tail # currentTail(tail) <--> newLink --> + self.tail = newLink # oldTail <--> newLink(tail) --> + def deleteTail(self): temp = self.tail - self.tail = self.tail.previous # 2ndLast(tail) <--> oldTail --> None - self.tail.next = None # 2ndlast(tail) --> None + self.tail = self.tail.previous # 2ndLast(tail) <--> oldTail --> None + self.tail.next = None # 2ndlast(tail) --> None return temp - + def delete(self, x): current = self.head - - while(current.value != x): # Find the position to delete + + while (current.value != x): # Find the position to delete current = current.next - - if(current == self.head): + + if (current == self.head): self.deleteHead() - - elif(current == self.tail): + + elif (current == self.tail): self.deleteTail() - - 
else: #Before: 1 <--> 2(current) <--> 3 - current.previous.next = current.next # 1 --> 3 - current.next.previous = current.previous # 1 <--> 3 - - def isEmpty(self): #Will return True if the list is empty - return(self.head is None) - - def display(self): #Prints contents of the list + + else: # Before: 1 <--> 2(current) <--> 3 + current.previous.next = current.next # 1 --> 3 + current.next.previous = current.previous # 1 <--> 3 + + def isEmpty(self): # Will return True if the list is empty + return (self.head is None) + + def display(self): # Prints contents of the list current = self.head - while(current != None): + while (current != None): current.displayLink() - current = current.next + current = current.next print() + class Link: - next = None #This points to the link in front of the new link - previous = None #This points to the link behind the new link + next = None # This points to the link in front of the new link + previous = None # This points to the link behind the new link + def __init__(self, x): self.value = x + def displayLink(self): print("{}".format(self.value), end=" ") diff --git a/data_structures/linked_list/singly_linked_list.py b/data_structures/linked_list/singly_linked_list.py index 5ae97523b9a1..6cfaec235bee 100644 --- a/data_structures/linked_list/singly_linked_list.py +++ b/data_structures/linked_list/singly_linked_list.py @@ -9,21 +9,22 @@ def __init__(self, data): class Linked_List: def __init__(self): - self.Head = None # Initialize Head to None - + self.Head = None # Initialize Head to None + def insert_tail(self, data): - if(self.Head is None): self.insert_head(data) #If this is first node, call insert_head + if (self.Head is None): + self.insert_head(data) # If this is first node, call insert_head else: temp = self.Head - while(temp.next != None): #traverse to last node + while (temp.next != None): # traverse to last node temp = temp.next - temp.next = Node(data) #create node & link to tail + temp.next = Node(data) # create node & 
link to tail def insert_head(self, data): - newNod = Node(data) # create a new node + newNod = Node(data) # create a new node if self.Head != None: - newNod.next = self.Head # link newNode to head - self.Head = newNod # make NewNode as Head + newNod.next = self.Head # link newNode to head + self.Head = newNod # make NewNode as Head def printList(self): # print every node data tamp = self.Head @@ -37,16 +38,16 @@ def delete_head(self): # delete from head self.Head = self.Head.next temp.next = None return temp - + def delete_tail(self): # delete from tail tamp = self.Head if self.Head != None: - if(self.Head.next is None): # if Head is the only Node in the Linked List + if (self.Head.next is None): # if Head is the only Node in the Linked List self.Head = None else: while tamp.next.next is not None: # find the 2nd last element tamp = tamp.next - tamp.next, tamp = None, tamp.next #(2nd last element).next = None and tamp = last element + tamp.next, tamp = None, tamp.next # (2nd last element).next = None and tamp = last element return tamp def isEmpty(self): @@ -68,21 +69,22 @@ def reverse(self): # Return prev in order to put the head at the end self.Head = prev + def main(): A = Linked_List() print("Inserting 1st at Head") - a1=input() + a1 = input() A.insert_head(a1) print("Inserting 2nd at Head") - a2=input() + a2 = input() A.insert_head(a2) print("\nPrint List : ") A.printList() print("\nInserting 1st at Tail") - a3=input() + a3 = input() A.insert_tail(a3) print("Inserting 2nd at Tail") - a4=input() + a4 = input() A.insert_tail(a4) print("\nPrint List : ") A.printList() @@ -96,6 +98,7 @@ def main(): A.reverse() print("\nPrint List : ") A.printList() - + + if __name__ == '__main__': - main() + main() diff --git a/data_structures/linked_list/swapNodes.py b/data_structures/linked_list/swapNodes.py index ce2543bc46d8..9f9e37ccdeac 100644 --- a/data_structures/linked_list/swapNodes.py +++ b/data_structures/linked_list/swapNodes.py @@ -1,75 +1,72 @@ -class Node: - def 
__init__(self, data): - self.data = data; - self.next = None - - -class Linkedlist: - def __init__(self): - self.head = None - - def print_list(self): - temp = self.head - while temp is not None: - print(temp.data) - temp = temp.next - -# adding nodes - def push(self, new_data): - new_node = Node(new_data) - new_node.next = self.head - self.head = new_node - -# swapping nodes - def swapNodes(self, d1, d2): - prevD1 = None - prevD2 = None - if d1 == d2: - return - else: - # find d1 - D1 = self.head - while D1 is not None and D1.data != d1: - prevD1 = D1 - D1 = D1.next - # find d2 - D2 = self.head - while D2 is not None and D2.data != d2: - prevD2 = D2 - D2 = D2.next - if D1 is None and D2 is None: - return - # if D1 is head - if prevD1 is not None: - prevD1.next = D2 - else: - self.head = D2 - # if D2 is head - if prevD2 is not None: - prevD2.next = D1 - else: - self.head = D1 - temp = D1.next - D1.next = D2.next - D2.next = temp - -# swapping code ends here - - - -if __name__ == '__main__': - list = Linkedlist() - list.push(5) - list.push(4) - list.push(3) - list.push(2) - list.push(1) - - list.print_list() - - list.swapNodes(1, 4) - print("After swapping") - list.print_list() - - - +class Node: + def __init__(self, data): + self.data = data; + self.next = None + + +class Linkedlist: + def __init__(self): + self.head = None + + def print_list(self): + temp = self.head + while temp is not None: + print(temp.data) + temp = temp.next + + # adding nodes + def push(self, new_data): + new_node = Node(new_data) + new_node.next = self.head + self.head = new_node + + # swapping nodes + def swapNodes(self, d1, d2): + prevD1 = None + prevD2 = None + if d1 == d2: + return + else: + # find d1 + D1 = self.head + while D1 is not None and D1.data != d1: + prevD1 = D1 + D1 = D1.next + # find d2 + D2 = self.head + while D2 is not None and D2.data != d2: + prevD2 = D2 + D2 = D2.next + if D1 is None and D2 is None: + return + # if D1 is head + if prevD1 is not None: + prevD1.next = D2 
+ else: + self.head = D2 + # if D2 is head + if prevD2 is not None: + prevD2.next = D1 + else: + self.head = D1 + temp = D1.next + D1.next = D2.next + D2.next = temp + + +# swapping code ends here + + +if __name__ == '__main__': + list = Linkedlist() + list.push(5) + list.push(4) + list.push(3) + list.push(2) + list.push(1) + + list.print_list() + + list.swapNodes(1, 4) + print("After swapping") + list.print_list() diff --git a/data_structures/queue/double_ended_queue.py b/data_structures/queue/double_ended_queue.py index fdee64eb6ae0..26e6e74343e9 100644 --- a/data_structures/queue/double_ended_queue.py +++ b/data_structures/queue/double_ended_queue.py @@ -1,40 +1,41 @@ from __future__ import print_function -# Python code to demonstrate working of -# extend(), extendleft(), rotate(), reverse() - + # importing "collections" for deque operations import collections - + +# Python code to demonstrate working of +# extend(), extendleft(), rotate(), reverse() + # initializing deque -de = collections.deque([1, 2, 3,]) - +de = collections.deque([1, 2, 3, ]) + # using extend() to add numbers to right end # adds 4,5,6 to right end -de.extend([4,5,6]) - +de.extend([4, 5, 6]) + # printing modified deque -print ("The deque after extending deque at end is : ") -print (de) - +print("The deque after extending deque at end is : ") +print(de) + # using extendleft() to add numbers to left end # adds 7,8,9 to right end -de.extendleft([7,8,9]) - +de.extendleft([7, 8, 9]) + # printing modified deque -print ("The deque after extending deque at beginning is : ") -print (de) - +print("The deque after extending deque at beginning is : ") +print(de) + # using rotate() to rotate the deque # rotates by 3 to left de.rotate(-3) - + # printing modified deque -print ("The deque after rotating deque is : ") -print (de) - +print("The deque after rotating deque is : ") +print(de) + # using reverse() to reverse the deque de.reverse() - + # printing modified deque -print ("The deque after reversing 
deque is : ") -print (de) +print("The deque after reversing deque is : ") +print(de) diff --git a/data_structures/queue/queue_on_list.py b/data_structures/queue/queue_on_list.py index 2ec9bac8398a..8f57ddd63ab7 100644 --- a/data_structures/queue/queue_on_list.py +++ b/data_structures/queue/queue_on_list.py @@ -1,46 +1,52 @@ -"""Queue represented by a python list""" -class Queue(): - def __init__(self): - self.entries = [] - self.length = 0 - self.front=0 - - def __str__(self): - printed = '<' + str(self.entries)[1:-1] + '>' - return printed - - """Enqueues {@code item} - @param item - item to enqueue""" - def put(self, item): - self.entries.append(item) - self.length = self.length + 1 - - - """Dequeues {@code item} - @requirement: |self.length| > 0 - @return dequeued - item that was dequeued""" - def get(self): - self.length = self.length - 1 - dequeued = self.entries[self.front] - #self.front-=1 - #self.entries = self.entries[self.front:] - self.entries = self.entries[1:] - return dequeued - - """Rotates the queue {@code rotation} times - @param rotation - number of times to rotate queue""" - def rotate(self, rotation): - for i in range(rotation): - self.put(self.get()) - - """Enqueues {@code item} - @return item at front of self.entries""" - def front(self): - return self.entries[0] - - """Returns the length of this.entries""" - def size(self): - return self.length +"""Queue represented by a python list""" + + +class Queue(): + def __init__(self): + self.entries = [] + self.length = 0 + self.front = 0 + + def __str__(self): + printed = '<' + str(self.entries)[1:-1] + '>' + return printed + + """Enqueues {@code item} + @param item + item to enqueue""" + + def put(self, item): + self.entries.append(item) + self.length = self.length + 1 + + """Dequeues {@code item} + @requirement: |self.length| > 0 + @return dequeued + item that was dequeued""" + + def get(self): + self.length = self.length - 1 + dequeued = self.entries[self.front] + # self.front-=1 + # self.entries 
= self.entries[self.front:] + self.entries = self.entries[1:] + return dequeued + + """Rotates the queue {@code rotation} times + @param rotation + number of times to rotate queue""" + + def rotate(self, rotation): + for i in range(rotation): + self.put(self.get()) + + """Enqueues {@code item} + @return item at front of self.entries""" + + def front(self): + return self.entries[0] + + """Returns the length of this.entries""" + + def size(self): + return self.length diff --git a/data_structures/queue/queue_on_pseudo_stack.py b/data_structures/queue/queue_on_pseudo_stack.py index b69fbcc988f7..939ba66cca78 100644 --- a/data_structures/queue/queue_on_pseudo_stack.py +++ b/data_structures/queue/queue_on_pseudo_stack.py @@ -1,50 +1,57 @@ -"""Queue represented by a pseudo stack (represented by a list with pop and append)""" -class Queue(): - def __init__(self): - self.stack = [] - self.length = 0 - - def __str__(self): - printed = '<' + str(self.stack)[1:-1] + '>' - return printed - - """Enqueues {@code item} - @param item - item to enqueue""" - def put(self, item): - self.stack.append(item) - self.length = self.length + 1 - - """Dequeues {@code item} - @requirement: |self.length| > 0 - @return dequeued - item that was dequeued""" - def get(self): - self.rotate(1) - dequeued = self.stack[self.length-1] - self.stack = self.stack[:-1] - self.rotate(self.length-1) - self.length = self.length -1 - return dequeued - - """Rotates the queue {@code rotation} times - @param rotation - number of times to rotate queue""" - def rotate(self, rotation): - for i in range(rotation): - temp = self.stack[0] - self.stack = self.stack[1:] - self.put(temp) - self.length = self.length - 1 - - """Reports item at the front of self - @return item at front of self.stack""" - def front(self): - front = self.get() - self.put(front) - self.rotate(self.length-1) - return front - - """Returns the length of this.stack""" - def size(self): - return self.length +"""Queue represented by a pseudo stack 
(represented by a list with pop and append)""" + + +class Queue(): + def __init__(self): + self.stack = [] + self.length = 0 + + def __str__(self): + printed = '<' + str(self.stack)[1:-1] + '>' + return printed + + """Enqueues {@code item} + @param item + item to enqueue""" + + def put(self, item): + self.stack.append(item) + self.length = self.length + 1 + + """Dequeues {@code item} + @requirement: |self.length| > 0 + @return dequeued + item that was dequeued""" + + def get(self): + self.rotate(1) + dequeued = self.stack[self.length - 1] + self.stack = self.stack[:-1] + self.rotate(self.length - 1) + self.length = self.length - 1 + return dequeued + + """Rotates the queue {@code rotation} times + @param rotation + number of times to rotate queue""" + + def rotate(self, rotation): + for i in range(rotation): + temp = self.stack[0] + self.stack = self.stack[1:] + self.put(temp) + self.length = self.length - 1 + + """Reports item at the front of self + @return item at front of self.stack""" + + def front(self): + front = self.get() + self.put(front) + self.rotate(self.length - 1) + return front + + """Returns the length of this.stack""" + + def size(self): + return self.length diff --git a/data_structures/stacks/__init__.py b/data_structures/stacks/__init__.py index f7e92ae2d269..17b8ca2fe8f6 100644 --- a/data_structures/stacks/__init__.py +++ b/data_structures/stacks/__init__.py @@ -1,23 +1,23 @@ class Stack: - def __init__(self): - self.stack = [] - self.top = 0 + def __init__(self): + self.stack = [] + self.top = 0 - def is_empty(self): - return (self.top == 0) + def is_empty(self): + return (self.top == 0) - def push(self, item): - if self.top < len(self.stack): - self.stack[self.top] = item - else: - self.stack.append(item) + def push(self, item): + if self.top < len(self.stack): + self.stack[self.top] = item + else: + self.stack.append(item) - self.top += 1 + self.top += 1 - def pop(self): - if self.is_empty(): - return None - else: - self.top -= 1 - return 
self.stack[self.top] + def pop(self): + if self.is_empty(): + return None + else: + self.top -= 1 + return self.stack[self.top] diff --git a/data_structures/stacks/balanced_parentheses.py b/data_structures/stacks/balanced_parentheses.py index 3229d19c8621..30a4d0dbd4ab 100644 --- a/data_structures/stacks/balanced_parentheses.py +++ b/data_structures/stacks/balanced_parentheses.py @@ -1,5 +1,6 @@ -from __future__ import print_function from __future__ import absolute_import +from __future__ import print_function + from stack import Stack __author__ = 'Omkar Pathak' diff --git a/data_structures/stacks/infix_to_postfix_conversion.py b/data_structures/stacks/infix_to_postfix_conversion.py index 75211fed258d..ef4810501211 100644 --- a/data_structures/stacks/infix_to_postfix_conversion.py +++ b/data_structures/stacks/infix_to_postfix_conversion.py @@ -1,5 +1,6 @@ -from __future__ import print_function from __future__ import absolute_import +from __future__ import print_function + import string from .Stack import Stack @@ -38,7 +39,7 @@ def infix_to_postfix(expression): postfix.append(char) elif char not in {'(', ')'}: while (not stack.is_empty() - and precedence(char) <= precedence(stack.peek())): + and precedence(char) <= precedence(stack.peek())): postfix.append(stack.pop()) stack.push(char) elif char == '(': diff --git a/data_structures/stacks/infix_to_prefix_conversion.py b/data_structures/stacks/infix_to_prefix_conversion.py index da5fc261fb9f..8192b3f8b1fd 100644 --- a/data_structures/stacks/infix_to_prefix_conversion.py +++ b/data_structures/stacks/infix_to_prefix_conversion.py @@ -14,48 +14,56 @@ a+b^c (Infix) -> +a^bc (Prefix) """ + def infix_2_postfix(Infix): Stack = [] Postfix = [] - priority = {'^':3, '*':2, '/':2, '%':2, '+':1, '-':1} # Priority of each operator - print_width = len(Infix) if(len(Infix)>7) else 7 + priority = {'^': 3, '*': 2, '/': 2, '%': 2, '+': 1, '-': 1} # Priority of each operator + print_width = len(Infix) if (len(Infix) > 7) else 7 # 
Print table header for output - print('Symbol'.center(8), 'Stack'.center(print_width), 'Postfix'.center(print_width), sep = " | ") - print('-'*(print_width*3+7)) + print('Symbol'.center(8), 'Stack'.center(print_width), 'Postfix'.center(print_width), sep=" | ") + print('-' * (print_width * 3 + 7)) for x in Infix: - if(x.isalpha() or x.isdigit()): Postfix.append(x) # if x is Alphabet / Digit, add it to Postfix - elif(x == '('): Stack.append(x) # if x is "(" push to Stack - elif(x == ')'): # if x is ")" pop stack until "(" is encountered - while(Stack[-1] != '('): - Postfix.append( Stack.pop() ) #Pop stack & add the content to Postfix + if (x.isalpha() or x.isdigit()): + Postfix.append(x) # if x is Alphabet / Digit, add it to Postfix + elif (x == '('): + Stack.append(x) # if x is "(" push to Stack + elif (x == ')'): # if x is ")" pop stack until "(" is encountered + while (Stack[-1] != '('): + Postfix.append(Stack.pop()) # Pop stack & add the content to Postfix Stack.pop() else: - if(len(Stack)==0): Stack.append(x) #If stack is empty, push x to stack + if (len(Stack) == 0): + Stack.append(x) # If stack is empty, push x to stack else: - while( len(Stack) > 0 and priority[x] <= priority[Stack[-1]]): # while priority of x is not greater than priority of element in the stack - Postfix.append( Stack.pop() ) # pop stack & add to Postfix - Stack.append(x) # push x to stack + while (len(Stack) > 0 and priority[x] <= priority[Stack[-1]]): # while priority of x is not greater than priority of element in the stack + Postfix.append(Stack.pop()) # pop stack & add to Postfix + Stack.append(x) # push x to stack + + print(x.center(8), (''.join(Stack)).ljust(print_width), (''.join(Postfix)).ljust(print_width), sep=" | ") # Output in tabular format - print(x.center(8), (''.join(Stack)).ljust(print_width), (''.join(Postfix)).ljust(print_width), sep = " | ") # Output in tabular format + while (len(Stack) > 0): # while stack is not empty + Postfix.append(Stack.pop()) # pop stack & add to 
Postfix + print(' '.center(8), (''.join(Stack)).ljust(print_width), (''.join(Postfix)).ljust(print_width), sep=" | ") # Output in tabular format - while(len(Stack) > 0): # while stack is not empty - Postfix.append( Stack.pop() ) # pop stack & add to Postfix - print(' '.center(8), (''.join(Stack)).ljust(print_width), (''.join(Postfix)).ljust(print_width), sep = " | ") # Output in tabular format + return "".join(Postfix) # return Postfix as str - return "".join(Postfix) # return Postfix as str def infix_2_prefix(Infix): - Infix = list(Infix[::-1]) # reverse the infix equation - + Infix = list(Infix[::-1]) # reverse the infix equation + for i in range(len(Infix)): - if(Infix[i] == '('): Infix[i] = ')' # change "(" to ")" - elif(Infix[i] == ')'): Infix[i] = '(' # change ")" to "(" - + if (Infix[i] == '('): + Infix[i] = ')' # change "(" to ")" + elif (Infix[i] == ')'): + Infix[i] = '(' # change ")" to "(" + return (infix_2_postfix("".join(Infix)))[::-1] # call infix_2_postfix on Infix, return reverse of Postfix + if __name__ == "__main__": - Infix = input("\nEnter an Infix Equation = ") #Input an Infix equation - Infix = "".join(Infix.split()) #Remove spaces from the input + Infix = input("\nEnter an Infix Equation = ") # Input an Infix equation + Infix = "".join(Infix.split()) # Remove spaces from the input print("\n\t", Infix, "(Infix) -> ", infix_2_prefix(Infix), "(Prefix)") diff --git a/data_structures/stacks/next.py b/data_structures/stacks/next.py index bca83339592c..3fc22281ede5 100644 --- a/data_structures/stacks/next.py +++ b/data_structures/stacks/next.py @@ -1,17 +1,19 @@ from __future__ import print_function + + # Function to print element and NGE pair for all elements of list def printNGE(arr): - for i in range(0, len(arr), 1): - + next = -1 - for j in range(i+1, len(arr), 1): + for j in range(i + 1, len(arr), 1): if arr[i] < arr[j]: next = arr[j] break - + print(str(arr[i]) + " -- " + str(next)) - + + # Driver program to test above function -arr = 
[11,13,21,3] +arr = [11, 13, 21, 3] printNGE(arr) diff --git a/data_structures/stacks/postfix_evaluation.py b/data_structures/stacks/postfix_evaluation.py index 1786e71dd383..151f27070a50 100644 --- a/data_structures/stacks/postfix_evaluation.py +++ b/data_structures/stacks/postfix_evaluation.py @@ -19,28 +19,29 @@ import operator as op + def Solve(Postfix): Stack = [] - Div = lambda x, y: int(x/y) # integer division operation - Opr = {'^':op.pow, '*':op.mul, '/':Div, '+':op.add, '-':op.sub} # operators & their respective operation + Div = lambda x, y: int(x / y) # integer division operation + Opr = {'^': op.pow, '*': op.mul, '/': Div, '+': op.add, '-': op.sub} # operators & their respective operation # print table header - print('Symbol'.center(8), 'Action'.center(12), 'Stack', sep = " | ") - print('-'*(30+len(Postfix))) + print('Symbol'.center(8), 'Action'.center(12), 'Stack', sep=" | ") + print('-' * (30 + len(Postfix))) for x in Postfix: - if( x.isdigit() ): # if x in digit - Stack.append(x) # append x to stack - print(x.rjust(8), ('push('+x+')').ljust(12), ','.join(Stack), sep = " | ") # output in tabular format + if (x.isdigit()): # if x in digit + Stack.append(x) # append x to stack + print(x.rjust(8), ('push(' + x + ')').ljust(12), ','.join(Stack), sep=" | ") # output in tabular format else: - B = Stack.pop() # pop stack - print("".rjust(8), ('pop('+B+')').ljust(12), ','.join(Stack), sep = " | ") # output in tabular format + B = Stack.pop() # pop stack + print("".rjust(8), ('pop(' + B + ')').ljust(12), ','.join(Stack), sep=" | ") # output in tabular format - A = Stack.pop() # pop stack - print("".rjust(8), ('pop('+A+')').ljust(12), ','.join(Stack), sep = " | ") # output in tabular format + A = Stack.pop() # pop stack + print("".rjust(8), ('pop(' + A + ')').ljust(12), ','.join(Stack), sep=" | ") # output in tabular format - Stack.append( str(Opr[x](int(A), int(B))) ) # evaluate the 2 values poped from stack & push result to stack - print(x.rjust(8), 
('push('+A+x+B+')').ljust(12), ','.join(Stack), sep = " | ") # output in tabular format + Stack.append(str(Opr[x](int(A), int(B)))) # evaluate the 2 values poped from stack & push result to stack + print(x.rjust(8), ('push(' + A + x + B + ')').ljust(12), ','.join(Stack), sep=" | ") # output in tabular format return int(Stack[0]) diff --git a/data_structures/stacks/stack.py b/data_structures/stacks/stack.py index 7f979d927d08..dae37ef61e28 100644 --- a/data_structures/stacks/stack.py +++ b/data_structures/stacks/stack.py @@ -1,4 +1,5 @@ from __future__ import print_function + __author__ = 'Omkar Pathak' diff --git a/data_structures/stacks/stock_span_problem.py b/data_structures/stacks/stock_span_problem.py index 9628864edd10..508823cfa690 100644 --- a/data_structures/stacks/stock_span_problem.py +++ b/data_structures/stacks/stock_span_problem.py @@ -7,46 +7,49 @@ on the current day is less than or equal to its price on the given day. ''' from __future__ import print_function -def calculateSpan(price, S): - - n = len(price) + + +def calculateSpan(price, S): + n = len(price) # Create a stack and push index of fist element to it - st = [] - st.append(0) - + st = [] + st.append(0) + # Span value of first element is always 1 - S[0] = 1 - + S[0] = 1 + # Calculate span values for rest of the elements - for i in range(1, n): - + for i in range(1, n): + # Pop elements from stack whlie stack is not # empty and top of stack is smaller than price[i] - while( len(st) > 0 and price[st[0]] <= price[i]): - st.pop() - - # If stack becomes empty, then price[i] is greater + while (len(st) > 0 and price[st[0]] <= price[i]): + st.pop() + + # If stack becomes empty, then price[i] is greater # than all elements on left of it, i.e. price[0], # price[1], ..price[i-1]. 
Else the price[i] is # greater than elements after top of stack - S[i] = i+1 if len(st) <= 0 else (i - st[0]) - + S[i] = i + 1 if len(st) <= 0 else (i - st[0]) + # Push this element to stack - st.append(i) - - -# A utility function to print elements of array -def printArray(arr, n): - for i in range(0,n): - print (arr[i],end =" ") - - -# Driver program to test above function -price = [10, 4, 5, 90, 120, 80] -S = [0 for i in range(len(price)+1)] - + st.append(i) + + # A utility function to print elements of array + + +def printArray(arr, n): + for i in range(0, n): + print(arr[i], end=" ") + + # Driver program to test above function + + +price = [10, 4, 5, 90, 120, 80] +S = [0 for i in range(len(price) + 1)] + # Fill the span values in array S[] -calculateSpan(price, S) - +calculateSpan(price, S) + # Print the calculated span values -printArray(S, len(price)) +printArray(S, len(price)) diff --git a/data_structures/trie/trie.py b/data_structures/trie/trie.py index b6234c6704c6..d300c40bccbd 100644 --- a/data_structures/trie/trie.py +++ b/data_structures/trie/trie.py @@ -72,4 +72,5 @@ def test(): assert not root.find('apps') assert root.find('apple') + test() diff --git a/data_structures/union_find/tests_union_find.py b/data_structures/union_find/tests_union_find.py index b0708778ddbd..949fc9887062 100644 --- a/data_structures/union_find/tests_union_find.py +++ b/data_structures/union_find/tests_union_find.py @@ -1,7 +1,9 @@ from __future__ import absolute_import -from .union_find import UnionFind + import unittest +from .union_find import UnionFind + class TestUnionFind(unittest.TestCase): def test_init_with_valid_size(self): diff --git a/data_structures/union_find/union_find.py b/data_structures/union_find/union_find.py index 40eea67ac944..c28907ae6f4a 100644 --- a/data_structures/union_find/union_find.py +++ b/data_structures/union_find/union_find.py @@ -12,6 +12,7 @@ class UnionFind(): The elements are in range [0, size] """ + def __init__(self, size): if size <= 
0: raise ValueError("size should be greater than 0") @@ -22,10 +23,10 @@ def __init__(self, size): # in range [0, size]. It makes more sense. # Every set begins with only itself - self.root = [i for i in range(size+1)] + self.root = [i for i in range(size + 1)] # This is used for heuristic union by rank - self.weight = [0 for i in range(size+1)] + self.weight = [0 for i in range(size + 1)] def union(self, u, v): """ @@ -82,6 +83,6 @@ def _validate_element_range(self, u, element_name): """ if u < 0 or u > self.size: msg = ("element {0} with value {1} " - "should be in range [0~{2}]")\ - .format(element_name, u, self.size) + "should be in range [0~{2}]") \ + .format(element_name, u, self.size) raise ValueError(msg) diff --git a/dynamic_programming/Fractional_Knapsack.py b/dynamic_programming/Fractional_Knapsack.py index 74e85b4b4708..4907f4fd5dbf 100644 --- a/dynamic_programming/Fractional_Knapsack.py +++ b/dynamic_programming/Fractional_Knapsack.py @@ -1,12 +1,13 @@ -from itertools import accumulate from bisect import bisect +from itertools import accumulate + def fracKnapsack(vl, wt, W, n): + r = list(sorted(zip(vl, wt), key=lambda x: x[0] / x[1], reverse=True)) + vl, wt = [i[0] for i in r], [i[1] for i in r] + acc = list(accumulate(wt)) + k = bisect(acc, W) + return 0 if k == 0 else sum(vl[:k]) + (W - acc[k - 1]) * (vl[k]) / (wt[k]) if k != n else sum(vl[:k]) - r = list(sorted(zip(vl,wt), key=lambda x:x[0]/x[1],reverse=True)) - vl , wt = [i[0] for i in r],[i[1] for i in r] - acc=list(accumulate(wt)) - k = bisect(acc,W) - return 0 if k == 0 else sum(vl[:k])+(W-acc[k-1])*(vl[k])/(wt[k]) if k!=n else sum(vl[:k]) -print("%.0f"%fracKnapsack([60, 100, 120],[10, 20, 30],50,3)) +print("%.0f" % fracKnapsack([60, 100, 120], [10, 20, 30], 50, 3)) diff --git a/dynamic_programming/bitmask.py b/dynamic_programming/bitmask.py index 213b22fe9051..d97eb5d0e48a 100644 --- a/dynamic_programming/bitmask.py +++ b/dynamic_programming/bitmask.py @@ -10,81 +10,80 @@ """ from __future__ 
import print_function + from collections import defaultdict class AssignmentUsingBitmask: - def __init__(self,task_performed,total): - - self.total_tasks = total #total no of tasks (N) - + def __init__(self, task_performed, total): + + self.total_tasks = total # total no of tasks (N) + # DP table will have a dimension of (2^M)*N # initially all values are set to -1 - self.dp = [[-1 for i in range(total+1)] for j in range(2**len(task_performed))] - - self.task = defaultdict(list) #stores the list of persons for each task - - #finalmask is used to check if all persons are included by setting all bits to 1 - self.finalmask = (1< self.total_tasks: return 0 - #if case already considered - if self.dp[mask][taskno]!=-1: + # if case already considered + if self.dp[mask][taskno] != -1: return self.dp[mask][taskno] # Number of ways when we dont this task in the arrangement - total_ways_util = self.CountWaysUtil(mask,taskno+1) + total_ways_util = self.CountWaysUtil(mask, taskno + 1) # now assign the tasks one by one to all possible persons and recursively assign for the remaining tasks. 
if taskno in self.task: for p in self.task[taskno]: - + # if p is already given a task - if mask & (1<-1): + if (x == -1): + return y + 1 + elif (y == -1): + return x + 1 + elif (self.dp[x][y] > -1): return self.dp[x][y] else: - if (self.A[x]==self.B[y]): - self.dp[x][y] = self.__solveDP(x-1,y-1) + if (self.A[x] == self.B[y]): + self.dp[x][y] = self.__solveDP(x - 1, y - 1) else: - self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1)) + self.dp[x][y] = 1 + min(self.__solveDP(x, y - 1), self.__solveDP(x - 1, y), self.__solveDP(x - 1, y - 1)) return self.dp[x][y] def solve(self, A, B): - if isinstance(A,bytes): + if isinstance(A, bytes): A = A.decode('ascii') - if isinstance(B,bytes): + if isinstance(B, bytes): B = B.decode('ascii') self.A = str(A) @@ -50,26 +50,27 @@ def solve(self, A, B): self.__prepare__(len(A), len(B)) - return self.__solveDP(len(A)-1, len(B)-1) + return self.__solveDP(len(A) - 1, len(B) - 1) + if __name__ == '__main__': - try: - raw_input # Python 2 - except NameError: - raw_input = input # Python 3 + try: + raw_input # Python 2 + except NameError: + raw_input = input # Python 3 - solver = EditDistance() + solver = EditDistance() - print("****************** Testing Edit Distance DP Algorithm ******************") - print() + print("****************** Testing Edit Distance DP Algorithm ******************") + print() - print("Enter the first string: ", end="") - S1 = raw_input().strip() + print("Enter the first string: ", end="") + S1 = raw_input().strip() - print("Enter the second string: ", end="") - S2 = raw_input().strip() + print("Enter the second string: ", end="") + S2 = raw_input().strip() - print() - print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2))) - print() - print("*************** End of Testing Edit Distance DP Algorithm ***************") + print() + print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2))) + print() + print("*************** End of Testing Edit Distance DP 
Algorithm ***************") diff --git a/dynamic_programming/fast_fibonacci.py b/dynamic_programming/fast_fibonacci.py index cbc118467b3c..9b0197acd252 100644 --- a/dynamic_programming/fast_fibonacci.py +++ b/dynamic_programming/fast_fibonacci.py @@ -6,6 +6,7 @@ It's possible to calculate F(1000000) in less than a second. """ from __future__ import print_function + import sys diff --git a/dynamic_programming/fibonacci.py b/dynamic_programming/fibonacci.py index b453ce255853..b4a6dda1384b 100644 --- a/dynamic_programming/fibonacci.py +++ b/dynamic_programming/fibonacci.py @@ -30,7 +30,7 @@ def get(self, sequence_no=None): if __name__ == '__main__': print("\n********* Fibonacci Series Using Dynamic Programming ************\n") try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/dynamic_programming/floyd_warshall.py b/dynamic_programming/floyd_warshall.py index 038499ca03b6..06c1b6bf2d20 100644 --- a/dynamic_programming/floyd_warshall.py +++ b/dynamic_programming/floyd_warshall.py @@ -1,37 +1,39 @@ -import math - -class Graph: - - def __init__(self, N = 0): # a graph with Node 0,1,...,N-1 - self.N = N - self.W = [[math.inf for j in range(0,N)] for i in range(0,N)] # adjacency matrix for weight - self.dp = [[math.inf for j in range(0,N)] for i in range(0,N)] # dp[i][j] stores minimum distance from i to j - - def addEdge(self, u, v, w): - self.dp[u][v] = w - - def floyd_warshall(self): - for k in range(0,self.N): - for i in range(0,self.N): - for j in range(0,self.N): - self.dp[i][j] = min(self.dp[i][j], self.dp[i][k] + self.dp[k][j]) - - def showMin(self, u, v): - return self.dp[u][v] - -if __name__ == '__main__': - graph = Graph(5) - graph.addEdge(0,2,9) - graph.addEdge(0,4,10) - graph.addEdge(1,3,5) - graph.addEdge(2,3,7) - graph.addEdge(3,0,10) - graph.addEdge(3,1,2) - graph.addEdge(3,2,1) - graph.addEdge(3,4,6) - graph.addEdge(4,1,3) - graph.addEdge(4,2,4) - graph.addEdge(4,3,9) - graph.floyd_warshall() - 
graph.showMin(1,4) - graph.showMin(0,3) +import math + + +class Graph: + + def __init__(self, N=0): # a graph with Node 0,1,...,N-1 + self.N = N + self.W = [[math.inf for j in range(0, N)] for i in range(0, N)] # adjacency matrix for weight + self.dp = [[math.inf for j in range(0, N)] for i in range(0, N)] # dp[i][j] stores minimum distance from i to j + + def addEdge(self, u, v, w): + self.dp[u][v] = w + + def floyd_warshall(self): + for k in range(0, self.N): + for i in range(0, self.N): + for j in range(0, self.N): + self.dp[i][j] = min(self.dp[i][j], self.dp[i][k] + self.dp[k][j]) + + def showMin(self, u, v): + return self.dp[u][v] + + +if __name__ == '__main__': + graph = Graph(5) + graph.addEdge(0, 2, 9) + graph.addEdge(0, 4, 10) + graph.addEdge(1, 3, 5) + graph.addEdge(2, 3, 7) + graph.addEdge(3, 0, 10) + graph.addEdge(3, 1, 2) + graph.addEdge(3, 2, 1) + graph.addEdge(3, 4, 6) + graph.addEdge(4, 1, 3) + graph.addEdge(4, 2, 4) + graph.addEdge(4, 3, 9) + graph.floyd_warshall() + graph.showMin(1, 4) + graph.showMin(0, 3) diff --git a/dynamic_programming/integer_partition.py b/dynamic_programming/integer_partition.py index 7b27afebaa6c..0f1e6de59d88 100644 --- a/dynamic_programming/integer_partition.py +++ b/dynamic_programming/integer_partition.py @@ -1,45 +1,48 @@ from __future__ import print_function try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 try: - raw_input #Python 2 + raw_input # Python 2 except NameError: - raw_input = input #Python 3 + raw_input = input # Python 3 ''' The number of partitions of a number n into at least k parts equals the number of partitions into exactly k parts plus the number of partitions into at least k-1 parts. Subtracting 1 from each part of a partition of n into k parts gives a partition of n-k into k parts. These two facts together are used for this algorithm. 
''' + + def partition(m): - memo = [[0 for _ in xrange(m)] for _ in xrange(m+1)] - for i in xrange(m+1): - memo[i][0] = 1 + memo = [[0 for _ in xrange(m)] for _ in xrange(m + 1)] + for i in xrange(m + 1): + memo[i][0] = 1 - for n in xrange(m+1): - for k in xrange(1, m): - memo[n][k] += memo[n][k-1] - if n-k > 0: - memo[n][k] += memo[n-k-1][k] + for n in xrange(m + 1): + for k in xrange(1, m): + memo[n][k] += memo[n][k - 1] + if n - k > 0: + memo[n][k] += memo[n - k - 1][k] + + return memo[m][m - 1] - return memo[m][m-1] if __name__ == '__main__': - import sys - - if len(sys.argv) == 1: - try: - n = int(raw_input('Enter a number: ')) - print(partition(n)) - except ValueError: - print('Please enter a number.') - else: - try: - n = int(sys.argv[1]) - print(partition(n)) - except ValueError: - print('Please pass a number.') \ No newline at end of file + import sys + + if len(sys.argv) == 1: + try: + n = int(raw_input('Enter a number: ')) + print(partition(n)) + except ValueError: + print('Please enter a number.') + else: + try: + n = int(sys.argv[1]) + print(partition(n)) + except ValueError: + print('Please pass a number.') diff --git a/dynamic_programming/k_means_clustering_tensorflow.py b/dynamic_programming/k_means_clustering_tensorflow.py index b6813c6a22b3..e8f9674e93fa 100644 --- a/dynamic_programming/k_means_clustering_tensorflow.py +++ b/dynamic_programming/k_means_clustering_tensorflow.py @@ -1,5 +1,6 @@ -import tensorflow as tf from random import shuffle + +import tensorflow as tf from numpy import array @@ -14,24 +15,24 @@ def TFKMeansCluster(vectors, noofclusters): noofclusters = int(noofclusters) assert noofclusters < len(vectors) - #Find out the dimensionality + # Find out the dimensionality dim = len(vectors[0]) - #Will help select random centroids from among the available vectors + # Will help select random centroids from among the available vectors vector_indices = list(range(len(vectors))) shuffle(vector_indices) - #GRAPH OF COMPUTATION - #We 
initialize a new graph and set it as the default during each run - #of this algorithm. This ensures that as this function is called - #multiple times, the default graph doesn't keep getting crowded with - #unused ops and Variables from previous function calls. + # GRAPH OF COMPUTATION + # We initialize a new graph and set it as the default during each run + # of this algorithm. This ensures that as this function is called + # multiple times, the default graph doesn't keep getting crowded with + # unused ops and Variables from previous function calls. graph = tf.Graph() with graph.as_default(): - #SESSION OF COMPUTATION + # SESSION OF COMPUTATION sess = tf.Session() @@ -60,14 +61,14 @@ def TFKMeansCluster(vectors, noofclusters): assignment_value)) ##Now lets construct the node that will compute the mean - #The placeholder for the input + # The placeholder for the input mean_input = tf.placeholder("float", [None, dim]) - #The Node/op takes the input and computes a mean along the 0th - #dimension, i.e. the list of input vectors + # The Node/op takes the input and computes a mean along the 0th + # dimension, i.e. the list of input vectors mean_op = tf.reduce_mean(mean_input, 0) ##Node for computing Euclidean distances - #Placeholders for input + # Placeholders for input v1 = tf.placeholder("float", [dim]) v2 = tf.placeholder("float", [dim]) euclid_dist = tf.sqrt(tf.reduce_sum(tf.pow(tf.sub( @@ -75,7 +76,7 @@ def TFKMeansCluster(vectors, noofclusters): ##This node will figure out which cluster to assign a vector to, ##based on Euclidean distances of the vector from the centroids. - #Placeholder for input + # Placeholder for input centroid_distances = tf.placeholder("float", [noofclusters]) cluster_assignment = tf.argmin(centroid_distances, 0) @@ -87,55 +88,54 @@ def TFKMeansCluster(vectors, noofclusters): ##will be included in the initialization. 
init_op = tf.initialize_all_variables() - #Initialize all variables + # Initialize all variables sess.run(init_op) ##CLUSTERING ITERATIONS - #Now perform the Expectation-Maximization steps of K-Means clustering - #iterations. To keep things simple, we will only do a set number of - #iterations, instead of using a Stopping Criterion. + # Now perform the Expectation-Maximization steps of K-Means clustering + # iterations. To keep things simple, we will only do a set number of + # iterations, instead of using a Stopping Criterion. noofiterations = 100 for iteration_n in range(noofiterations): ##EXPECTATION STEP ##Based on the centroid locations till last iteration, compute ##the _expected_ centroid assignments. - #Iterate over each vector + # Iterate over each vector for vector_n in range(len(vectors)): vect = vectors[vector_n] - #Compute Euclidean distance between this vector and each - #centroid. Remember that this list cannot be named - #'centroid_distances', since that is the input to the - #cluster assignment node. + # Compute Euclidean distance between this vector and each + # centroid. Remember that this list cannot be named + # 'centroid_distances', since that is the input to the + # cluster assignment node. 
distances = [sess.run(euclid_dist, feed_dict={ v1: vect, v2: sess.run(centroid)}) for centroid in centroids] - #Now use the cluster assignment node, with the distances - #as the input - assignment = sess.run(cluster_assignment, feed_dict = { + # Now use the cluster assignment node, with the distances + # as the input + assignment = sess.run(cluster_assignment, feed_dict={ centroid_distances: distances}) - #Now assign the value to the appropriate state variable + # Now assign the value to the appropriate state variable sess.run(cluster_assigns[vector_n], feed_dict={ assignment_value: assignment}) ##MAXIMIZATION STEP - #Based on the expected state computed from the Expectation Step, - #compute the locations of the centroids so as to maximize the - #overall objective of minimizing within-cluster Sum-of-Squares + # Based on the expected state computed from the Expectation Step, + # compute the locations of the centroids so as to maximize the + # overall objective of minimizing within-cluster Sum-of-Squares for cluster_n in range(noofclusters): - #Collect all the vectors assigned to this cluster + # Collect all the vectors assigned to this cluster assigned_vects = [vectors[i] for i in range(len(vectors)) if sess.run(assignments[i]) == cluster_n] - #Compute new centroid location + # Compute new centroid location new_location = sess.run(mean_op, feed_dict={ mean_input: array(assigned_vects)}) - #Assign value to appropriate variable + # Assign value to appropriate variable sess.run(cent_assigns[cluster_n], feed_dict={ centroid_value: new_location}) - #Return centroids and assignments + # Return centroids and assignments centroids = sess.run(centroids) assignments = sess.run(assignments) return centroids, assignments - diff --git a/dynamic_programming/knapsack.py b/dynamic_programming/knapsack.py index 27d1cfed799b..1f6921e8c7f1 100644 --- a/dynamic_programming/knapsack.py +++ b/dynamic_programming/knapsack.py @@ -1,7 +1,9 @@ """ Given weights and values of n items, put 
these items in a knapsack of capacity W to get the maximum total value in the knapsack. """ -def MF_knapsack(i,wt,val,j): + + +def MF_knapsack(i, wt, val, j): ''' This code involves the concept of memory functions. Here we solve the subproblems which are needed unlike the below example @@ -10,33 +12,34 @@ def MF_knapsack(i,wt,val,j): global F # a global dp table for knapsack if F[i][j] < 0: if j < wt[i - 1]: - val = MF_knapsack(i - 1,wt,val,j) + val = MF_knapsack(i - 1, wt, val, j) else: - val = max(MF_knapsack(i - 1,wt,val,j),MF_knapsack(i - 1,wt,val,j - wt[i - 1]) + val[i - 1]) + val = max(MF_knapsack(i - 1, wt, val, j), MF_knapsack(i - 1, wt, val, j - wt[i - 1]) + val[i - 1]) F[i][j] = val return F[i][j] + def knapsack(W, wt, val, n): - dp = [[0 for i in range(W+1)]for j in range(n+1)] + dp = [[0 for i in range(W + 1)] for j in range(n + 1)] - for i in range(1,n+1): - for w in range(1,W+1): - if(wt[i-1]<=w): - dp[i][w] = max(val[i-1]+dp[i-1][w-wt[i-1]],dp[i-1][w]) + for i in range(1, n + 1): + for w in range(1, W + 1): + if (wt[i - 1] <= w): + dp[i][w] = max(val[i - 1] + dp[i - 1][w - wt[i - 1]], dp[i - 1][w]) else: - dp[i][w] = dp[i-1][w] + dp[i][w] = dp[i - 1][w] return dp[n][w] + if __name__ == '__main__': ''' Adding test case for knapsack ''' - val = [3,2,4,4] - wt = [4,3,2,3] + val = [3, 2, 4, 4] + wt = [4, 3, 2, 3] n = 4 w = 6 - F = [[0]*(w + 1)] + [[0] + [-1 for i in range(w + 1)] for j in range(n + 1)] - print(knapsack(w,wt,val,n)) - print(MF_knapsack(n,wt,val,w)) # switched the n and w - + F = [[0] * (w + 1)] + [[0] + [-1 for i in range(w + 1)] for j in range(n + 1)] + print(knapsack(w, wt, val, n)) + print(MF_knapsack(n, wt, val, w)) # switched the n and w diff --git a/dynamic_programming/longest_common_subsequence.py b/dynamic_programming/longest_common_subsequence.py index 0a4771cb2efd..4ed43bef51cb 100644 --- a/dynamic_programming/longest_common_subsequence.py +++ b/dynamic_programming/longest_common_subsequence.py @@ -6,10 +6,11 @@ from __future__ 
import print_function try: - xrange # Python 2 + xrange # Python 2 except NameError: xrange = range # Python 3 + def lcs_dp(x, y): # find the length of strings m = len(x) @@ -23,15 +24,16 @@ def lcs_dp(x, y): for j in range(n + 1): if i == 0 or j == 0: L[i][j] = 0 - elif x[i - 1] == y[ j - 1]: + elif x[i - 1] == y[j - 1]: L[i][j] = L[i - 1][j - 1] + 1 - seq.append(x[i -1]) + seq.append(x[i - 1]) else: L[i][j] = max(L[i - 1][j], L[i][j - 1]) # L[m][n] contains the length of LCS of X[0..n-1] & Y[0..m-1] return L[m][n], seq -if __name__=='__main__': + +if __name__ == '__main__': x = 'AGGTAB' y = 'GXTXAYB' print(lcs_dp(x, y)) diff --git a/dynamic_programming/longest_increasing_subsequence.py b/dynamic_programming/longest_increasing_subsequence.py index b6d165909e70..6bfab55977c9 100644 --- a/dynamic_programming/longest_increasing_subsequence.py +++ b/dynamic_programming/longest_increasing_subsequence.py @@ -9,34 +9,36 @@ ''' from __future__ import print_function -def longestSub(ARRAY): #This function is recursive - - ARRAY_LENGTH = len(ARRAY) - if(ARRAY_LENGTH <= 1): #If the array contains only one element, we return it (it's the stop condition of recursion) - return ARRAY - #Else - PIVOT=ARRAY[0] - isFound=False - i=1 - LONGEST_SUB=[] - while(not isFound and i= ARRAY[i] ] - TEMPORARY_ARRAY = longestSub(TEMPORARY_ARRAY) - if ( len(TEMPORARY_ARRAY) > len(LONGEST_SUB) ): - LONGEST_SUB = TEMPORARY_ARRAY - else: - i+=1 - - TEMPORARY_ARRAY = [ element for element in ARRAY[1:] if element >= PIVOT ] - TEMPORARY_ARRAY = [PIVOT] + longestSub(TEMPORARY_ARRAY) - if ( len(TEMPORARY_ARRAY) > len(LONGEST_SUB) ): - return TEMPORARY_ARRAY - else: - return LONGEST_SUB - -#Some examples - -print(longestSub([4,8,7,5,1,12,2,3,9])) -print(longestSub([9,8,7,6,5,7])) \ No newline at end of file + +def longestSub(ARRAY): # This function is recursive + + ARRAY_LENGTH = len(ARRAY) + if (ARRAY_LENGTH <= 1): # If the array contains only one element, we return it (it's the stop condition of 
recursion) + return ARRAY + # Else + PIVOT = ARRAY[0] + isFound = False + i = 1 + LONGEST_SUB = [] + while (not isFound and i < ARRAY_LENGTH): + if (ARRAY[i] < PIVOT): + isFound = True + TEMPORARY_ARRAY = [element for element in ARRAY[i:] if element >= ARRAY[i]] + TEMPORARY_ARRAY = longestSub(TEMPORARY_ARRAY) + if (len(TEMPORARY_ARRAY) > len(LONGEST_SUB)): + LONGEST_SUB = TEMPORARY_ARRAY + else: + i += 1 + + TEMPORARY_ARRAY = [element for element in ARRAY[1:] if element >= PIVOT] + TEMPORARY_ARRAY = [PIVOT] + longestSub(TEMPORARY_ARRAY) + if (len(TEMPORARY_ARRAY) > len(LONGEST_SUB)): + return TEMPORARY_ARRAY + else: + return LONGEST_SUB + + +# Some examples + +print(longestSub([4, 8, 7, 5, 1, 12, 2, 3, 9])) +print(longestSub([9, 8, 7, 6, 5, 7])) diff --git a/dynamic_programming/longest_increasing_subsequence_O(nlogn).py b/dynamic_programming/longest_increasing_subsequence_O(nlogn).py index 21122a04d69f..6da23b62a89d 100644 --- a/dynamic_programming/longest_increasing_subsequence_O(nlogn).py +++ b/dynamic_programming/longest_increasing_subsequence_O(nlogn).py @@ -1,41 +1,43 @@ from __future__ import print_function + + ############################# # Author: Aravind Kashyap # File: lis.py # comments: This programme outputs the Longest Strictly Increasing Subsequence in O(NLogN) # Where N is the Number of elements in the list ############################# -def CeilIndex(v,l,r,key): - while r-l > 1: - m = (l + r)/2 - if v[m] >= key: - r = m - else: - l = m - - return r - +def CeilIndex(v, l, r, key): + while r - l > 1: + m = (l + r) / 2 + if v[m] >= key: + r = m + else: + l = m + + return r + def LongestIncreasingSubsequenceLength(v): - if(len(v) == 0): - return 0 - - tail = [0]*len(v) - length = 1 - - tail[0] = v[0] - - for i in range(1,len(v)): - if v[i] < tail[0]: - tail[0] = v[i] - elif v[i] > tail[length-1]: - tail[length] = v[i] - length += 1 - else: - tail[CeilIndex(tail,-1,length-1,v[i])] = v[i] - - return length - + if (len(v) == 0): + return 0 + + tail = [0] 
* len(v) + length = 1 + + tail[0] = v[0] + + for i in range(1, len(v)): + if v[i] < tail[0]: + tail[0] = v[i] + elif v[i] > tail[length - 1]: + tail[length] = v[i] + length += 1 + else: + tail[CeilIndex(tail, -1, length - 1, v[i])] = v[i] + + return length + v = [2, 5, 3, 7, 11, 8, 10, 13, 6] print(LongestIncreasingSubsequenceLength(v)) diff --git a/dynamic_programming/longest_sub_array.py b/dynamic_programming/longest_sub_array.py index de2c88a8b525..2cfe270995db 100644 --- a/dynamic_programming/longest_sub_array.py +++ b/dynamic_programming/longest_sub_array.py @@ -17,12 +17,12 @@ def __init__(self, arr): print(("the input array is:", self.array)) def solve_sub_array(self): - rear = [int(self.array[0])]*len(self.array) - sum_value = [int(self.array[0])]*len(self.array) + rear = [int(self.array[0])] * len(self.array) + sum_value = [int(self.array[0])] * len(self.array) for i in range(1, len(self.array)): - sum_value[i] = max(int(self.array[i]) + sum_value[i-1], int(self.array[i])) - rear[i] = max(sum_value[i], rear[i-1]) - return rear[len(self.array)-1] + sum_value[i] = max(int(self.array[i]) + sum_value[i - 1], int(self.array[i])) + rear[i] = max(sum_value[i], rear[i - 1]) + return rear[len(self.array) - 1] if __name__ == '__main__': @@ -30,4 +30,3 @@ def solve_sub_array(self): array = SubArray(whole_array) re = array.solve_sub_array() print(("the results is:", re)) - diff --git a/dynamic_programming/matrix_chain_order.py b/dynamic_programming/matrix_chain_order.py index b8234a65acbe..5e8d70a3c40d 100644 --- a/dynamic_programming/matrix_chain_order.py +++ b/dynamic_programming/matrix_chain_order.py @@ -1,46 +1,54 @@ from __future__ import print_function import sys + ''' Dynamic Programming Implementation of Matrix Chain Multiplication Time Complexity: O(n^3) Space Complexity: O(n^2) ''' + + def MatrixChainOrder(array): - N=len(array) - Matrix=[[0 for x in range(N)] for x in range(N)] - Sol=[[0 for x in range(N)] for x in range(N)] + N = len(array) + Matrix = [[0 
for x in range(N)] for x in range(N)] + Sol = [[0 for x in range(N)] for x in range(N)] - for ChainLength in range(2,N): - for a in range(1,N-ChainLength+1): - b = a+ChainLength-1 + for ChainLength in range(2, N): + for a in range(1, N - ChainLength + 1): + b = a + ChainLength - 1 Matrix[a][b] = sys.maxsize - for c in range(a , b): - cost = Matrix[a][c] + Matrix[c+1][b] + array[a-1]*array[c]*array[b] + for c in range(a, b): + cost = Matrix[a][c] + Matrix[c + 1][b] + array[a - 1] * array[c] * array[b] if cost < Matrix[a][b]: Matrix[a][b] = cost Sol[a][b] = c - return Matrix , Sol -#Print order of matrix with Ai as Matrix -def PrintOptimalSolution(OptimalSolution,i,j): - if i==j: - print("A" + str(i),end = " ") + return Matrix, Sol + + +# Print order of matrix with Ai as Matrix +def PrintOptimalSolution(OptimalSolution, i, j): + if i == j: + print("A" + str(i), end=" ") else: - print("(",end = " ") - PrintOptimalSolution(OptimalSolution,i,OptimalSolution[i][j]) - PrintOptimalSolution(OptimalSolution,OptimalSolution[i][j]+1,j) - print(")",end = " ") + print("(", end=" ") + PrintOptimalSolution(OptimalSolution, i, OptimalSolution[i][j]) + PrintOptimalSolution(OptimalSolution, OptimalSolution[i][j] + 1, j) + print(")", end=" ") + def main(): - array=[30,35,15,5,10,20,25] - n=len(array) - #Size of matrix created from above array will be + array = [30, 35, 15, 5, 10, 20, 25] + n = len(array) + # Size of matrix created from above array will be # 30*35 35*15 15*5 5*10 10*20 20*25 - Matrix , OptimalSolution = MatrixChainOrder(array) + Matrix, OptimalSolution = MatrixChainOrder(array) + + print("No. of Operation required: " + str((Matrix[1][n - 1]))) + PrintOptimalSolution(OptimalSolution, 1, n - 1) + - print("No. 
of Operation required: "+str((Matrix[1][n-1]))) - PrintOptimalSolution(OptimalSolution,1,n-1) if __name__ == '__main__': main() diff --git a/dynamic_programming/max_sub_array.py b/dynamic_programming/max_sub_array.py index 5d48882427c0..3f07aa297fe8 100644 --- a/dynamic_programming/max_sub_array.py +++ b/dynamic_programming/max_sub_array.py @@ -4,57 +4,58 @@ from __future__ import print_function import time -import matplotlib.pyplot as plt from random import randint -def find_max_sub_array(A,low,high): - if low==high: - return low,high,A[low] - else : - mid=(low+high)//2 - left_low,left_high,left_sum=find_max_sub_array(A,low,mid) - right_low,right_high,right_sum=find_max_sub_array(A,mid+1,high) - cross_left,cross_right,cross_sum=find_max_cross_sum(A,low,mid,high) - if left_sum>=right_sum and left_sum>=cross_sum: - return left_low,left_high,left_sum - elif right_sum>=left_sum and right_sum>=cross_sum : - return right_low,right_high,right_sum + +import matplotlib.pyplot as plt + + +def find_max_sub_array(A, low, high): + if low == high: + return low, high, A[low] + else: + mid = (low + high) // 2 + left_low, left_high, left_sum = find_max_sub_array(A, low, mid) + right_low, right_high, right_sum = find_max_sub_array(A, mid + 1, high) + cross_left, cross_right, cross_sum = find_max_cross_sum(A, low, mid, high) + if left_sum >= right_sum and left_sum >= cross_sum: + return left_low, left_high, left_sum + elif right_sum >= left_sum and right_sum >= cross_sum: + return right_low, right_high, right_sum else: - return cross_left,cross_right,cross_sum - -def find_max_cross_sum(A,low,mid,high): - left_sum,max_left=-999999999,-1 - right_sum,max_right=-999999999,-1 - summ=0 - for i in range(mid,low-1,-1): - summ+=A[i] + return cross_left, cross_right, cross_sum + + +def find_max_cross_sum(A, low, mid, high): + left_sum, max_left = -999999999, -1 + right_sum, max_right = -999999999, -1 + summ = 0 + for i in range(mid, low - 1, -1): + summ += A[i] if summ > left_sum: - 
left_sum=summ - max_left=i - summ=0 - for i in range(mid+1,high+1): - summ+=A[i] + left_sum = summ + max_left = i + summ = 0 + for i in range(mid + 1, high + 1): + summ += A[i] if summ > right_sum: - right_sum=summ - max_right=i - return max_left,max_right,(left_sum+right_sum) - - -if __name__=='__main__': - inputs=[10,100,1000,10000,50000,100000,200000,300000,400000,500000] - tim=[] - for i in inputs: - li=[randint(1,i) for j in range(i)] - strt=time.time() - (find_max_sub_array(li,0,len(li)-1)) - end=time.time() - tim.append(end-strt) - print("No of Inputs Time Taken") - for i in range(len(inputs)): - print(inputs[i],'\t\t',tim[i]) - plt.plot(inputs,tim) - plt.xlabel("Number of Inputs");plt.ylabel("Time taken in seconds ") - plt.show() + right_sum = summ + max_right = i + return max_left, max_right, (left_sum + right_sum) - - +if __name__ == '__main__': + inputs = [10, 100, 1000, 10000, 50000, 100000, 200000, 300000, 400000, 500000] + tim = [] + for i in inputs: + li = [randint(1, i) for j in range(i)] + strt = time.time() + (find_max_sub_array(li, 0, len(li) - 1)) + end = time.time() + tim.append(end - strt) + print("No of Inputs Time Taken") + for i in range(len(inputs)): + print(inputs[i], '\t\t', tim[i]) + plt.plot(inputs, tim) + plt.xlabel("Number of Inputs"); + plt.ylabel("Time taken in seconds ") + plt.show() diff --git a/dynamic_programming/minimum_partition.py b/dynamic_programming/minimum_partition.py index 18aa1faa2fa6..fc412823d316 100644 --- a/dynamic_programming/minimum_partition.py +++ b/dynamic_programming/minimum_partition.py @@ -1,28 +1,30 @@ """ Partition a set into two subsets such that the difference of subset sums is minimum """ + + def findMin(arr): n = len(arr) s = sum(arr) - dp = [[False for x in range(s+1)]for y in range(n+1)] + dp = [[False for x in range(s + 1)] for y in range(n + 1)] - for i in range(1, n+1): + for i in range(1, n + 1): dp[i][0] = True - for i in range(1, s+1): + for i in range(1, s + 1): dp[0][i] = False - for i in 
range(1, n+1): - for j in range(1, s+1): - dp[i][j]= dp[i][j-1] + for i in range(1, n + 1): + for j in range(1, s + 1): + dp[i][j] = dp[i][j - 1] - if (arr[i-1] <= j): - dp[i][j] = dp[i][j] or dp[i-1][j-arr[i-1]] + if (arr[i - 1] <= j): + dp[i][j] = dp[i][j] or dp[i - 1][j - arr[i - 1]] - for j in range(int(s/2), -1, -1): + for j in range(int(s / 2), -1, -1): if dp[n][j] == True: - diff = s-2*j + diff = s - 2 * j break; return diff diff --git a/dynamic_programming/rod_cutting.py b/dynamic_programming/rod_cutting.py index 34350cb8202b..1cb83c8e3fce 100644 --- a/dynamic_programming/rod_cutting.py +++ b/dynamic_programming/rod_cutting.py @@ -18,41 +18,41 @@ Choose the maximum price we can get. """ + def CutRod(n): - if(n == 1): - #Cannot cut rod any further + if (n == 1): + # Cannot cut rod any further return prices[1] - noCut = prices[n] #The price you get when you don't cut the rod - yesCut = [-1 for x in range(n)] #The prices for the different cutting options + noCut = prices[n] # The price you get when you don't cut the rod + yesCut = [-1 for x in range(n)] # The prices for the different cutting options - for i in range(1,n): - if(solutions[i] == -1): - #We haven't calulated solution for length i yet. - #We know we sell the part of length i so we get prices[i]. - #We just need to know how to sell rod of length n-i - yesCut[i] = prices[i] + CutRod(n-i) + for i in range(1, n): + if (solutions[i] == -1): + # We haven't calulated solution for length i yet. + # We know we sell the part of length i so we get prices[i]. + # We just need to know how to sell rod of length n-i + yesCut[i] = prices[i] + CutRod(n - i) else: - #We have calculated solution for length i. - #We add the two prices. - yesCut[i] = prices[i] + solutions[n-i] + # We have calculated solution for length i. + # We add the two prices. + yesCut[i] = prices[i] + solutions[n - i] - #We need to find the highest price in order to sell more efficiently. 
- #We have to choose between noCut and the prices in yesCut. - m = noCut #Initialize max to noCut + # We need to find the highest price in order to sell more efficiently. + # We have to choose between noCut and the prices in yesCut. + m = noCut # Initialize max to noCut for i in range(n): - if(yesCut[i] > m): + if (yesCut[i] > m): m = yesCut[i] solutions[n] = m return m - ### EXAMPLE ### length = 5 -#The first price, 0, is for when we have no rod. +# The first price, 0, is for when we have no rod. prices = [0, 1, 3, 7, 9, 11, 13, 17, 21, 21, 30] -solutions = [-1 for x in range(length+1)] +solutions = [-1 for x in range(length + 1)] print(CutRod(length)) diff --git a/dynamic_programming/subset_generation.py b/dynamic_programming/subset_generation.py index 4b7a2bf87fd5..a5525b5176d9 100644 --- a/dynamic_programming/subset_generation.py +++ b/dynamic_programming/subset_generation.py @@ -1,39 +1,43 @@ # python program to print all subset combination of n element in given set of r element . -#arr[] ---> Input Array -#data[] ---> Temporary array to store current combination +# arr[] ---> Input Array +# data[] ---> Temporary array to store current combination # start & end ---> Staring and Ending indexes in arr[] # index ---> Current index in data[] -#r ---> Size of a combination to be printed -def combinationUtil(arr,n,r,index,data,i): -#Current combination is ready to be printed, -# print it - if(index == r): - for j in range(r): - print(data[j],end =" ") - print(" ") - return -# When no more elements are there to put in data[] - if(i >= n): - return -#current is included, put next at next -# location - data[index] = arr[i] - combinationUtil(arr,n,r,index+1,data,i+1) - # current is excluded, replace it with - # next (Note that i+1 is passed, but - # index is not changed) - combinationUtil(arr,n,r,index,data,i+1) - # The main function that prints all combinations - #of size r in arr[] of size n. 
This function - #mainly uses combinationUtil() -def printcombination(arr,n,r): -# A temporary array to store all combination -# one by one - data = [0]*r -#Print all combination using temprary -#array 'data[]' - combinationUtil(arr,n,r,0,data,0) +# r ---> Size of a combination to be printed +def combinationUtil(arr, n, r, index, data, i): + # Current combination is ready to be printed, + # print it + if (index == r): + for j in range(r): + print(data[j], end=" ") + print(" ") + return + # When no more elements are there to put in data[] + if (i >= n): + return + # current is included, put next at next + # location + data[index] = arr[i] + combinationUtil(arr, n, r, index + 1, data, i + 1) + # current is excluded, replace it with + # next (Note that i+1 is passed, but + # index is not changed) + combinationUtil(arr, n, r, index, data, i + 1) + # The main function that prints all combinations + + +# of size r in arr[] of size n. This function +# mainly uses combinationUtil() +def printcombination(arr, n, r): + # A temporary array to store all combination + # one by one + data = [0] * r + # Print all combination using temprary + # array 'data[]' + combinationUtil(arr, n, r, 0, data, 0) + + # Driver function to check for above function -arr = [10,20,30,40,50] -printcombination(arr,len(arr),3) -#This code is contributed by Ambuj sahu +arr = [10, 20, 30, 40, 50] +printcombination(arr, len(arr), 3) +# This code is contributed by Ambuj sahu diff --git a/file_transfer_protocol/ftp_client_server.py b/file_transfer_protocol/ftp_client_server.py index 414c336dee9f..ff051f372f7b 100644 --- a/file_transfer_protocol/ftp_client_server.py +++ b/file_transfer_protocol/ftp_client_server.py @@ -1,17 +1,17 @@ # server -import socket # Import socket module +import socket # Import socket module -port = 60000 # Reserve a port for your service. 
-s = socket.socket() # Create a socket object -host = socket.gethostname() # Get local machine name -s.bind((host, port)) # Bind to the port -s.listen(5) # Now wait for client connection. +port = 60000 # Reserve a port for your service. +s = socket.socket() # Create a socket object +host = socket.gethostname() # Get local machine name +s.bind((host, port)) # Bind to the port +s.listen(5) # Now wait for client connection. print('Server listening....') while True: - conn, addr = s.accept() # Establish connection with client. + conn, addr = s.accept() # Establish connection with client. print('Got connection from', addr) data = conn.recv(1024) print('Server received', repr(data)) @@ -28,14 +28,13 @@ conn.send('Thank you for connecting') conn.close() - # client side server -import socket # Import socket module +import socket # Import socket module -s = socket.socket() # Create a socket object -host = socket.gethostname() # Get local machine name -port = 60000 # Reserve a port for your service. +s = socket.socket() # Create a socket object +host = socket.gethostname() # Get local machine name +port = 60000 # Reserve a port for your service. 
s.connect((host, port)) s.send("Hello server!") diff --git a/file_transfer_protocol/ftp_send_receive.py b/file_transfer_protocol/ftp_send_receive.py index 6a9819ef3f21..ae6c3e0098af 100644 --- a/file_transfer_protocol/ftp_send_receive.py +++ b/file_transfer_protocol/ftp_send_receive.py @@ -9,6 +9,7 @@ """ from ftplib import FTP + ftp = FTP('xxx.xxx.x.x') # Enter the ip address or the domain name here ftp.login(user='username', passwd='password') ftp.cwd('/Enter the directory here/') @@ -18,19 +19,22 @@ Enter the location of the file where the file is received """ + def ReceiveFile(): - FileName = 'example.txt' """ Enter the location of the file """ - with open(FileName, 'wb') as LocalFile: - ftp.retrbinary('RETR ' + FileName, LocalFile.write, 1024) - ftp.quit() + FileName = 'example.txt' """ Enter the location of the file """ + with open(FileName, 'wb') as LocalFile: + ftp.retrbinary('RETR ' + FileName, LocalFile.write, 1024) + ftp.quit() + """ The file which will be sent via the FTP server The file send will be send to the current working directory """ + def SendFile(): - FileName = 'example.txt' """ Enter the name of the file """ - with open(FileName, 'rb') as LocalFile: - ftp.storbinary('STOR ' + FileName, LocalFile) - ftp.quit() + FileName = 'example.txt' """ Enter the name of the file """ + with open(FileName, 'rb') as LocalFile: + ftp.storbinary('STOR ' + FileName, LocalFile) + ftp.quit() diff --git a/graphs/BFS.py b/graphs/BFS.py index bf9b572cec50..1f5d4b8b3384 100644 --- a/graphs/BFS.py +++ b/graphs/BFS.py @@ -14,8 +14,6 @@ """ -import collections - def bfs(graph, start): explored, queue = set(), [start] # collections.deque([start]) diff --git a/graphs/DFS.py b/graphs/DFS.py index c9843ca25382..77d486072407 100644 --- a/graphs/DFS.py +++ b/graphs/DFS.py @@ -19,7 +19,7 @@ def dfs(graph, start): explored.add(start) while stack: v = stack.pop() # one difference from BFS is to pop last element here instead of first one - + if v in explored: continue diff --git 
a/graphs/Directed_and_Undirected_(Weighted)_Graph.py b/graphs/Directed_and_Undirected_(Weighted)_Graph.py index a31a4a96d6d0..9acecde8cfe0 100644 --- a/graphs/Directed_and_Undirected_(Weighted)_Graph.py +++ b/graphs/Directed_and_Undirected_(Weighted)_Graph.py @@ -1,472 +1,477 @@ -from collections import deque -import random as rand import math as math +import random as rand import time +from collections import deque + # the dfault weight is 1 if not assigend but all the implementation is weighted class DirectedGraph: - def __init__(self): - self.graph = {} - - # adding vertices and edges - # adding the weight is optional - # handels repetition - def add_pair(self, u, v, w = 1): - if self.graph.get(u): - if self.graph[u].count([w,v]) == 0: - self.graph[u].append([w, v]) - else: - self.graph[u] = [[w, v]] - if not self.graph.get(v): - self.graph[v] = [] - - def all_nodes(self): - return list(self.graph) - - # handels if the input does not exist - def remove_pair(self, u, v): - if self.graph.get(u): - for _ in self.graph[u]: - if _[1] == v: - self.graph[u].remove(_) - - # if no destination is meant the defaut value is -1 - def dfs(self, s = -2, d = -1): - if s == d: - return [] - stack = [] - visited = [] - if s == -2: - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - ss = s - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) < 1: - if __[1] == d: - visited.append(d) - return visited - else: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return visited - - # c is the count of nodes you want and if you leave it or pass -1 to the funtion the count - # will be random from 10 to 10000 - def fill_graph_randomly(self, c = 
-1): - if c == -1: - c = (math.floor(rand.random() * 10000)) + 10 - for _ in range(c): - # every vertex has max 100 edges - e = math.floor(rand.random() * 102) + 1 - for __ in range(e): - n = math.floor(rand.random() * (c)) + 1 - if n == _: - continue - self.add_pair(_, n, 1) - - def bfs(self, s = -2): - d = deque() - visited = [] - if s == -2: - s = list(self.graph.keys())[0] - d.append(s) - visited.append(s) - while d: - s = d.popleft() - if len(self.graph[s]) != 0: - for __ in self.graph[s]: - if visited.count(__[1]) < 1: - d.append(__[1]) - visited.append(__[1]) - return visited - def in_degree(self, u): - count = 0 - for _ in self.graph: - for __ in self.graph[_]: - if __[1] == u: - count += 1 - return count - - def out_degree(self, u): - return len(self.graph[u]) - - def topological_sort(self, s = -2): - stack = [] - visited = [] - if s == -2: - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - ss = s - sorted_nodes = [] - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) < 1: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - sorted_nodes.append(stack.pop()) - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return sorted_nodes - - def cycle_nodes(self): - stack = [] - visited = [] - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - parent = -2 - indirect_parents = [] - ss = s - on_the_way_back = False - anticipating_nodes = set() - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: - l = len(stack) - 1 - while True and l >= 0: - if stack[l] == __[1]: - 
anticipating_nodes.add(__[1]) - break - else: - anticipating_nodes.add(stack[l]) - l -= 1 - if visited.count(__[1]) < 1: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - on_the_way_back = True - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - on_the_way_back = False - indirect_parents.append(parent) - parent = s - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return list(anticipating_nodes) - - def has_cycle(self): - stack = [] - visited = [] - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - parent = -2 - indirect_parents = [] - ss = s - on_the_way_back = False - anticipating_nodes = set() - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: - l = len(stack) - 1 - while True and l >= 0: - if stack[l] == __[1]: - anticipating_nodes.add(__[1]) - break - else: - return True - anticipating_nodes.add(stack[l]) - l -= 1 - if visited.count(__[1]) < 1: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - on_the_way_back = True - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - on_the_way_back = False - indirect_parents.append(parent) - parent = s - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return False - - def dfs_time(self, s = -2, e = -1): - begin = time.time() - self.dfs(s,e) - end = time.time() - return end - begin - - def bfs_time(self, s = -2): - begin = time.time() - self.bfs(s) - end = time.time() - return end - begin + def __init__(self): + self.graph = {} + + # adding vertices and edges + # adding the weight is optional + # handels repetition + def add_pair(self, u, v, w=1): + if 
self.graph.get(u): + if self.graph[u].count([w, v]) == 0: + self.graph[u].append([w, v]) + else: + self.graph[u] = [[w, v]] + if not self.graph.get(v): + self.graph[v] = [] + + def all_nodes(self): + return list(self.graph) + + # handels if the input does not exist + def remove_pair(self, u, v): + if self.graph.get(u): + for _ in self.graph[u]: + if _[1] == v: + self.graph[u].remove(_) + + # if no destination is meant the defaut value is -1 + def dfs(self, s=-2, d=-1): + if s == d: + return [] + stack = [] + visited = [] + if s == -2: + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + ss = s + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) < 1: + if __[1] == d: + visited.append(d) + return visited + else: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return visited + + # c is the count of nodes you want and if you leave it or pass -1 to the funtion the count + # will be random from 10 to 10000 + def fill_graph_randomly(self, c=-1): + if c == -1: + c = (math.floor(rand.random() * 10000)) + 10 + for _ in range(c): + # every vertex has max 100 edges + e = math.floor(rand.random() * 102) + 1 + for __ in range(e): + n = math.floor(rand.random() * (c)) + 1 + if n == _: + continue + self.add_pair(_, n, 1) + + def bfs(self, s=-2): + d = deque() + visited = [] + if s == -2: + s = list(self.graph.keys())[0] + d.append(s) + visited.append(s) + while d: + s = d.popleft() + if len(self.graph[s]) != 0: + for __ in self.graph[s]: + if visited.count(__[1]) < 1: + d.append(__[1]) + visited.append(__[1]) + return visited + + def in_degree(self, u): + count = 0 + for _ in self.graph: + for __ in self.graph[_]: + if __[1] == u: + 
count += 1 + return count + + def out_degree(self, u): + return len(self.graph[u]) + + def topological_sort(self, s=-2): + stack = [] + visited = [] + if s == -2: + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + ss = s + sorted_nodes = [] + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) < 1: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + sorted_nodes.append(stack.pop()) + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return sorted_nodes + + def cycle_nodes(self): + stack = [] + visited = [] + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + parent = -2 + indirect_parents = [] + ss = s + on_the_way_back = False + anticipating_nodes = set() + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: + l = len(stack) - 1 + while True and l >= 0: + if stack[l] == __[1]: + anticipating_nodes.add(__[1]) + break + else: + anticipating_nodes.add(stack[l]) + l -= 1 + if visited.count(__[1]) < 1: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + on_the_way_back = True + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + on_the_way_back = False + indirect_parents.append(parent) + parent = s + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return list(anticipating_nodes) + + def has_cycle(self): + stack = [] + visited = [] + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + parent = -2 + indirect_parents = [] + ss = s + 
on_the_way_back = False + anticipating_nodes = set() + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: + l = len(stack) - 1 + while True and l >= 0: + if stack[l] == __[1]: + anticipating_nodes.add(__[1]) + break + else: + return True + anticipating_nodes.add(stack[l]) + l -= 1 + if visited.count(__[1]) < 1: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + on_the_way_back = True + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + on_the_way_back = False + indirect_parents.append(parent) + parent = s + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return False + + def dfs_time(self, s=-2, e=-1): + begin = time.time() + self.dfs(s, e) + end = time.time() + return end - begin + + def bfs_time(self, s=-2): + begin = time.time() + self.bfs(s) + end = time.time() + return end - begin + class Graph: - def __init__(self): - self.graph = {} - - # adding vertices and edges - # adding the weight is optional - # handels repetition - def add_pair(self, u, v, w = 1): - # check if the u exists - if self.graph.get(u): - # if there already is a edge - if self.graph[u].count([w,v]) == 0: - self.graph[u].append([w, v]) - else: - # if u does not exist - self.graph[u] = [[w, v]] - # add the other way - if self.graph.get(v): - # if there already is a edge - if self.graph[v].count([w,u]) == 0: - self.graph[v].append([w, u]) - else: - # if u does not exist - self.graph[v] = [[w, u]] - - # handels if the input does not exist - def remove_pair(self, u, v): - if self.graph.get(u): - for _ in self.graph[u]: - if _[1] == v: - self.graph[u].remove(_) - # the other way round - if self.graph.get(v): - for _ in self.graph[v]: - if _[1] == u: - self.graph[v].remove(_) - - # if no 
destination is meant the defaut value is -1 - def dfs(self, s = -2, d = -1): - if s == d: - return [] - stack = [] - visited = [] - if s == -2: - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - ss = s - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) < 1: - if __[1] == d: - visited.append(d) - return visited - else: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return visited - - # c is the count of nodes you want and if you leave it or pass -1 to the funtion the count - # will be random from 10 to 10000 - def fill_graph_randomly(self, c = -1): - if c == -1: - c = (math.floor(rand.random() * 10000)) + 10 - for _ in range(c): - # every vertex has max 100 edges - e = math.floor(rand.random() * 102) + 1 - for __ in range(e): - n = math.floor(rand.random() * (c)) + 1 - if n == _: - continue - self.add_pair(_, n, 1) - - def bfs(self, s = -2): - d = deque() - visited = [] - if s == -2: - s = list(self.graph.keys())[0] - d.append(s) - visited.append(s) - while d: - s = d.popleft() - if len(self.graph[s]) != 0: - for __ in self.graph[s]: - if visited.count(__[1]) < 1: - d.append(__[1]) - visited.append(__[1]) - return visited - def degree(self, u): - return len(self.graph[u]) - - def cycle_nodes(self): - stack = [] - visited = [] - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - parent = -2 - indirect_parents = [] - ss = s - on_the_way_back = False - anticipating_nodes = set() - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not 
on_the_way_back: - l = len(stack) - 1 - while True and l >= 0: - if stack[l] == __[1]: - anticipating_nodes.add(__[1]) - break - else: - anticipating_nodes.add(stack[l]) - l -= 1 - if visited.count(__[1]) < 1: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - on_the_way_back = True - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - on_the_way_back = False - indirect_parents.append(parent) - parent = s - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return list(anticipating_nodes) - - def has_cycle(self): - stack = [] - visited = [] - s = list(self.graph.keys())[0] - stack.append(s) - visited.append(s) - parent = -2 - indirect_parents = [] - ss = s - on_the_way_back = False - anticipating_nodes = set() - - while True: - # check if there is any non isolated nodes - if len(self.graph[s]) != 0: - ss = s - for __ in self.graph[s]: - if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: - l = len(stack) - 1 - while True and l >= 0: - if stack[l] == __[1]: - anticipating_nodes.add(__[1]) - break - else: - return True - anticipating_nodes.add(stack[l]) - l -= 1 - if visited.count(__[1]) < 1: - stack.append(__[1]) - visited.append(__[1]) - ss =__[1] - break - - # check if all the children are visited - if s == ss : - stack.pop() - on_the_way_back = True - if len(stack) != 0: - s = stack[len(stack) - 1] - else: - on_the_way_back = False - indirect_parents.append(parent) - parent = s - s = ss - - # check if se have reached the starting point - if len(stack) == 0: - return False - def all_nodes(self): - return list(self.graph) - - def dfs_time(self, s = -2, e = -1): - begin = time.time() - self.dfs(s,e) - end = time.time() - return end - begin - - def bfs_time(self, s = -2): - begin = time.time() - self.bfs(s) - end = time.time() - return end - begin + def __init__(self): + self.graph = 
{} + + # adding vertices and edges + # adding the weight is optional + # handels repetition + def add_pair(self, u, v, w=1): + # check if the u exists + if self.graph.get(u): + # if there already is a edge + if self.graph[u].count([w, v]) == 0: + self.graph[u].append([w, v]) + else: + # if u does not exist + self.graph[u] = [[w, v]] + # add the other way + if self.graph.get(v): + # if there already is a edge + if self.graph[v].count([w, u]) == 0: + self.graph[v].append([w, u]) + else: + # if u does not exist + self.graph[v] = [[w, u]] + + # handels if the input does not exist + def remove_pair(self, u, v): + if self.graph.get(u): + for _ in self.graph[u]: + if _[1] == v: + self.graph[u].remove(_) + # the other way round + if self.graph.get(v): + for _ in self.graph[v]: + if _[1] == u: + self.graph[v].remove(_) + + # if no destination is meant the defaut value is -1 + def dfs(self, s=-2, d=-1): + if s == d: + return [] + stack = [] + visited = [] + if s == -2: + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + ss = s + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) < 1: + if __[1] == d: + visited.append(d) + return visited + else: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return visited + + # c is the count of nodes you want and if you leave it or pass -1 to the funtion the count + # will be random from 10 to 10000 + def fill_graph_randomly(self, c=-1): + if c == -1: + c = (math.floor(rand.random() * 10000)) + 10 + for _ in range(c): + # every vertex has max 100 edges + e = math.floor(rand.random() * 102) + 1 + for __ in range(e): + n = math.floor(rand.random() * (c)) + 1 + if n == _: + continue + 
self.add_pair(_, n, 1) + + def bfs(self, s=-2): + d = deque() + visited = [] + if s == -2: + s = list(self.graph.keys())[0] + d.append(s) + visited.append(s) + while d: + s = d.popleft() + if len(self.graph[s]) != 0: + for __ in self.graph[s]: + if visited.count(__[1]) < 1: + d.append(__[1]) + visited.append(__[1]) + return visited + + def degree(self, u): + return len(self.graph[u]) + + def cycle_nodes(self): + stack = [] + visited = [] + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + parent = -2 + indirect_parents = [] + ss = s + on_the_way_back = False + anticipating_nodes = set() + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: + l = len(stack) - 1 + while True and l >= 0: + if stack[l] == __[1]: + anticipating_nodes.add(__[1]) + break + else: + anticipating_nodes.add(stack[l]) + l -= 1 + if visited.count(__[1]) < 1: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + on_the_way_back = True + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + on_the_way_back = False + indirect_parents.append(parent) + parent = s + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return list(anticipating_nodes) + + def has_cycle(self): + stack = [] + visited = [] + s = list(self.graph.keys())[0] + stack.append(s) + visited.append(s) + parent = -2 + indirect_parents = [] + ss = s + on_the_way_back = False + anticipating_nodes = set() + + while True: + # check if there is any non isolated nodes + if len(self.graph[s]) != 0: + ss = s + for __ in self.graph[s]: + if visited.count(__[1]) > 0 and __[1] != parent and indirect_parents.count(__[1]) > 0 and not on_the_way_back: + l = len(stack) - 1 + while True and l >= 0: + if stack[l] == __[1]: + 
anticipating_nodes.add(__[1]) + break + else: + return True + anticipating_nodes.add(stack[l]) + l -= 1 + if visited.count(__[1]) < 1: + stack.append(__[1]) + visited.append(__[1]) + ss = __[1] + break + + # check if all the children are visited + if s == ss: + stack.pop() + on_the_way_back = True + if len(stack) != 0: + s = stack[len(stack) - 1] + else: + on_the_way_back = False + indirect_parents.append(parent) + parent = s + s = ss + + # check if se have reached the starting point + if len(stack) == 0: + return False + + def all_nodes(self): + return list(self.graph) + + def dfs_time(self, s=-2, e=-1): + begin = time.time() + self.dfs(s, e) + end = time.time() + return end - begin + + def bfs_time(self, s=-2): + begin = time.time() + self.bfs(s) + end = time.time() + return end - begin diff --git a/graphs/a_star.py b/graphs/a_star.py index 584222e6f62b..d5f636151640 100644 --- a/graphs/a_star.py +++ b/graphs/a_star.py @@ -1,7 +1,7 @@ from __future__ import print_function grid = [[0, 1, 0, 0, 0, 0], - [0, 1, 0, 0, 0, 0],#0 are free path whereas 1's are obstacles + [0, 1, 0, 0, 0, 0], # 0 are free path whereas 1's are obstacles [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 1, 0]] @@ -14,31 +14,29 @@ [5, 4, 3, 2, 1, 0]]''' init = [0, 0] -goal = [len(grid)-1, len(grid[0])-1] #all coordinates are given in format [y,x] +goal = [len(grid) - 1, len(grid[0]) - 1] # all coordinates are given in format [y,x] cost = 1 -#the cost map which pushes the path closer to the goal +# the cost map which pushes the path closer to the goal heuristic = [[0 for row in range(len(grid[0]))] for col in range(len(grid))] -for i in range(len(grid)): - for j in range(len(grid[0])): +for i in range(len(grid)): + for j in range(len(grid[0])): heuristic[i][j] = abs(i - goal[0]) + abs(j - goal[1]) if grid[i][j] == 1: - heuristic[i][j] = 99 #added extra penalty in the heuristic map + heuristic[i][j] = 99 # added extra penalty in the heuristic map +# the actions we can take +delta = [[-1, 
0], # go up + [0, -1], # go left + [1, 0], # go down + [0, 1]] # go right -#the actions we can take -delta = [[-1, 0 ], # go up - [ 0, -1], # go left - [ 1, 0 ], # go down - [ 0, 1 ]] # go right - -#function to search the path -def search(grid,init,goal,cost,heuristic): - - closed = [[0 for col in range(len(grid[0]))] for row in range(len(grid))]# the referrence grid +# function to search the path +def search(grid, init, goal, cost, heuristic): + closed = [[0 for col in range(len(grid[0]))] for row in range(len(grid))] # the referrence grid closed[init[0]][init[1]] = 1 - action = [[0 for col in range(len(grid[0]))] for row in range(len(grid))]#the action grid + action = [[0 for col in range(len(grid[0]))] for row in range(len(grid))] # the action grid x = init[0] y = init[1] @@ -47,14 +45,14 @@ def search(grid,init,goal,cost,heuristic): cell = [[f, g, x, y]] found = False # flag that is set when search is complete - resign = False # flag set if we can't find expand + resign = False # flag set if we can't find expand while not found and not resign: if len(cell) == 0: resign = True return "FAIL" else: - cell.sort()#to choose the least costliest action so as to move closer to the goal + cell.sort() # to choose the least costliest action so as to move closer to the goal cell.reverse() next = cell.pop() x = next[2] @@ -62,14 +60,13 @@ def search(grid,init,goal,cost,heuristic): g = next[1] f = next[0] - if x == goal[0] and y == goal[1]: found = True else: - for i in range(len(delta)):#to try out different valid actions + for i in range(len(delta)): # to try out different valid actions x2 = x + delta[i][0] y2 = y + delta[i][1] - if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2 < len(grid[0]): + if x2 >= 0 and x2 < len(grid) and y2 >= 0 and y2 < len(grid[0]): if closed[x2][y2] == 0 and grid[x2][y2] == 0: g2 = g + cost f2 = g2 + heuristic[x2][y2] @@ -79,7 +76,7 @@ def search(grid,init,goal,cost,heuristic): invpath = [] x = goal[0] y = goal[1] - invpath.append([x, y])#we get 
the reverse path from here + invpath.append([x, y]) # we get the reverse path from here while x != init[0] or y != init[1]: x2 = x - delta[action[x][y]][0] y2 = y - delta[action[x][y]][1] @@ -89,14 +86,14 @@ def search(grid,init,goal,cost,heuristic): path = [] for i in range(len(invpath)): - path.append(invpath[len(invpath) - 1 - i]) + path.append(invpath[len(invpath) - 1 - i]) print("ACTION MAP") for i in range(len(action)): print(action[i]) - + return path - -a = search(grid,init,goal,cost,heuristic) -for i in range(len(a)): - print(a[i]) + +a = search(grid, init, goal, cost, heuristic) +for i in range(len(a)): + print(a[i]) diff --git a/graphs/articulation_points.py b/graphs/articulation_points.py index 1173c4ea373c..897a8a874104 100644 --- a/graphs/articulation_points.py +++ b/graphs/articulation_points.py @@ -39,6 +39,7 @@ def dfs(root, at, parent, outEdgeCount): if isArt[x] == True: print(x) + # Adjacency list of graph -l = {0:[1,2], 1:[0,2], 2:[0,1,3,5], 3:[2,4], 4:[3], 5:[2,6,8], 6:[5,7], 7:[6,8], 8:[5,7]} +l = {0: [1, 2], 1: [0, 2], 2: [0, 1, 3, 5], 3: [2, 4], 4: [3], 5: [2, 6, 8], 6: [5, 7], 7: [6, 8], 8: [5, 7]} computeAP(l) diff --git a/graphs/basic_graphs.py b/graphs/basic_graphs.py index 3b3abeb1720d..97035d0d167e 100644 --- a/graphs/basic_graphs.py +++ b/graphs/basic_graphs.py @@ -83,7 +83,6 @@ def dfs(G, s): Q - Traveral Stack -------------------------------------------------------------------------------- """ -from collections import deque def bfs(G, s): diff --git a/graphs/bellman_ford.py b/graphs/bellman_ford.py index 82db80546b94..66fe0701eae5 100644 --- a/graphs/bellman_ford.py +++ b/graphs/bellman_ford.py @@ -1,54 +1,55 @@ from __future__ import print_function + def printDist(dist, V): - print("\nVertex Distance") - for i in range(V): - if dist[i] != float('inf') : - print(i,"\t",int(dist[i]),end = "\t") - else: - print(i,"\t","INF",end="\t") - print() + print("\nVertex Distance") + for i in range(V): + if dist[i] != float('inf'): + print(i, 
"\t", int(dist[i]), end="\t") + else: + print(i, "\t", "INF", end="\t") + print() + def BellmanFord(graph, V, E, src): - mdist=[float('inf') for i in range(V)] - mdist[src] = 0.0 - - for i in range(V-1): - for j in range(V): - u = graph[j]["src"] - v = graph[j]["dst"] - w = graph[j]["weight"] - - if mdist[u] != float('inf') and mdist[u] + w < mdist[v]: - mdist[v] = mdist[u] + w - for j in range(V): - u = graph[j]["src"] - v = graph[j]["dst"] - w = graph[j]["weight"] - - if mdist[u] != float('inf') and mdist[u] + w < mdist[v]: - print("Negative cycle found. Solution not possible.") - return - - printDist(mdist, V) - - - -#MAIN + mdist = [float('inf') for i in range(V)] + mdist[src] = 0.0 + + for i in range(V - 1): + for j in range(V): + u = graph[j]["src"] + v = graph[j]["dst"] + w = graph[j]["weight"] + + if mdist[u] != float('inf') and mdist[u] + w < mdist[v]: + mdist[v] = mdist[u] + w + for j in range(V): + u = graph[j]["src"] + v = graph[j]["dst"] + w = graph[j]["weight"] + + if mdist[u] != float('inf') and mdist[u] + w < mdist[v]: + print("Negative cycle found. 
Solution not possible.") + return + + printDist(mdist, V) + + +# MAIN V = int(input("Enter number of vertices: ")) E = int(input("Enter number of edges: ")) graph = [dict() for j in range(E)] for i in range(V): - graph[i][i] = 0.0 + graph[i][i] = 0.0 for i in range(E): - print("\nEdge ",i+1) - src = int(input("Enter source:")) - dst = int(input("Enter destination:")) - weight = float(input("Enter weight:")) - graph[i] = {"src": src,"dst": dst, "weight": weight} - + print("\nEdge ", i + 1) + src = int(input("Enter source:")) + dst = int(input("Enter destination:")) + weight = float(input("Enter weight:")) + graph[i] = {"src": src, "dst": dst, "weight": weight} + gsrc = int(input("\nEnter shortest path source:")) BellmanFord(graph, V, E, gsrc) diff --git a/graphs/bfs_shortest_path.py b/graphs/bfs_shortest_path.py index 5853351a53a3..ad44caf23c64 100644 --- a/graphs/bfs_shortest_path.py +++ b/graphs/bfs_shortest_path.py @@ -1,21 +1,22 @@ graph = {'A': ['B', 'C', 'E'], - 'B': ['A','D', 'E'], + 'B': ['A', 'D', 'E'], 'C': ['A', 'F', 'G'], 'D': ['B'], - 'E': ['A', 'B','D'], + 'E': ['A', 'B', 'D'], 'F': ['C'], 'G': ['C']} + def bfs_shortest_path(graph, start, goal): # keep track of explored nodes explored = [] # keep track of all the paths to be checked queue = [[start]] - + # return path if start is goal if start == goal: return "That was easy! 
Start = goal" - + # keeps looping until all possible paths have been checked while queue: # pop the first path from the queue @@ -33,11 +34,12 @@ def bfs_shortest_path(graph, start, goal): # return path if neighbour is goal if neighbour == goal: return new_path - + # mark node as explored explored.append(node) - + # in case there's no path between the 2 nodes return "So sorry, but a connecting path doesn't exist :(" - + + bfs_shortest_path(graph, 'G', 'D') # returns ['G', 'C', 'A', 'B', 'D'] diff --git a/graphs/breadth_first_search.py b/graphs/breadth_first_search.py index 3992e2d4d892..d4998bb5a33a 100644 --- a/graphs/breadth_first_search.py +++ b/graphs/breadth_first_search.py @@ -13,7 +13,7 @@ def __init__(self): # for printing the Graph vertexes def printGraph(self): for i in self.vertex.keys(): - print(i,' -> ', ' -> '.join([str(j) for j in self.vertex[i]])) + print(i, ' -> ', ' -> '.join([str(j) for j in self.vertex[i]])) # for adding the edge beween two vertexes def addEdge(self, fromVertex, toVertex): @@ -37,7 +37,7 @@ def BFS(self, startVertex): while queue: startVertex = queue.pop(0) - print(startVertex, end = ' ') + print(startVertex, end=' ') # mark all adjacent nodes as visited and print them for i in self.vertex[startVertex]: @@ -45,6 +45,7 @@ def BFS(self, startVertex): queue.append(i) visited[i] = True + if __name__ == '__main__': g = Graph() g.addEdge(0, 1) diff --git a/graphs/check_bipartite_graph_bfs.py b/graphs/check_bipartite_graph_bfs.py index 1b9c32c6ccc4..e175f68043a1 100644 --- a/graphs/check_bipartite_graph_bfs.py +++ b/graphs/check_bipartite_graph_bfs.py @@ -11,7 +11,7 @@ def checkBipartite(l): color = [-1] * len(l) def bfs(): - while(queue): + while (queue): u = queue.pop(0) visited[u] = True @@ -38,6 +38,7 @@ def bfs(): return True + # Adjacency List of graph -l = {0:[1,3], 1:[0,2], 2:[1,3], 3:[0,2]} +l = {0: [1, 3], 1: [0, 2], 2: [1, 3], 3: [0, 2]} print(checkBipartite(l)) diff --git a/graphs/check_bipartite_graph_dfs.py 
b/graphs/check_bipartite_graph_dfs.py index eeb3a84b7a15..6fe54a6723c5 100644 --- a/graphs/check_bipartite_graph_dfs.py +++ b/graphs/check_bipartite_graph_dfs.py @@ -26,8 +26,8 @@ def dfs(v, c): return False return True - + # Adjacency list of graph -l = {0:[1,3], 1:[0,2], 2:[1,3], 3:[0,2], 4: []} +l = {0: [1, 3], 1: [0, 2], 2: [1, 3], 3: [0, 2], 4: []} print(check_bipartite_dfs(l)) diff --git a/graphs/depth_first_search.py b/graphs/depth_first_search.py index 98faf61354f9..dd2c4224c8ae 100644 --- a/graphs/depth_first_search.py +++ b/graphs/depth_first_search.py @@ -13,7 +13,7 @@ def __init__(self): def printGraph(self): print(self.vertex) for i in self.vertex.keys(): - print(i,' -> ', ' -> '.join([str(j) for j in self.vertex[i]])) + print(i, ' -> ', ' -> '.join([str(j) for j in self.vertex[i]])) # for adding the edge beween two vertexes def addEdge(self, fromVertex, toVertex): @@ -37,13 +37,14 @@ def DFSRec(self, startVertex, visited): # mark start vertex as visited visited[startVertex] = True - print(startVertex, end = ' ') + print(startVertex, end=' ') # Recur for all the vertexes that are adjacent to this node for i in self.vertex.keys(): if visited[i] == False: self.DFSRec(i, visited) + if __name__ == '__main__': g = Graph() g.addEdge(0, 1) @@ -63,4 +64,4 @@ def DFSRec(self, startVertex, visited): # 2  ->  0 -> 3 # 3  ->  3 # DFS: - # 0 1 2 3 + #  0 1 2 3 diff --git a/graphs/dijkstra_2.py b/graphs/dijkstra_2.py index a6c340e8a68d..92fad8208113 100644 --- a/graphs/dijkstra_2.py +++ b/graphs/dijkstra_2.py @@ -1,57 +1,57 @@ from __future__ import print_function + def printDist(dist, V): - print("\nVertex Distance") - for i in range(V): - if dist[i] != float('inf') : - print(i,"\t",int(dist[i]),end = "\t") - else: - print(i,"\t","INF",end="\t") - print() + print("\nVertex Distance") + for i in range(V): + if dist[i] != float('inf'): + print(i, "\t", int(dist[i]), end="\t") + else: + print(i, "\t", "INF", end="\t") + print() + def minDist(mdist, vset, V): - minVal 
= float('inf') - minInd = -1 - for i in range(V): - if (not vset[i]) and mdist[i] < minVal : - minInd = i - minVal = mdist[i] - return minInd + minVal = float('inf') + minInd = -1 + for i in range(V): + if (not vset[i]) and mdist[i] < minVal: + minInd = i + minVal = mdist[i] + return minInd + def Dijkstra(graph, V, src): - mdist=[float('inf') for i in range(V)] - vset = [False for i in range(V)] - mdist[src] = 0.0 - - for i in range(V-1): - u = minDist(mdist, vset, V) - vset[u] = True - - for v in range(V): - if (not vset[v]) and graph[u][v]!=float('inf') and mdist[u] + graph[u][v] < mdist[v]: - mdist[v] = mdist[u] + graph[u][v] + mdist = [float('inf') for i in range(V)] + vset = [False for i in range(V)] + mdist[src] = 0.0 + + for i in range(V - 1): + u = minDist(mdist, vset, V) + vset[u] = True - + for v in range(V): + if (not vset[v]) and graph[u][v] != float('inf') and mdist[u] + graph[u][v] < mdist[v]: + mdist[v] = mdist[u] + graph[u][v] - printDist(mdist, V) + printDist(mdist, V) - -#MAIN +# MAIN V = int(input("Enter number of vertices: ")) E = int(input("Enter number of edges: ")) graph = [[float('inf') for i in range(V)] for j in range(V)] for i in range(V): - graph[i][i] = 0.0 + graph[i][i] = 0.0 for i in range(E): - print("\nEdge ",i+1) - src = int(input("Enter source:")) - dst = int(input("Enter destination:")) - weight = float(input("Enter weight:")) - graph[src][dst] = weight + print("\nEdge ", i + 1) + src = int(input("Enter source:")) + dst = int(input("Enter destination:")) + weight = float(input("Enter weight:")) + graph[src][dst] = weight gsrc = int(input("\nEnter shortest path source:")) Dijkstra(graph, V, gsrc) diff --git a/graphs/dijkstra_algorithm.py b/graphs/dijkstra_algorithm.py index 985c7f6c1301..3de9235f55ae 100644 --- a/graphs/dijkstra_algorithm.py +++ b/graphs/dijkstra_algorithm.py @@ -3,8 +3,11 @@ # References: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm from __future__ import print_function + import math import sys + + # For 
storing the vertex set to retreive node with the lowest distance @@ -13,7 +16,7 @@ class PriorityQueue: def __init__(self): self.cur_size = 0 self.array = [] - self.pos = {} # To store the pos of node in array + self.pos = {} # To store the pos of node in array def isEmpty(self): return self.cur_size == 0 @@ -79,8 +82,8 @@ def decrease_key(self, tup, new_d): class Graph: def __init__(self, num): - self.adjList = {} # To store graph: u -> (v,w) - self.num_nodes = num # Number of nodes in graph + self.adjList = {} # To store graph: u -> (v,w) + self.num_nodes = num # Number of nodes in graph # To store the distance from source vertex self.dist = [0] * self.num_nodes self.par = [-1] * self.num_nodes # To store the path diff --git a/graphs/edmonds_karp_multiple_source_and_sink.py b/graphs/edmonds_karp_multiple_source_and_sink.py index d231ac2c4cc3..92b87bd41353 100644 --- a/graphs/edmonds_karp_multiple_source_and_sink.py +++ b/graphs/edmonds_karp_multiple_source_and_sink.py @@ -28,14 +28,13 @@ def _normalizeGraph(self, sources, sinks): for i in sources: maxInputFlow += sum(self.graph[i]) - size = len(self.graph) + 1 for room in self.graph: room.insert(0, 0) self.graph.insert(0, [0] * size) for i in sources: self.graph[0][i + 1] = maxInputFlow - self.sourceIndex = 0 + self.sourceIndex = 0 size = len(self.graph) + 1 for room in self.graph: @@ -45,7 +44,6 @@ def _normalizeGraph(self, sources, sinks): self.graph[i + 1][size - 1] = maxInputFlow self.sinkIndex = size - 1 - def findMaximumFlow(self): if self.maximumFlowAlgorithm is None: raise Exception("You need to set maximum flow algorithm before.") @@ -80,7 +78,6 @@ def _algorithm(self): pass - class MaximumFlowAlgorithmExecutor(FlowNetworkAlgorithmExecutor): def __init__(self, flowNetwork): super(MaximumFlowAlgorithmExecutor, self).__init__(flowNetwork) @@ -93,6 +90,7 @@ def getMaximumFlow(self): return self.maximumFlow + class PushRelabelExecutor(MaximumFlowAlgorithmExecutor): def __init__(self, flowNetwork): 
super(PushRelabelExecutor, self).__init__(flowNetwork) @@ -135,7 +133,7 @@ def processVertex(self, vertexIndex): while self.excesses[vertexIndex] > 0: for neighbourIndex in range(self.verticesCount): # if it's neighbour and current vertex is higher - if self.graph[vertexIndex][neighbourIndex] - self.preflow[vertexIndex][neighbourIndex] > 0\ + if self.graph[vertexIndex][neighbourIndex] - self.preflow[vertexIndex][neighbourIndex] > 0 \ and self.heights[vertexIndex] > self.heights[neighbourIndex]: self.push(vertexIndex, neighbourIndex) @@ -159,6 +157,7 @@ def relabel(self, vertexIndex): if minHeight is not None: self.heights[vertexIndex] = minHeight + 1 + if __name__ == '__main__': entrances = [0] exits = [3] diff --git a/graphs/even_tree.py b/graphs/even_tree.py index 9383ea9a13c1..18e3d82054d7 100644 --- a/graphs/even_tree.py +++ b/graphs/even_tree.py @@ -13,6 +13,7 @@ components containing an even number of nodes. """ from __future__ import print_function + # pylint: disable=invalid-name from collections import defaultdict diff --git a/graphs/finding_bridges.py b/graphs/finding_bridges.py index 56533dd48bde..da1c0e0daff4 100644 --- a/graphs/finding_bridges.py +++ b/graphs/finding_bridges.py @@ -1,7 +1,7 @@ # Finding Bridges in Undirected Graph def computeBridges(l): id = 0 - n = len(l) # No of vertices in graph + n = len(l) # No of vertices in graph low = [0] * n visited = [False] * n @@ -26,6 +26,7 @@ def dfs(at, parent, bridges, id): if (not visited[i]): dfs(i, -1, bridges, id) print(bridges) - -l = {0:[1,2], 1:[0,2], 2:[0,1,3,5], 3:[2,4], 4:[3], 5:[2,6,8], 6:[5,7], 7:[6,8], 8:[5,7]} + + +l = {0: [1, 2], 1: [0, 2], 2: [0, 1, 3, 5], 3: [2, 4], 4: [3], 5: [2, 6, 8], 6: [5, 7], 7: [6, 8], 8: [5, 7]} computeBridges(l) diff --git a/graphs/floyd_warshall.py b/graphs/floyd_warshall.py index fae8b19b351a..4a2b90fcf00c 100644 --- a/graphs/floyd_warshall.py +++ b/graphs/floyd_warshall.py @@ -1,48 +1,47 @@ from __future__ import print_function -def printDist(dist, V): - 
print("\nThe shortest path matrix using Floyd Warshall algorithm\n") - for i in range(V): - for j in range(V): - if dist[i][j] != float('inf') : - print(int(dist[i][j]),end = "\t") - else: - print("INF",end="\t") - print() +def printDist(dist, V): + print("\nThe shortest path matrix using Floyd Warshall algorithm\n") + for i in range(V): + for j in range(V): + if dist[i][j] != float('inf'): + print(int(dist[i][j]), end="\t") + else: + print("INF", end="\t") + print() def FloydWarshall(graph, V): - dist=[[float('inf') for i in range(V)] for j in range(V)] - - for i in range(V): - for j in range(V): - dist[i][j] = graph[i][j] + dist = [[float('inf') for i in range(V)] for j in range(V)] + + for i in range(V): + for j in range(V): + dist[i][j] = graph[i][j] - for k in range(V): - for i in range(V): - for j in range(V): - if dist[i][k]!=float('inf') and dist[k][j]!=float('inf') and dist[i][k]+dist[k][j] < dist[i][j]: - dist[i][j] = dist[i][k] + dist[k][j] + for k in range(V): + for i in range(V): + for j in range(V): + if dist[i][k] != float('inf') and dist[k][j] != float('inf') and dist[i][k] + dist[k][j] < dist[i][j]: + dist[i][j] = dist[i][k] + dist[k][j] - printDist(dist, V) + printDist(dist, V) - -#MAIN +# MAIN V = int(input("Enter number of vertices: ")) E = int(input("Enter number of edges: ")) graph = [[float('inf') for i in range(V)] for j in range(V)] for i in range(V): - graph[i][i] = 0.0 + graph[i][i] = 0.0 for i in range(E): - print("\nEdge ",i+1) - src = int(input("Enter source:")) - dst = int(input("Enter destination:")) - weight = float(input("Enter weight:")) - graph[src][dst] = weight + print("\nEdge ", i + 1) + src = int(input("Enter source:")) + dst = int(input("Enter destination:")) + weight = float(input("Enter weight:")) + graph[src][dst] = weight FloydWarshall(graph, V) diff --git a/graphs/graph_list.py b/graphs/graph_list.py index 0c981c39d320..bf7866c7fa99 100644 --- a/graphs/graph_list.py +++ b/graphs/graph_list.py @@ -2,6 +2,8 @@ # 
encoding=utf8 from __future__ import print_function + + # Author: OMKAR PATHAK # We can use Python's dictionary for constructing the graph. @@ -18,8 +20,9 @@ def addEdge(self, fromVertex, toVertex): self.List[fromVertex] = [toVertex] def printList(self): - for i in self.List: - print((i,'->',' -> '.join([str(j) for j in self.List[i]]))) + for i in self.List: + print((i, '->', ' -> '.join([str(j) for j in self.List[i]]))) + if __name__ == '__main__': al = AdjacencyList() diff --git a/graphs/graph_matrix.py b/graphs/graph_matrix.py index de25301d6dd1..e75bb379542a 100644 --- a/graphs/graph_matrix.py +++ b/graphs/graph_matrix.py @@ -5,7 +5,7 @@ class Graph: def __init__(self, vertex): self.vertex = vertex - self.graph = [[0] * vertex for i in range(vertex) ] + self.graph = [[0] * vertex for i in range(vertex)] def add_edge(self, u, v): self.graph[u - 1][v - 1] = 1 @@ -19,14 +19,11 @@ def show(self): print(' ') - - g = Graph(100) -g.add_edge(1,4) -g.add_edge(4,2) -g.add_edge(4,5) -g.add_edge(2,5) -g.add_edge(5,3) +g.add_edge(1, 4) +g.add_edge(4, 2) +g.add_edge(4, 5) +g.add_edge(2, 5) +g.add_edge(5, 3) g.show() - diff --git a/graphs/kahns_algorithm_long.py b/graphs/kahns_algorithm_long.py index 453b5706f6da..62601da0ca8f 100644 --- a/graphs/kahns_algorithm_long.py +++ b/graphs/kahns_algorithm_long.py @@ -12,19 +12,20 @@ def longestDistance(l): if indegree[i] == 0: queue.append(i) - while(queue): + while (queue): vertex = queue.pop(0) for x in l[vertex]: indegree[x] -= 1 if longDist[vertex] + 1 > longDist[x]: - longDist[x] = longDist[vertex] + 1 + longDist[x] = longDist[vertex] + 1 if indegree[x] == 0: queue.append(x) print(max(longDist)) + # Adjacency list of Graph -l = {0:[2,3,4], 1:[2,7], 2:[5], 3:[5,7], 4:[7], 5:[6], 6:[7], 7:[]} +l = {0: [2, 3, 4], 1: [2, 7], 2: [5], 3: [5, 7], 4: [7], 5: [6], 6: [7], 7: []} longestDistance(l) diff --git a/graphs/kahns_algorithm_topo.py b/graphs/kahns_algorithm_topo.py index 8c182c4e902c..daa17204f194 100644 --- 
a/graphs/kahns_algorithm_topo.py +++ b/graphs/kahns_algorithm_topo.py @@ -13,7 +13,7 @@ def topologicalSort(l): if indegree[i] == 0: queue.append(i) - while(queue): + while (queue): vertex = queue.pop(0) cnt += 1 topo.append(vertex) @@ -27,6 +27,7 @@ def topologicalSort(l): else: print(topo) + # Adjacency List of Graph -l = {0:[1,2], 1:[3], 2:[3], 3:[4,5], 4:[], 5:[]} +l = {0: [1, 2], 1: [3], 2: [3], 3: [4, 5], 4: [], 5: []} topologicalSort(l) diff --git a/graphs/minimum_spanning_tree_kruskal.py b/graphs/minimum_spanning_tree_kruskal.py index 81d64f421a31..a4596b8cbcd1 100644 --- a/graphs/minimum_spanning_tree_kruskal.py +++ b/graphs/minimum_spanning_tree_kruskal.py @@ -1,32 +1,35 @@ from __future__ import print_function -num_nodes, num_edges = list(map(int,input().split())) + +num_nodes, num_edges = list(map(int, input().split())) edges = [] for i in range(num_edges): - node1, node2, cost = list(map(int,input().split())) - edges.append((i,node1,node2,cost)) + node1, node2, cost = list(map(int, input().split())) + edges.append((i, node1, node2, cost)) edges = sorted(edges, key=lambda edge: edge[3]) parent = [i for i in range(num_nodes)] + def find_parent(i): - if(i != parent[i]): - parent[i] = find_parent(parent[i]) - return parent[i] + if (i != parent[i]): + parent[i] = find_parent(parent[i]) + return parent[i] + minimum_spanning_tree_cost = 0 minimum_spanning_tree = [] for edge in edges: - parent_a = find_parent(edge[1]) - parent_b = find_parent(edge[2]) - if(parent_a != parent_b): - minimum_spanning_tree_cost += edge[3] - minimum_spanning_tree.append(edge) - parent[parent_a] = parent_b + parent_a = find_parent(edge[1]) + parent_b = find_parent(edge[2]) + if (parent_a != parent_b): + minimum_spanning_tree_cost += edge[3] + minimum_spanning_tree.append(edge) + parent[parent_a] = parent_b print(minimum_spanning_tree_cost) for edge in minimum_spanning_tree: - print(edge) + print(edge) diff --git a/graphs/minimum_spanning_tree_prims.py 
b/graphs/minimum_spanning_tree_prims.py index 7b1ad0e743f7..943376bd1593 100644 --- a/graphs/minimum_spanning_tree_prims.py +++ b/graphs/minimum_spanning_tree_prims.py @@ -1,9 +1,10 @@ import sys from collections import defaultdict -def PrimsAlgorithm(l): +def PrimsAlgorithm(l): nodePosition = [] + def getPosition(vertex): return nodePosition[vertex] @@ -36,11 +37,11 @@ def topToBottom(heap, start, size, positions): def bottomToTop(val, index, heap, position): temp = position[index] - while(index != 0): + while (index != 0): if index % 2 == 0: - parent = int( (index-2) / 2 ) + parent = int((index - 2) / 2) else: - parent = int( (index-1) / 2 ) + parent = int((index - 1) / 2) if val < heap[parent]: heap[index] = heap[parent] @@ -69,9 +70,9 @@ def deleteMinimum(heap, positions): return temp visited = [0 for i in range(len(l))] - Nbr_TV = [-1 for i in range(len(l))] # Neighboring Tree Vertex of selected vertex + Nbr_TV = [-1 for i in range(len(l))] # Neighboring Tree Vertex of selected vertex # Minimum Distance of explored vertex with neighboring vertex of partial tree formed in graph - Distance_TV = [] # Heap of Distance of vertices from their neighboring vertex + Distance_TV = [] # Heap of Distance of vertices from their neighboring vertex Positions = [] for x in range(len(l)): @@ -84,8 +85,8 @@ def deleteMinimum(heap, positions): visited[0] = 1 Distance_TV[0] = sys.maxsize for x in l[0]: - Nbr_TV[ x[0] ] = 0 - Distance_TV[ x[0] ] = x[1] + Nbr_TV[x[0]] = 0 + Distance_TV[x[0]] = x[1] heapify(Distance_TV, Positions) for i in range(1, len(l)): @@ -94,18 +95,19 @@ def deleteMinimum(heap, positions): TreeEdges.append((Nbr_TV[vertex], vertex)) visited[vertex] = 1 for v in l[vertex]: - if visited[v[0]] == 0 and v[1] < Distance_TV[ getPosition(v[0]) ]: - Distance_TV[ getPosition(v[0]) ] = v[1] + if visited[v[0]] == 0 and v[1] < Distance_TV[getPosition(v[0])]: + Distance_TV[getPosition(v[0])] = v[1] bottomToTop(v[1], getPosition(v[0]), Distance_TV, Positions) - Nbr_TV[ v[0] 
] = vertex + Nbr_TV[v[0]] = vertex return TreeEdges + # < --------- Prims Algorithm --------- > n = int(input("Enter number of vertices: ")) e = int(input("Enter number of edges: ")) adjlist = defaultdict(list) for x in range(e): l = [int(x) for x in input().split()] - adjlist[l[0]].append([ l[1], l[2] ]) - adjlist[l[1]].append([ l[0], l[2] ]) + adjlist[l[0]].append([l[1], l[2]]) + adjlist[l[1]].append([l[0], l[2]]) print(PrimsAlgorithm(adjlist)) diff --git a/graphs/multi_hueristic_astar.py b/graphs/multi_hueristic_astar.py index 1acd098f327d..ffd2b0f9f418 100644 --- a/graphs/multi_hueristic_astar.py +++ b/graphs/multi_hueristic_astar.py @@ -1,266 +1,276 @@ from __future__ import print_function + import heapq + import numpy as np try: - xrange # Python 2 + xrange # Python 2 except NameError: xrange = range # Python 3 class PriorityQueue: - def __init__(self): - self.elements = [] - self.set = set() - - def minkey(self): - if not self.empty(): - return self.elements[0][0] - else: - return float('inf') - - def empty(self): - return len(self.elements) == 0 - - def put(self, item, priority): - if item not in self.set: - heapq.heappush(self.elements, (priority, item)) - self.set.add(item) - else: - # update - # print("update", item) - temp = [] - (pri, x) = heapq.heappop(self.elements) - while x != item: - temp.append((pri, x)) - (pri, x) = heapq.heappop(self.elements) - temp.append((priority, item)) - for (pro, xxx) in temp: - heapq.heappush(self.elements, (pro, xxx)) - - def remove_element(self, item): - if item in self.set: - self.set.remove(item) - temp = [] - (pro, x) = heapq.heappop(self.elements) - while x != item: - temp.append((pro, x)) - (pro, x) = heapq.heappop(self.elements) - for (prito, yyy) in temp: - heapq.heappush(self.elements, (prito, yyy)) - - def top_show(self): - return self.elements[0][1] - - def get(self): - (priority, item) = heapq.heappop(self.elements) - self.set.remove(item) - return (priority, item) + def __init__(self): + self.elements = [] 
+ self.set = set() + + def minkey(self): + if not self.empty(): + return self.elements[0][0] + else: + return float('inf') + + def empty(self): + return len(self.elements) == 0 + + def put(self, item, priority): + if item not in self.set: + heapq.heappush(self.elements, (priority, item)) + self.set.add(item) + else: + # update + # print("update", item) + temp = [] + (pri, x) = heapq.heappop(self.elements) + while x != item: + temp.append((pri, x)) + (pri, x) = heapq.heappop(self.elements) + temp.append((priority, item)) + for (pro, xxx) in temp: + heapq.heappush(self.elements, (pro, xxx)) + + def remove_element(self, item): + if item in self.set: + self.set.remove(item) + temp = [] + (pro, x) = heapq.heappop(self.elements) + while x != item: + temp.append((pro, x)) + (pro, x) = heapq.heappop(self.elements) + for (prito, yyy) in temp: + heapq.heappush(self.elements, (prito, yyy)) + + def top_show(self): + return self.elements[0][1] + + def get(self): + (priority, item) = heapq.heappop(self.elements) + self.set.remove(item) + return (priority, item) + def consistent_hueristic(P, goal): - # euclidean distance - a = np.array(P) - b = np.array(goal) - return np.linalg.norm(a - b) + # euclidean distance + a = np.array(P) + b = np.array(goal) + return np.linalg.norm(a - b) + def hueristic_2(P, goal): - # integer division by time variable - return consistent_hueristic(P, goal) // t + # integer division by time variable + return consistent_hueristic(P, goal) // t + def hueristic_1(P, goal): - # manhattan distance - return abs(P[0] - goal[0]) + abs(P[1] - goal[1]) + # manhattan distance + return abs(P[0] - goal[0]) + abs(P[1] - goal[1]) + def key(start, i, goal, g_function): - ans = g_function[start] + W1 * hueristics[i](start, goal) - return ans - + ans = g_function[start] + W1 * hueristics[i](start, goal) + return ans + + def do_something(back_pointer, goal, start): - grid = np.chararray((n, n)) - for i in range(n): - for j in range(n): - grid[i][j] = '*' - - for i in 
range(n): - for j in range(n): - if (j, (n-1)-i) in blocks: - grid[i][j] = "#" - - grid[0][(n-1)] = "-" - x = back_pointer[goal] - while x != start: - (x_c, y_c) = x - # print(x) - grid[(n-1)-y_c][x_c] = "-" - x = back_pointer[x] - grid[(n-1)][0] = "-" - - - for i in xrange(n): - for j in range(n): - if (i, j) == (0, n-1): - print(grid[i][j], end=' ') - print("<-- End position", end=' ') - else: - print(grid[i][j], end=' ') - print() - print("^") - print("Start position") - print() - print("# is an obstacle") - print("- is the path taken by algorithm") - print("PATH TAKEN BY THE ALGORITHM IS:-") - x = back_pointer[goal] - while x != start: - print(x, end=' ') - x = back_pointer[x] - print(x) - quit() + grid = np.chararray((n, n)) + for i in range(n): + for j in range(n): + grid[i][j] = '*' + + for i in range(n): + for j in range(n): + if (j, (n - 1) - i) in blocks: + grid[i][j] = "#" + + grid[0][(n - 1)] = "-" + x = back_pointer[goal] + while x != start: + (x_c, y_c) = x + # print(x) + grid[(n - 1) - y_c][x_c] = "-" + x = back_pointer[x] + grid[(n - 1)][0] = "-" + + for i in xrange(n): + for j in range(n): + if (i, j) == (0, n - 1): + print(grid[i][j], end=' ') + print("<-- End position", end=' ') + else: + print(grid[i][j], end=' ') + print() + print("^") + print("Start position") + print() + print("# is an obstacle") + print("- is the path taken by algorithm") + print("PATH TAKEN BY THE ALGORITHM IS:-") + x = back_pointer[goal] + while x != start: + print(x, end=' ') + x = back_pointer[x] + print(x) + quit() + def valid(p): - if p[0] < 0 or p[0] > n-1: - return False - if p[1] < 0 or p[1] > n-1: - return False - return True - -def expand_state(s, j, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer): - for itera in range(n_hueristic): - open_list[itera].remove_element(s) - # print("s", s) - # print("j", j) - (x, y) = s - left = (x-1, y) - right = (x+1, y) - up = (x, y+1) - down = (x, y-1) - - for neighbours in [left, right, up, 
down]: - if neighbours not in blocks: - if valid(neighbours) and neighbours not in visited: - # print("neighbour", neighbours) - visited.add(neighbours) - back_pointer[neighbours] = -1 - g_function[neighbours] = float('inf') - - if valid(neighbours) and g_function[neighbours] > g_function[s] + 1: - g_function[neighbours] = g_function[s] + 1 - back_pointer[neighbours] = s - if neighbours not in close_list_anchor: - open_list[0].put(neighbours, key(neighbours, 0, goal, g_function)) - if neighbours not in close_list_inad: - for var in range(1,n_hueristic): - if key(neighbours, var, goal, g_function) <= W2 * key(neighbours, 0, goal, g_function): - # print("why not plssssssssss") - open_list[j].put(neighbours, key(neighbours, var, goal, g_function)) - - - # print + if p[0] < 0 or p[0] > n - 1: + return False + if p[1] < 0 or p[1] > n - 1: + return False + return True + + +def expand_state(s, j, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer): + for itera in range(n_hueristic): + open_list[itera].remove_element(s) + # print("s", s) + # print("j", j) + (x, y) = s + left = (x - 1, y) + right = (x + 1, y) + up = (x, y + 1) + down = (x, y - 1) + + for neighbours in [left, right, up, down]: + if neighbours not in blocks: + if valid(neighbours) and neighbours not in visited: + # print("neighbour", neighbours) + visited.add(neighbours) + back_pointer[neighbours] = -1 + g_function[neighbours] = float('inf') + + if valid(neighbours) and g_function[neighbours] > g_function[s] + 1: + g_function[neighbours] = g_function[s] + 1 + back_pointer[neighbours] = s + if neighbours not in close_list_anchor: + open_list[0].put(neighbours, key(neighbours, 0, goal, g_function)) + if neighbours not in close_list_inad: + for var in range(1, n_hueristic): + if key(neighbours, var, goal, g_function) <= W2 * key(neighbours, 0, goal, g_function): + # print("why not plssssssssss") + open_list[j].put(neighbours, key(neighbours, var, goal, g_function)) + + +# print def 
make_common_ground(): - some_list = [] - # block 1 - for x in range(1, 5): - for y in range(1, 6): - some_list.append((x, y)) - - # line - for x in range(15, 20): - some_list.append((x, 17)) - - # block 2 big - for x in range(10, 19): - for y in range(1, 15): - some_list.append((x, y)) - - # L block - for x in range(1, 4): - for y in range(12, 19): - some_list.append((x, y)) - for x in range(3, 13): - for y in range(16, 19): - some_list.append((x, y)) - return some_list + some_list = [] + # block 1 + for x in range(1, 5): + for y in range(1, 6): + some_list.append((x, y)) -hueristics = {0: consistent_hueristic, 1: hueristic_1, 2: hueristic_2} + # line + for x in range(15, 20): + some_list.append((x, 17)) -blocks_blk = [(0, 1),(1, 1),(2, 1),(3, 1),(4, 1),(5, 1),(6, 1),(7, 1),(8, 1),(9, 1),(10, 1),(11, 1),(12, 1),(13, 1),(14, 1),(15, 1),(16, 1),(17, 1),(18, 1), (19, 1)] -blocks_no = [] -blocks_all = make_common_ground() + # block 2 big + for x in range(10, 19): + for y in range(1, 15): + some_list.append((x, y)) + + # L block + for x in range(1, 4): + for y in range(12, 19): + some_list.append((x, y)) + for x in range(3, 13): + for y in range(16, 19): + some_list.append((x, y)) + return some_list +hueristics = {0: consistent_hueristic, 1: hueristic_1, 2: hueristic_2} +blocks_blk = [(0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1), (15, 1), (16, 1), (17, 1), (18, 1), (19, 1)] +blocks_no = [] +blocks_all = make_common_ground() blocks = blocks_blk # hyper parameters W1 = 1 W2 = 1 n = 20 -n_hueristic = 3 # one consistent and two other inconsistent +n_hueristic = 3 # one consistent and two other inconsistent # start and end destination start = (0, 0) -goal = (n-1, n-1) +goal = (n - 1, n - 1) t = 1 + + def multi_a_star(start, goal, n_hueristic): - g_function = {start: 0, goal: float('inf')} - back_pointer = {start:-1, goal:-1} - open_list = [] - visited = set() - - for i in range(n_hueristic): - 
open_list.append(PriorityQueue()) - open_list[i].put(start, key(start, i, goal, g_function)) - - close_list_anchor = [] - close_list_inad = [] - while open_list[0].minkey() < float('inf'): - for i in range(1, n_hueristic): - # print("i", i) - # print(open_list[0].minkey(), open_list[i].minkey()) - if open_list[i].minkey() <= W2 * open_list[0].minkey(): - global t - t += 1 - # print("less prio") - if g_function[goal] <= open_list[i].minkey(): - if g_function[goal] < float('inf'): - do_something(back_pointer, goal, start) - else: - _, get_s = open_list[i].top_show() - visited.add(get_s) - expand_state(get_s, i, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer) - close_list_inad.append(get_s) - else: - # print("more prio") - if g_function[goal] <= open_list[0].minkey(): - if g_function[goal] < float('inf'): - do_something(back_pointer, goal, start) - else: - # print("hoolla") - get_s = open_list[0].top_show() - visited.add(get_s) - expand_state(get_s, 0, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer) - close_list_anchor.append(get_s) - print("No path found to goal") - print() - for i in range(n-1,-1, -1): - for j in range(n): - if (j, i) in blocks: - print('#', end=' ') - elif (j, i) in back_pointer: - if (j, i) == (n-1, n-1): - print('*', end=' ') - else: - print('-', end=' ') - else: - print('*', end=' ') - if (j, i) == (n-1, n-1): - print('<-- End position', end=' ') - print() - print("^") - print("Start position") - print() - print("# is an obstacle") - print("- is the path taken by algorithm") + g_function = {start: 0, goal: float('inf')} + back_pointer = {start: -1, goal: -1} + open_list = [] + visited = set() + + for i in range(n_hueristic): + open_list.append(PriorityQueue()) + open_list[i].put(start, key(start, i, goal, g_function)) + + close_list_anchor = [] + close_list_inad = [] + while open_list[0].minkey() < float('inf'): + for i in range(1, n_hueristic): + # print("i", i) + # 
print(open_list[0].minkey(), open_list[i].minkey()) + if open_list[i].minkey() <= W2 * open_list[0].minkey(): + global t + t += 1 + # print("less prio") + if g_function[goal] <= open_list[i].minkey(): + if g_function[goal] < float('inf'): + do_something(back_pointer, goal, start) + else: + _, get_s = open_list[i].top_show() + visited.add(get_s) + expand_state(get_s, i, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer) + close_list_inad.append(get_s) + else: + # print("more prio") + if g_function[goal] <= open_list[0].minkey(): + if g_function[goal] < float('inf'): + do_something(back_pointer, goal, start) + else: + # print("hoolla") + get_s = open_list[0].top_show() + visited.add(get_s) + expand_state(get_s, 0, visited, g_function, close_list_anchor, close_list_inad, open_list, back_pointer) + close_list_anchor.append(get_s) + print("No path found to goal") + print() + for i in range(n - 1, -1, -1): + for j in range(n): + if (j, i) in blocks: + print('#', end=' ') + elif (j, i) in back_pointer: + if (j, i) == (n - 1, n - 1): + print('*', end=' ') + else: + print('-', end=' ') + else: + print('*', end=' ') + if (j, i) == (n - 1, n - 1): + print('<-- End position', end=' ') + print() + print("^") + print("Start position") + print() + print("# is an obstacle") + print("- is the path taken by algorithm") + + multi_a_star(start, goal, n_hueristic) diff --git a/graphs/page_rank.py b/graphs/page_rank.py index 59f15a99e6b2..3020c965c7ca 100644 --- a/graphs/page_rank.py +++ b/graphs/page_rank.py @@ -12,8 +12,8 @@ ''' graph = [[0, 1, 1], - [0, 0, 1], - [1, 0, 0]] + [0, 0, 1], + [1, 0, 0]] class Node: @@ -21,17 +21,17 @@ def __init__(self, name): self.name = name self.inbound = [] self.outbound = [] - + def add_inbound(self, node): self.inbound.append(node) - + def add_outbound(self, node): self.outbound.append(node) - + def __repr__(self): return 'Node {}: Inbound: {} ; Outbound: {}'.format(self.name, - self.inbound, - self.outbound) + 
self.inbound, + self.outbound) def page_rank(nodes, limit=3, d=0.85): @@ -44,9 +44,9 @@ def page_rank(nodes, limit=3, d=0.85): outbounds[node.name] = len(node.outbound) for i in range(limit): - print("======= Iteration {} =======".format(i+1)) + print("======= Iteration {} =======".format(i + 1)) for j, node in enumerate(nodes): - ranks[node.name] = (1 - d) + d * sum([ ranks[ib]/outbounds[ib] for ib in node.inbound ]) + ranks[node.name] = (1 - d) + d * sum([ranks[ib] / outbounds[ib] for ib in node.inbound]) print(ranks) @@ -54,7 +54,7 @@ def main(): names = list(input('Enter Names of the Nodes: ').split()) nodes = [Node(name) for name in names] - + for ri, row in enumerate(graph): for ci, col in enumerate(row): if col == 1: @@ -69,4 +69,4 @@ def main(): if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/graphs/prim.py b/graphs/prim.py index f7e08278966d..38d9d9edca42 100644 --- a/graphs/prim.py +++ b/graphs/prim.py @@ -76,4 +76,4 @@ def prim(graph, root): v.key = u.edges[v.id] for i in range(1, len(graph)): A.append([graph[i].id, graph[i].pi.id]) - return(A) + return (A) diff --git a/graphs/scc_kosaraju.py b/graphs/scc_kosaraju.py index 1f13ebaba36b..2f56ce70acb7 100644 --- a/graphs/scc_kosaraju.py +++ b/graphs/scc_kosaraju.py @@ -1,20 +1,22 @@ from __future__ import print_function + # n - no of nodes, m - no of edges -n, m = list(map(int,input().split())) +n, m = list(map(int, input().split())) -g = [[] for i in range(n)] #graph -r = [[] for i in range(n)] #reversed graph +g = [[] for i in range(n)] # graph +r = [[] for i in range(n)] # reversed graph # input graph data (edges) for i in range(m): - u, v = list(map(int,input().split())) + u, v = list(map(int, input().split())) g[u].append(v) r[v].append(u) stack = [] -visit = [False]*n +visit = [False] * n scc = [] component = [] + def dfs(u): global g, r, scc, component, visit, stack if visit[u]: return @@ -23,6 +25,7 @@ def dfs(u): dfs(v) stack.append(u) + def dfs2(u): global g, 
r, scc, component, visit, stack if visit[u]: return @@ -31,11 +34,12 @@ def dfs2(u): for v in r[u]: dfs2(v) + def kosaraju(): global g, r, scc, component, visit, stack for i in range(n): dfs(i) - visit = [False]*n + visit = [False] * n for i in stack[::-1]: if visit[i]: continue component = [] @@ -43,4 +47,5 @@ def kosaraju(): scc.append(component) return scc + print(kosaraju()) diff --git a/hashes/chaos_machine.py b/hashes/chaos_machine.py index f0a305bfeade..a7995e0c02ad 100644 --- a/hashes/chaos_machine.py +++ b/hashes/chaos_machine.py @@ -2,12 +2,14 @@ from __future__ import print_function try: - input = raw_input # Python 2 + input = raw_input # Python 2 except NameError: - pass # Python 3 + pass # Python 3 # Chaos Machine (K, t, m) -K = [0.33, 0.44, 0.55, 0.44, 0.33]; t = 3; m = 5 +K = [0.33, 0.44, 0.55, 0.44, 0.33]; +t = 3; +m = 5 # Buffer Space (with Parameters Space) buffer_space, params_space = [], [] @@ -15,75 +17,80 @@ # Machine Time machine_time = 0 + def push(seed): - global buffer_space, params_space, machine_time, \ - K, m, t + global buffer_space, params_space, machine_time, \ + K, m, t + + # Choosing Dynamical Systems (All) + for key, value in enumerate(buffer_space): + # Evolution Parameter + e = float(seed / value) - # Choosing Dynamical Systems (All) - for key, value in enumerate(buffer_space): - # Evolution Parameter - e = float(seed / value) + # Control Theory: Orbit Change + value = (buffer_space[(key + 1) % m] + e) % 1 - # Control Theory: Orbit Change - value = (buffer_space[(key + 1) % m] + e) % 1 + # Control Theory: Trajectory Change + r = (params_space[key] + e) % 1 + 3 - # Control Theory: Trajectory Change - r = (params_space[key] + e) % 1 + 3 + # Modification (Transition Function) - Jumps + buffer_space[key] = \ + round(float(r * value * (1 - value)), 10) + params_space[key] = \ + r # Saving to Parameters Space - # Modification (Transition Function) - Jumps - buffer_space[key] = \ - round(float(r * value * (1 - value)), 10) - 
params_space[key] = \ - r # Saving to Parameters Space + # Logistic Map + assert max(buffer_space) < 1 + assert max(params_space) < 4 - # Logistic Map - assert max(buffer_space) < 1 - assert max(params_space) < 4 + # Machine Time + machine_time += 1 - # Machine Time - machine_time += 1 def pull(): - global buffer_space, params_space, machine_time, \ - K, m, t + global buffer_space, params_space, machine_time, \ + K, m, t + + # PRNG (Xorshift by George Marsaglia) + def xorshift(X, Y): + X ^= Y >> 13 + Y ^= X << 17 + X ^= Y >> 5 + return X - # PRNG (Xorshift by George Marsaglia) - def xorshift(X, Y): - X ^= Y >> 13 - Y ^= X << 17 - X ^= Y >> 5 - return X + # Choosing Dynamical Systems (Increment) + key = machine_time % m - # Choosing Dynamical Systems (Increment) - key = machine_time % m + # Evolution (Time Length) + for i in range(0, t): + # Variables (Position + Parameters) + r = params_space[key] + value = buffer_space[key] - # Evolution (Time Length) - for i in range(0, t): - # Variables (Position + Parameters) - r = params_space[key] - value = buffer_space[key] + # Modification (Transition Function) - Flow + buffer_space[key] = \ + round(float(r * value * (1 - value)), 10) + params_space[key] = \ + (machine_time * 0.01 + r * 1.01) % 1 + 3 - # Modification (Transition Function) - Flow - buffer_space[key] = \ - round(float(r * value * (1 - value)), 10) - params_space[key] = \ - (machine_time * 0.01 + r * 1.01) % 1 + 3 + # Choosing Chaotic Data + X = int(buffer_space[(key + 2) % m] * (10 ** 10)) + Y = int(buffer_space[(key - 2) % m] * (10 ** 10)) - # Choosing Chaotic Data - X = int(buffer_space[(key + 2) % m] * (10 ** 10)) - Y = int(buffer_space[(key - 2) % m] * (10 ** 10)) + # Machine Time + machine_time += 1 - # Machine Time - machine_time += 1 + return xorshift(X, Y) % 0xFFFFFFFF - return xorshift(X, Y) % 0xFFFFFFFF def reset(): - global buffer_space, params_space, machine_time, \ - K, m, t + global buffer_space, params_space, machine_time, \ + K, m, t + + 
buffer_space = K; + params_space = [0] * m + machine_time = 0 - buffer_space = K; params_space = [0] * m - machine_time = 0 ####################################### @@ -92,15 +99,17 @@ def reset(): # Pushing Data (Input) import random + message = random.sample(range(0xFFFFFFFF), 100) for chunk in message: - push(chunk) + push(chunk) # for controlling inp = "" # Pulling Data (Output) while inp in ("e", "E"): - print("%s" % format(pull(), '#04x')) - print(buffer_space); print(params_space) - inp = input("(e)exit? ").strip() + print("%s" % format(pull(), '#04x')) + print(buffer_space); + print(params_space) + inp = input("(e)exit? ").strip() diff --git a/hashes/md5.py b/hashes/md5.py index d3f15510874e..1e4fead96326 100644 --- a/hashes/md5.py +++ b/hashes/md5.py @@ -1,155 +1,165 @@ from __future__ import print_function + import math + def rearrange(bitString32): - """[summary] - Regroups the given binary string. - - Arguments: - bitString32 {[string]} -- [32 bit binary] - - Raises: - ValueError -- [if the given string not are 32 bit binary string] - - Returns: - [string] -- [32 bit binary string] - """ - - if len(bitString32) != 32: - raise ValueError("Need length 32") - newString = "" - for i in [3,2,1,0]: - newString += bitString32[8*i:8*i+8] - return newString + """[summary] + Regroups the given binary string. + + Arguments: + bitString32 {[string]} -- [32 bit binary] + + Raises: + ValueError -- [if the given string not are 32 bit binary string] + + Returns: + [string] -- [32 bit binary string] + """ + + if len(bitString32) != 32: + raise ValueError("Need length 32") + newString = "" + for i in [3, 2, 1, 0]: + newString += bitString32[8 * i:8 * i + 8] + return newString + def reformatHex(i): - """[summary] - Converts the given integer into 8-digit hex number. + """[summary] + Converts the given integer into 8-digit hex number. 
+ + Arguments: + i {[int]} -- [integer] + """ - Arguments: - i {[int]} -- [integer] - """ + hexrep = format(i, '08x') + thing = "" + for i in [3, 2, 1, 0]: + thing += hexrep[2 * i:2 * i + 2] + return thing - hexrep = format(i,'08x') - thing = "" - for i in [3,2,1,0]: - thing += hexrep[2*i:2*i+2] - return thing def pad(bitString): - """[summary] - Fills up the binary string to a 512 bit binary string - - Arguments: - bitString {[string]} -- [binary string] - - Returns: - [string] -- [binary string] - """ - - startLength = len(bitString) - bitString += '1' - while len(bitString) % 512 != 448: - bitString += '0' - lastPart = format(startLength,'064b') - bitString += rearrange(lastPart[32:]) + rearrange(lastPart[:32]) - return bitString + """[summary] + Fills up the binary string to a 512 bit binary string + + Arguments: + bitString {[string]} -- [binary string] + + Returns: + [string] -- [binary string] + """ + + startLength = len(bitString) + bitString += '1' + while len(bitString) % 512 != 448: + bitString += '0' + lastPart = format(startLength, '064b') + bitString += rearrange(lastPart[32:]) + rearrange(lastPart[:32]) + return bitString + def getBlock(bitString): - """[summary] - Iterator: - Returns by each call a list of length 16 with the 32 bit - integer blocks. - - Arguments: - bitString {[string]} -- [binary string >= 512] - """ - - currPos = 0 - while currPos < len(bitString): - currPart = bitString[currPos:currPos+512] - mySplits = [] - for i in range(16): - mySplits.append(int(rearrange(currPart[32*i:32*i+32]),2)) - yield mySplits - currPos += 512 + """[summary] + Iterator: + Returns by each call a list of length 16 with the 32 bit + integer blocks. 
+ + Arguments: + bitString {[string]} -- [binary string >= 512] + """ + + currPos = 0 + while currPos < len(bitString): + currPart = bitString[currPos:currPos + 512] + mySplits = [] + for i in range(16): + mySplits.append(int(rearrange(currPart[32 * i:32 * i + 32]), 2)) + yield mySplits + currPos += 512 + def not32(i): - i_str = format(i,'032b') - new_str = '' - for c in i_str: - new_str += '1' if c=='0' else '0' - return int(new_str,2) + i_str = format(i, '032b') + new_str = '' + for c in i_str: + new_str += '1' if c == '0' else '0' + return int(new_str, 2) -def sum32(a,b): - return (a + b) % 2**32 -def leftrot32(i,s): - return (i << s) ^ (i >> (32-s)) +def sum32(a, b): + return (a + b) % 2 ** 32 + + +def leftrot32(i, s): + return (i << s) ^ (i >> (32 - s)) + def md5me(testString): - """[summary] - Returns a 32-bit hash code of the string 'testString' - - Arguments: - testString {[string]} -- [message] - """ - - bs ='' - for i in testString: - bs += format(ord(i),'08b') - bs = pad(bs) - - tvals = [int(2**32 * abs(math.sin(i+1))) for i in range(64)] - - a0 = 0x67452301 - b0 = 0xefcdab89 - c0 = 0x98badcfe - d0 = 0x10325476 - - s = [7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, \ - 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, \ - 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, \ - 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21 ] - - for m in getBlock(bs): - A = a0 - B = b0 - C = c0 - D = d0 - for i in range(64): - if i <= 15: - #f = (B & C) | (not32(B) & D) - f = D ^ (B & (C ^ D)) - g = i - elif i<= 31: - #f = (D & B) | (not32(D) & C) - f = C ^ (D & (B ^ C)) - g = (5*i+1) % 16 - elif i <= 47: - f = B ^ C ^ D - g = (3*i+5) % 16 - else: - f = C ^ (B | not32(D)) - g = (7*i) % 16 - dtemp = D - D = C - C = B - B = sum32(B,leftrot32((A + f + tvals[i] + m[g]) % 2**32, s[i])) - A = dtemp - a0 = sum32(a0, A) - b0 = sum32(b0, B) - c0 = sum32(c0, C) - d0 = sum32(d0, D) - - digest = reformatHex(a0) + reformatHex(b0) + 
reformatHex(c0) + reformatHex(d0) - return digest + """[summary] + Returns a 32-bit hash code of the string 'testString' + + Arguments: + testString {[string]} -- [message] + """ + + bs = '' + for i in testString: + bs += format(ord(i), '08b') + bs = pad(bs) + + tvals = [int(2 ** 32 * abs(math.sin(i + 1))) for i in range(64)] + + a0 = 0x67452301 + b0 = 0xefcdab89 + c0 = 0x98badcfe + d0 = 0x10325476 + + s = [7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, \ + 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, \ + 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, \ + 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21] + + for m in getBlock(bs): + A = a0 + B = b0 + C = c0 + D = d0 + for i in range(64): + if i <= 15: + # f = (B & C) | (not32(B) & D) + f = D ^ (B & (C ^ D)) + g = i + elif i <= 31: + # f = (D & B) | (not32(D) & C) + f = C ^ (D & (B ^ C)) + g = (5 * i + 1) % 16 + elif i <= 47: + f = B ^ C ^ D + g = (3 * i + 5) % 16 + else: + f = C ^ (B | not32(D)) + g = (7 * i) % 16 + dtemp = D + D = C + C = B + B = sum32(B, leftrot32((A + f + tvals[i] + m[g]) % 2 ** 32, s[i])) + A = dtemp + a0 = sum32(a0, A) + b0 = sum32(b0, B) + c0 = sum32(c0, C) + d0 = sum32(d0, D) + + digest = reformatHex(a0) + reformatHex(b0) + reformatHex(c0) + reformatHex(d0) + return digest + def test(): - assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e" - assert md5me("The quick brown fox jumps over the lazy dog") == "9e107d9d372bb6826bd81d3542a419d6" - print("Success.") + assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e" + assert md5me("The quick brown fox jumps over the lazy dog") == "9e107d9d372bb6826bd81d3542a419d6" + print("Success.") if __name__ == "__main__": - test() + test() diff --git a/hashes/sha1.py b/hashes/sha1.py index 4c78ad3a89e5..95a499ff29bf 100644 --- a/hashes/sha1.py +++ b/hashes/sha1.py @@ -24,8 +24,8 @@ """ import argparse +import hashlib # hashlib is only used inside the Test class import struct -import hashlib #hashlib is only 
used inside the Test class import unittest @@ -33,6 +33,7 @@ class SHA1Hash: """ Class to contain the entire pipeline for SHA1 Hashing Algorithm """ + def __init__(self, data): """ Inititates the variables data and h. h is a list of 5 8-digit Hexadecimal @@ -54,7 +55,7 @@ def padding(self): """ Pads the input message with zeros so that padded_data has 64 bytes or 512 bits """ - padding = b'\x80' + b'\x00'*(63 - (len(self.data) + 8) % 64) + padding = b'\x80' + b'\x00' * (63 - (len(self.data) + 8) % 64) padded_data = self.data + padding + struct.pack('>Q', 8 * len(self.data)) return padded_data @@ -62,7 +63,7 @@ def split_blocks(self): """ Returns a list of bytestrings each of length 64 """ - return [self.padded_data[i:i+64] for i in range(0, len(self.padded_data), 64)] + return [self.padded_data[i:i + 64] for i in range(0, len(self.padded_data), 64)] # @staticmethod def expand_block(self, block): @@ -72,7 +73,7 @@ def expand_block(self, block): """ w = list(struct.unpack('>16L', block)) + [0] * 64 for i in range(16, 80): - w[i] = self.rotate((w[i-3] ^ w[i-8] ^ w[i-14] ^ w[i-16]), 1) + w[i] = self.rotate((w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]), 1) return w def final_hash(self): @@ -102,20 +103,21 @@ def final_hash(self): elif 60 <= i < 80: f = b ^ c ^ d k = 0xCA62C1D6 - a, b, c, d, e = self.rotate(a, 5) + f + e + k + expanded_block[i] & 0xffffffff,\ + a, b, c, d, e = self.rotate(a, 5) + f + e + k + expanded_block[i] & 0xffffffff, \ a, self.rotate(b, 30), c, d - self.h = self.h[0] + a & 0xffffffff,\ - self.h[1] + b & 0xffffffff,\ - self.h[2] + c & 0xffffffff,\ - self.h[3] + d & 0xffffffff,\ + self.h = self.h[0] + a & 0xffffffff, \ + self.h[1] + b & 0xffffffff, \ + self.h[2] + c & 0xffffffff, \ + self.h[3] + d & 0xffffffff, \ self.h[4] + e & 0xffffffff - return '%08x%08x%08x%08x%08x' %tuple(self.h) + return '%08x%08x%08x%08x%08x' % tuple(self.h) class SHA1HashTest(unittest.TestCase): """ Test class for the SHA1Hash class. 
Inherits the TestCase class from unittest """ + def testMatchHashes(self): msg = bytes('Test String', 'utf-8') self.assertEqual(SHA1Hash(msg).final_hash(), hashlib.sha1(msg).hexdigest()) @@ -135,7 +137,7 @@ def main(): parser.add_argument('--file', dest='input_file', help='Hash contents of a file') args = parser.parse_args() input_string = args.input_string - #In any case hash input should be a bytestring + # In any case hash input should be a bytestring if args.input_file: with open(args.input_file, 'rb') as f: hash_input = f.read() diff --git a/ju_dan/main.py b/ju_dan/main.py new file mode 100644 index 000000000000..1ce004b5d597 --- /dev/null +++ b/ju_dan/main.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 +# -*- coding: UTF-8 -*- + + +import logging +import time + +import itchat +import pymysql + + +# 注册对文本消息进行监听,对群聊进行监听 +@itchat.msg_register(itchat.content.INCOME_MSG, isGroupChat=True) +def handle_content(msg): + try: + msg_type = msg['MsgType'] + if msg_type == 1: + content = msg['Text'] + else: + content = '非文本内容' + # 内容最多1024个字符 + content = custom_str(content, 1024) + time_stamp = msg['CreateTime'] + create_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time_stamp)) + current_talk_group_id = msg['User']['UserName'] + # 内容最多64个字符 + current_talk_group_name = custom_str(msg['User']['NickName'], 64) + from_user_id = msg['ActualUserName'] + # 发送者昵称最多1024个字符 + from_user_name = custom_str(msg['ActualNickName'], 64) + sql = "INSERT INTO wx_group_chat(msg_type, content, sender_id, sender_name,\ + group_id, group_name, time_stamp, create_time) \ + VALUE ('%d','%s','%s','%s','%s','%s','%d','%s');"\ + % (int(msg_type), content, from_user_id, from_user_name,\ + current_talk_group_id, current_talk_group_name, int(time_stamp), create_time) + exe_db(db, sql, logger) + except Exception as e: + logger.debug(e) + return + + +def custom_str(source, length): + """ + 处理字符串长度, 并将'字符转义\' + :param source: + :param length: + :return: + """ + if len(source) > length: + 
fix_length_sub_str = source[0:length-1] + else: + fix_length_sub_str = source + return fix_length_sub_str.replace("'", "\\'") + + +def build_logs(): + # 设置log名称 + log_name = "wx.log" + # 定义logger + logger = logging.getLogger() + # 设置级别为debug + logger.setLevel(level=logging.DEBUG) + # 设置 logging文件名称 + handler = logging.FileHandler(log_name) + # 设置级别为debug + handler.setLevel(logging.DEBUG) + # 设置log的格式 + formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + # 将格式压进logger + handler.setFormatter(formatter) + console = logging.StreamHandler() + console.setLevel(logging.DEBUG) + # 写入logger + logger.addHandler(handler) + logger.addHandler(console) + # 将logger返回 + return logger + + +def connect_mysql(logger): + try: + #db = pymysql.connect(host='47.93.206.227', port=3306, user='root', passwd='root', db='spring_clould', charset='utf8mb4') + db = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='root', db='spring_clould', charset='utf8mb4') + return db + except Exception as e: + logger.debug('MySQL数据库连接失败') + logger.debug(e) + + +def exe_db(db, sql, logger): + try: + cursor = db.cursor() + db.ping(reconnect=True) + cursor.execute(sql) + db.commit() + except Exception as e: + logger.debug('SQL执行失败,请至日志查看该SQL记录') + logger.debug(sql) + logger.debug(e) + + +def select_db(db, sql, logger): + try: + cursor = db.cursor() + db.ping(reconnect=True) + cursor.execute(sql) + return cursor.fetchall() + except Exception as e: + logger.debug('SQL执行失败,请至日志查看该SQL记录') + logger.debug(sql) + logger.debug(e) + + +def main(): + # 手机扫码登录 + itchat.auto_login(hotReload=True, enableCmdQR=2) + global logger + logger = build_logs() + global db + db = connect_mysql(logger) + # 持续运行 + itchat.run() + + +if __name__ == '__main__': + main() diff --git a/linear_algebra_python/src/lib.py b/linear_algebra_python/src/lib.py index 281991a93b2d..b1a605b9cd1c 100644 --- a/linear_algebra_python/src/lib.py +++ b/linear_algebra_python/src/lib.py @@ -20,7 +20,6 @@ - function 
randomMatrix(W,H,a,b) """ - import math import random @@ -45,13 +44,15 @@ class Vector(object): changeComponent(pos,value) : changes the specified component. TODO: compare-operator """ - def __init__(self,components=[]): + + def __init__(self, components=[]): """ input: components or nothing simple constructor for init the vector """ self.__components = list(components) - def set(self,components): + + def set(self, components): """ input: new components changes the components of the vector. @@ -61,34 +62,39 @@ def set(self,components): self.__components = list(components) else: raise Exception("please give any vector") + def __str__(self): """ returns a string representation of the vector """ return "(" + ",".join(map(str, self.__components)) + ")" - def component(self,i): + + def component(self, i): """ input: index (start at 0) output: the i-th component of the vector. """ - if type(i) is int and -len(self.__components) <= i < len(self.__components) : + if type(i) is int and -len(self.__components) <= i < len(self.__components): return self.__components[i] else: raise Exception("index out of range") + def __len__(self): """ returns the size of the vector """ return len(self.__components) + def eulidLength(self): """ returns the eulidean length of the vector """ summe = 0 for c in self.__components: - summe += c**2 + summe += c ** 2 return math.sqrt(summe) - def __add__(self,other): + + def __add__(self, other): """ input: other vector assumes: other vector has the same size @@ -100,7 +106,8 @@ def __add__(self,other): return Vector(result) else: raise Exception("must have the same size") - def __sub__(self,other): + + def __sub__(self, other): """ input: other vector assumes: other vector has the same size @@ -110,73 +117,77 @@ def __sub__(self,other): if size == len(other): result = [self.__components[i] - other.component(i) for i in range(size)] return result - else: # error case + else: # error case raise Exception("must have the same size") - def 
__mul__(self,other): + + def __mul__(self, other): """ mul implements the scalar multiplication and the dot-product """ - if isinstance(other,float) or isinstance(other,int): - ans = [c*other for c in self.__components] + if isinstance(other, float) or isinstance(other, int): + ans = [c * other for c in self.__components] return ans - elif (isinstance(other,Vector) and (len(self) == len(other))): + elif (isinstance(other, Vector) and (len(self) == len(other))): size = len(self) summe = 0 for i in range(size): summe += self.__components[i] * other.component(i) return summe - else: # error case + else: # error case raise Exception("invalide operand!") + def copy(self): """ copies this vector and returns it. """ return Vector(self.__components) - def changeComponent(self,pos,value): + + def changeComponent(self, pos, value): """ input: an index (pos) and a value changes the specified component (pos) with the 'value' """ - #precondition + # precondition assert (-len(self.__components) <= pos < len(self.__components)) self.__components[pos] = value - + + def zeroVector(dimension): """ returns a zero-vector of size 'dimension' - """ - #precondition - assert(isinstance(dimension,int)) - return Vector([0]*dimension) + """ + # precondition + assert (isinstance(dimension, int)) + return Vector([0] * dimension) -def unitBasisVector(dimension,pos): +def unitBasisVector(dimension, pos): """ returns a unit basis vector with a One at index 'pos' (indexing at 0) """ - #precondition - assert(isinstance(dimension,int) and (isinstance(pos,int))) - ans = [0]*dimension + # precondition + assert (isinstance(dimension, int) and (isinstance(pos, int))) + ans = [0] * dimension ans[pos] = 1 return Vector(ans) - -def axpy(scalar,x,y): + +def axpy(scalar, x, y): """ input: a 'scalar' and two vectors 'x' and 'y' output: a vector computes the axpy operation """ # precondition - assert(isinstance(x,Vector) and (isinstance(y,Vector)) \ - and (isinstance(scalar,int) or isinstance(scalar,float))) - 
return (x*scalar + y) - + assert (isinstance(x, Vector) and (isinstance(y, Vector)) \ + and (isinstance(scalar, int) or isinstance(scalar, float))) + return (x * scalar + y) + -def randomVector(N,a,b): +def randomVector(N, a, b): """ input: size (N) of the vector. random range (a,b) @@ -184,7 +195,7 @@ def randomVector(N,a,b): random integer components between 'a' and 'b'. """ random.seed(None) - ans = [random.randint(a,b) for i in range(N)] + ans = [random.randint(a, b) for i in range(N)] return Vector(ans) @@ -205,7 +216,8 @@ class Matrix(object): operator + : implements the matrix-addition. operator - _ implements the matrix-subtraction """ - def __init__(self,matrix,w,h): + + def __init__(self, matrix, w, h): """ simple constructor for initialzes the matrix with components. @@ -213,6 +225,7 @@ def __init__(self,matrix,w,h): self.__matrix = matrix self.__width = w self.__height = h + def __str__(self): """ returns a string representation of this @@ -222,58 +235,64 @@ def __str__(self): for i in range(self.__height): ans += "|" for j in range(self.__width): - if j < self.__width -1: + if j < self.__width - 1: ans += str(self.__matrix[i][j]) + "," else: ans += str(self.__matrix[i][j]) + "|\n" return ans - def changeComponent(self,x,y, value): + + def changeComponent(self, x, y, value): """ changes the x-y component of this matrix """ if x >= 0 and x < self.__height and y >= 0 and y < self.__width: self.__matrix[x][y] = value else: - raise Exception ("changeComponent: indices out of bounds") - def component(self,x,y): + raise Exception("changeComponent: indices out of bounds") + + def component(self, x, y): """ returns the specified (x,y) component """ if x >= 0 and x < self.__height and y >= 0 and y < self.__width: return self.__matrix[x][y] else: - raise Exception ("changeComponent: indices out of bounds") + raise Exception("changeComponent: indices out of bounds") + def width(self): """ getter for the width """ return self.__width + def height(self): """ getter 
for the height """ return self.__height - def __mul__(self,other): + + def __mul__(self, other): """ implements the matrix-vector multiplication. implements the matrix-scalar multiplication """ - if isinstance(other, Vector): # vector-matrix + if isinstance(other, Vector): # vector-matrix if (len(other) == self.__width): ans = zeroVector(self.__height) for i in range(self.__height): summe = 0 for j in range(self.__width): summe += other.component(j) * self.__matrix[i][j] - ans.changeComponent(i,summe) + ans.changeComponent(i, summe) summe = 0 return ans else: raise Exception("vector must have the same size as the " + "number of columns of the matrix!") - elif isinstance(other,int) or isinstance(other,float): # matrix-scalar + elif isinstance(other, int) or isinstance(other, float): # matrix-scalar matrix = [[self.__matrix[i][j] * other for j in range(self.__width)] for i in range(self.__height)] - return Matrix(matrix,self.__width,self.__height) - def __add__(self,other): + return Matrix(matrix, self.__width, self.__height) + + def __add__(self, other): """ implements the matrix-addition. """ @@ -282,12 +301,13 @@ def __add__(self,other): for i in range(self.__height): row = [] for j in range(self.__width): - row.append(self.__matrix[i][j] + other.component(i,j)) + row.append(self.__matrix[i][j] + other.component(i, j)) matrix.append(row) - return Matrix(matrix,self.__width,self.__height) + return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrix must have the same dimension!") - def __sub__(self,other): + + def __sub__(self, other): """ implements the matrix-subtraction. 
""" @@ -296,28 +316,26 @@ def __sub__(self,other): for i in range(self.__height): row = [] for j in range(self.__width): - row.append(self.__matrix[i][j] - other.component(i,j)) + row.append(self.__matrix[i][j] - other.component(i, j)) matrix.append(row) - return Matrix(matrix,self.__width,self.__height) + return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrix must have the same dimension!") - + def squareZeroMatrix(N): """ returns a square zero-matrix of dimension NxN """ - ans = [[0]*N for i in range(N)] - return Matrix(ans,N,N) - - -def randomMatrix(W,H,a,b): + ans = [[0] * N for i in range(N)] + return Matrix(ans, N, N) + + +def randomMatrix(W, H, a, b): """ returns a random matrix WxH with integer components between 'a' and 'b' """ random.seed(None) - matrix = [[random.randint(a,b) for j in range(W)] for i in range(H)] - return Matrix(matrix,W,H) - - + matrix = [[random.randint(a, b) for j in range(W)] for i in range(H)] + return Matrix(matrix, W, H) diff --git a/linear_algebra_python/src/tests.py b/linear_algebra_python/src/tests.py index a26eb92653e2..2d543b83507b 100644 --- a/linear_algebra_python/src/tests.py +++ b/linear_algebra_python/src/tests.py @@ -9,125 +9,145 @@ """ import unittest + from lib import * + class Test(unittest.TestCase): def test_component(self): """ test for method component """ - x = Vector([1,2,3]) - self.assertEqual(x.component(0),1) - self.assertEqual(x.component(2),3) + x = Vector([1, 2, 3]) + self.assertEqual(x.component(0), 1) + self.assertEqual(x.component(2), 3) try: y = Vector() self.assertTrue(False) except: self.assertTrue(True) + def test_str(self): """ test for toString() method """ - x = Vector([0,0,0,0,0,1]) - self.assertEqual(str(x),"(0,0,0,0,0,1)") + x = Vector([0, 0, 0, 0, 0, 1]) + self.assertEqual(str(x), "(0,0,0,0,0,1)") + def test_size(self): """ test for size()-method """ - x = Vector([1,2,3,4]) - self.assertEqual(len(x),4) + x = Vector([1, 2, 3, 4]) + self.assertEqual(len(x), 4) + def 
test_euclidLength(self): """ test for the eulidean length """ - x = Vector([1,2]) - self.assertAlmostEqual(x.eulidLength(),2.236,3) + x = Vector([1, 2]) + self.assertAlmostEqual(x.eulidLength(), 2.236, 3) + def test_add(self): """ test for + operator """ - x = Vector([1,2,3]) - y = Vector([1,1,1]) - self.assertEqual((x+y).component(0),2) - self.assertEqual((x+y).component(1),3) - self.assertEqual((x+y).component(2),4) + x = Vector([1, 2, 3]) + y = Vector([1, 1, 1]) + self.assertEqual((x + y).component(0), 2) + self.assertEqual((x + y).component(1), 3) + self.assertEqual((x + y).component(2), 4) + def test_sub(self): """ test for - operator """ - x = Vector([1,2,3]) - y = Vector([1,1,1]) - self.assertEqual((x-y).component(0),0) - self.assertEqual((x-y).component(1),1) - self.assertEqual((x-y).component(2),2) + x = Vector([1, 2, 3]) + y = Vector([1, 1, 1]) + self.assertEqual((x - y).component(0), 0) + self.assertEqual((x - y).component(1), 1) + self.assertEqual((x - y).component(2), 2) + def test_mul(self): """ test for * operator """ - x = Vector([1,2,3]) - a = Vector([2,-1,4]) # for test of dot-product - b = Vector([1,-2,-1]) - self.assertEqual(str(x*3.0),"(3.0,6.0,9.0)") - self.assertEqual((a*b),0) + x = Vector([1, 2, 3]) + a = Vector([2, -1, 4]) # for test of dot-product + b = Vector([1, -2, -1]) + self.assertEqual(str(x * 3.0), "(3.0,6.0,9.0)") + self.assertEqual((a * b), 0) + def test_zeroVector(self): """ test for the global function zeroVector(...) """ self.assertTrue(str(zeroVector(10)).count("0") == 10) + def test_unitBasisVector(self): """ test for the global function unitBasisVector(...) """ - self.assertEqual(str(unitBasisVector(3,1)),"(0,1,0)") + self.assertEqual(str(unitBasisVector(3, 1)), "(0,1,0)") + def test_axpy(self): """ test for the global function axpy(...) 
(operation) """ - x = Vector([1,2,3]) - y = Vector([1,0,1]) - self.assertEqual(str(axpy(2,x,y)),"(3,4,7)") + x = Vector([1, 2, 3]) + y = Vector([1, 0, 1]) + self.assertEqual(str(axpy(2, x, y)), "(3,4,7)") + def test_copy(self): """ test for the copy()-method """ - x = Vector([1,0,0,0,0,0]) + x = Vector([1, 0, 0, 0, 0, 0]) y = x.copy() - self.assertEqual(str(x),str(y)) + self.assertEqual(str(x), str(y)) + def test_changeComponent(self): """ test for the changeComponent(...)-method """ - x = Vector([1,0,0]) - x.changeComponent(0,0) - x.changeComponent(1,1) - self.assertEqual(str(x),"(0,1,0)") + x = Vector([1, 0, 0]) + x.changeComponent(0, 0) + x.changeComponent(1, 1) + self.assertEqual(str(x), "(0,1,0)") + def test_str_matrix(self): - A = Matrix([[1,2,3],[2,4,5],[6,7,8]],3,3) - self.assertEqual("|1,2,3|\n|2,4,5|\n|6,7,8|\n",str(A)) + A = Matrix([[1, 2, 3], [2, 4, 5], [6, 7, 8]], 3, 3) + self.assertEqual("|1,2,3|\n|2,4,5|\n|6,7,8|\n", str(A)) + def test__mul__matrix(self): - A = Matrix([[1,2,3],[4,5,6],[7,8,9]],3,3) - x = Vector([1,2,3]) - self.assertEqual("(14,32,50)",str(A*x)) - self.assertEqual("|2,4,6|\n|8,10,12|\n|14,16,18|\n",str(A*2)) + A = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 3, 3) + x = Vector([1, 2, 3]) + self.assertEqual("(14,32,50)", str(A * x)) + self.assertEqual("|2,4,6|\n|8,10,12|\n|14,16,18|\n", str(A * 2)) + def test_changeComponent_matrix(self): - A = Matrix([[1,2,3],[2,4,5],[6,7,8]],3,3) - A.changeComponent(0,2,5) - self.assertEqual("|1,2,5|\n|2,4,5|\n|6,7,8|\n",str(A)) + A = Matrix([[1, 2, 3], [2, 4, 5], [6, 7, 8]], 3, 3) + A.changeComponent(0, 2, 5) + self.assertEqual("|1,2,5|\n|2,4,5|\n|6,7,8|\n", str(A)) + def test_component_matrix(self): - A = Matrix([[1,2,3],[2,4,5],[6,7,8]],3,3) - self.assertEqual(7,A.component(2,1),0.01) + A = Matrix([[1, 2, 3], [2, 4, 5], [6, 7, 8]], 3, 3) + self.assertEqual(7, A.component(2, 1), 0.01) + def test__add__matrix(self): - A = Matrix([[1,2,3],[2,4,5],[6,7,8]],3,3) - B = 
Matrix([[1,2,7],[2,4,5],[6,7,10]],3,3) - self.assertEqual("|2,4,10|\n|4,8,10|\n|12,14,18|\n",str(A+B)) + A = Matrix([[1, 2, 3], [2, 4, 5], [6, 7, 8]], 3, 3) + B = Matrix([[1, 2, 7], [2, 4, 5], [6, 7, 10]], 3, 3) + self.assertEqual("|2,4,10|\n|4,8,10|\n|12,14,18|\n", str(A + B)) + def test__sub__matrix(self): - A = Matrix([[1,2,3],[2,4,5],[6,7,8]],3,3) - B = Matrix([[1,2,7],[2,4,5],[6,7,10]],3,3) - self.assertEqual("|0,0,-4|\n|0,0,0|\n|0,0,-2|\n",str(A-B)) + A = Matrix([[1, 2, 3], [2, 4, 5], [6, 7, 8]], 3, 3) + B = Matrix([[1, 2, 7], [2, 4, 5], [6, 7, 10]], 3, 3) + self.assertEqual("|0,0,-4|\n|0,0,0|\n|0,0,-2|\n", str(A - B)) + def test_squareZeroMatrix(self): - self.assertEqual('|0,0,0,0,0|\n|0,0,0,0,0|\n|0,0,0,0,0|\n|0,0,0,0,0|' - +'\n|0,0,0,0,0|\n',str(squareZeroMatrix(5))) - + self.assertEqual('|0,0,0,0,0|\n|0,0,0,0,0|\n|0,0,0,0,0|\n|0,0,0,0,0|' + + '\n|0,0,0,0,0|\n', str(squareZeroMatrix(5))) + if __name__ == "__main__": unittest.main() diff --git a/machine_learning/Random Forest Classification/random_forest_classification.py b/machine_learning/Random Forest Classification/random_forest_classification.py index d5dde4b13822..54a1bc67a6b1 100644 --- a/machine_learning/Random Forest Classification/random_forest_classification.py +++ b/machine_learning/Random Forest Classification/random_forest_classification.py @@ -1,8 +1,8 @@ # Random Forest Classification +import matplotlib.pyplot as plt # Importing the libraries import numpy as np -import matplotlib.pyplot as plt import pandas as pd # Importing the dataset @@ -12,17 +12,20 @@ # Splitting the dataset into the Training set and Test set from sklearn.cross_validation import train_test_split -X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) + +X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0) # Feature Scaling from sklearn.preprocessing import StandardScaler + sc = StandardScaler() X_train = sc.fit_transform(X_train) X_test = 
sc.transform(X_test) # Fitting Random Forest Classification to the Training set from sklearn.ensemble import RandomForestClassifier -classifier = RandomForestClassifier(n_estimators = 10, criterion = 'entropy', random_state = 0) + +classifier = RandomForestClassifier(n_estimators=10, criterion='entropy', random_state=0) classifier.fit(X_train, y_train) # Predicting the Test set results @@ -30,20 +33,22 @@ # Making the Confusion Matrix from sklearn.metrics import confusion_matrix + cm = confusion_matrix(y_test, y_pred) # Visualising the Training set results from matplotlib.colors import ListedColormap + X_set, y_set = X_train, y_train -X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), - np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) +X1, X2 = np.meshgrid(np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01), + np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01)) plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), - alpha = 0.75, cmap = ListedColormap(('red', 'green'))) + alpha=0.75, cmap=ListedColormap(('red', 'green'))) plt.xlim(X1.min(), X1.max()) plt.ylim(X2.min(), X2.max()) for i, j in enumerate(np.unique(y_set)): plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], - c = ListedColormap(('red', 'green'))(i), label = j) + c=ListedColormap(('red', 'green'))(i), label=j) plt.title('Random Forest Classification (Training set)') plt.xlabel('Age') plt.ylabel('Estimated Salary') @@ -52,18 +57,19 @@ # Visualising the Test set results from matplotlib.colors import ListedColormap + X_set, y_set = X_test, y_test -X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), - np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) +X1, X2 = np.meshgrid(np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() 
+ 1, step=0.01), + np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01)) plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), - alpha = 0.75, cmap = ListedColormap(('red', 'green'))) + alpha=0.75, cmap=ListedColormap(('red', 'green'))) plt.xlim(X1.min(), X1.max()) plt.ylim(X2.min(), X2.max()) for i, j in enumerate(np.unique(y_set)): plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], - c = ListedColormap(('red', 'green'))(i), label = j) + c=ListedColormap(('red', 'green'))(i), label=j) plt.title('Random Forest Classification (Test set)') plt.xlabel('Age') plt.ylabel('Estimated Salary') plt.legend() -plt.show() \ No newline at end of file +plt.show() diff --git a/machine_learning/Random Forest Regression/random_forest_regression.py b/machine_learning/Random Forest Regression/random_forest_regression.py index fce58b1fe283..9137cb2f683c 100644 --- a/machine_learning/Random Forest Regression/random_forest_regression.py +++ b/machine_learning/Random Forest Regression/random_forest_regression.py @@ -1,8 +1,8 @@ # Random Forest Regression +import matplotlib.pyplot as plt # Importing the libraries import numpy as np -import matplotlib.pyplot as plt import pandas as pd # Importing the dataset @@ -24,7 +24,8 @@ # Fitting Random Forest Regression to the dataset from sklearn.ensemble import RandomForestRegressor -regressor = RandomForestRegressor(n_estimators = 10, random_state = 0) + +regressor = RandomForestRegressor(n_estimators=10, random_state=0) regressor.fit(X, y) # Predicting a new result @@ -33,9 +34,9 @@ # Visualising the Random Forest Regression results (higher resolution) X_grid = np.arange(min(X), max(X), 0.01) X_grid = X_grid.reshape((len(X_grid), 1)) -plt.scatter(X, y, color = 'red') -plt.plot(X_grid, regressor.predict(X_grid), color = 'blue') +plt.scatter(X, y, color='red') +plt.plot(X_grid, regressor.predict(X_grid), color='blue') plt.title('Truth or Bluff (Random Forest Regression)') 
plt.xlabel('Position level') plt.ylabel('Salary') -plt.show() \ No newline at end of file +plt.show() diff --git a/machine_learning/decision_tree.py b/machine_learning/decision_tree.py index 71849904ccf2..e39e57731f1e 100644 --- a/machine_learning/decision_tree.py +++ b/machine_learning/decision_tree.py @@ -7,8 +7,9 @@ import numpy as np + class Decision_Tree: - def __init__(self, depth = 5, min_leaf_size = 5): + def __init__(self, depth=5, min_leaf_size=5): self.depth = depth self.decision_boundary = 0 self.left = None @@ -60,8 +61,7 @@ def train(self, X, y): return best_split = 0 - min_error = self.mean_squared_error(X,np.mean(y)) * 2 - + min_error = self.mean_squared_error(X, np.mean(y)) * 2 """ loop over all possible splits for the decision tree. find the best split. @@ -88,8 +88,8 @@ def train(self, X, y): right_y = y[best_split:] self.decision_boundary = X[best_split] - self.left = Decision_Tree(depth = self.depth - 1, min_leaf_size = self.min_leaf_size) - self.right = Decision_Tree(depth = self.depth - 1, min_leaf_size = self.min_leaf_size) + self.left = Decision_Tree(depth=self.depth - 1, min_leaf_size=self.min_leaf_size) + self.right = Decision_Tree(depth=self.depth - 1, min_leaf_size=self.min_leaf_size) self.left.train(left_X, left_y) self.right.train(right_X, right_y) else: @@ -115,6 +115,7 @@ def predict(self, x): print("Error: Decision tree not yet trained") return None + def main(): """ In this demonstration we're generating a sample data set from the sin function in numpy. 
@@ -124,8 +125,8 @@ def main(): X = np.arange(-1., 1., 0.005) y = np.sin(X) - tree = Decision_Tree(depth = 10, min_leaf_size = 10) - tree.train(X,y) + tree = Decision_Tree(depth=10, min_leaf_size=10) + tree.train(X, y) test_cases = (np.random.rand(10) * 2) - 1 predictions = np.array([tree.predict(x) for x in test_cases]) @@ -135,6 +136,6 @@ def main(): print("Predictions: " + str(predictions)) print("Average error: " + str(avg_error)) - + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/machine_learning/gradient_descent.py b/machine_learning/gradient_descent.py index 6387d4939205..357aa1c4d6cb 100644 --- a/machine_learning/gradient_descent.py +++ b/machine_learning/gradient_descent.py @@ -2,6 +2,7 @@ Implementation of gradient descent algorithm for minimizing cost of a linear hypothesis function. """ from __future__ import print_function, division + import numpy # List of input, output pairs @@ -33,7 +34,7 @@ def _hypothesis_value(data_input_tuple): """ hyp_val = 0 for i in range(len(parameter_vector) - 1): - hyp_val += data_input_tuple[i]*parameter_vector[i+1] + hyp_val += data_input_tuple[i] * parameter_vector[i + 1] hyp_val += parameter_vector[0] return hyp_val @@ -76,7 +77,7 @@ def summation_of_cost_derivative(index, end=m): if index == -1: summation_value += _error(i) else: - summation_value += _error(i)*train_data[i][0][index] + summation_value += _error(i) * train_data[i][0][index] return summation_value @@ -86,7 +87,7 @@ def get_cost_derivative(index): :return: derivative wrt to that index Note: If index is -1, this means we are calculating summation wrt to biased parameter. 
""" - cost_derivative_value = summation_of_cost_derivative(index, m)/m + cost_derivative_value = summation_of_cost_derivative(index, m) / m return cost_derivative_value @@ -100,9 +101,9 @@ def run_gradient_descent(): j += 1 temp_parameter_vector = [0, 0, 0, 0] for i in range(0, len(parameter_vector)): - cost_derivative = get_cost_derivative(i-1) + cost_derivative = get_cost_derivative(i - 1) temp_parameter_vector[i] = parameter_vector[i] - \ - LEARNING_RATE*cost_derivative + LEARNING_RATE * cost_derivative if numpy.allclose(parameter_vector, temp_parameter_vector, atol=absolute_error_limit, rtol=relative_error_limit): break diff --git a/machine_learning/k_means_clust.py b/machine_learning/k_means_clust.py index 368739a45fe9..5e3c2252f30d 100644 --- a/machine_learning/k_means_clust.py +++ b/machine_learning/k_means_clust.py @@ -47,73 +47,80 @@ ''' from __future__ import print_function -from sklearn.metrics import pairwise_distances + import numpy as np +from sklearn.metrics import pairwise_distances TAG = 'K-MEANS-CLUST/ ' + def get_initial_centroids(data, k, seed=None): '''Randomly choose k data points as initial centroids''' - if seed is not None: # useful for obtaining consistent results + if seed is not None: # useful for obtaining consistent results np.random.seed(seed) - n = data.shape[0] # number of data points - + n = data.shape[0] # number of data points + # Pick K indices from range [0, N). rand_indices = np.random.randint(0, n, k) - + # Keep centroids as dense format, as many entries will be nonzero due to averaging. # As long as at least one document in a cluster contains a word, # it will carry a nonzero weight in the TF-IDF vector of the centroid. 
- centroids = data[rand_indices,:] - + centroids = data[rand_indices, :] + return centroids -def centroid_pairwise_dist(X,centroids): - return pairwise_distances(X,centroids,metric='euclidean') + +def centroid_pairwise_dist(X, centroids): + return pairwise_distances(X, centroids, metric='euclidean') + def assign_clusters(data, centroids): - # Compute distances between each data point and the set of centroids: # Fill in the blank (RHS only) - distances_from_centroids = centroid_pairwise_dist(data,centroids) - + distances_from_centroids = centroid_pairwise_dist(data, centroids) + # Compute cluster assignments for each data point: # Fill in the blank (RHS only) - cluster_assignment = np.argmin(distances_from_centroids,axis=1) - + cluster_assignment = np.argmin(distances_from_centroids, axis=1) + return cluster_assignment + def revise_centroids(data, k, cluster_assignment): new_centroids = [] for i in range(k): # Select all data points that belong to cluster i. Fill in the blank (RHS only) - member_data_points = data[cluster_assignment==i] + member_data_points = data[cluster_assignment == i] # Compute the mean of the data points. Fill in the blank (RHS only) centroid = member_data_points.mean(axis=0) new_centroids.append(centroid) new_centroids = np.array(new_centroids) - + return new_centroids + def compute_heterogeneity(data, k, centroids, cluster_assignment): - heterogeneity = 0.0 for i in range(k): - + # Select all data points that belong to cluster i. 
Fill in the blank (RHS only) - member_data_points = data[cluster_assignment==i, :] - - if member_data_points.shape[0] > 0: # check if i-th cluster is non-empty + member_data_points = data[cluster_assignment == i, :] + + if member_data_points.shape[0] > 0: # check if i-th cluster is non-empty # Compute distances from centroid to data points (RHS only) distances = pairwise_distances(member_data_points, [centroids[i]], metric='euclidean') - squared_distances = distances**2 + squared_distances = distances ** 2 heterogeneity += np.sum(squared_distances) - + return heterogeneity + from matplotlib import pyplot as plt + + def plot_heterogeneity(heterogeneity, k): - plt.figure(figsize=(7,4)) + plt.figure(figsize=(7, 4)) plt.plot(heterogeneity, linewidth=4) plt.xlabel('# Iterations') plt.ylabel('Heterogeneity') @@ -121,6 +128,7 @@ def plot_heterogeneity(heterogeneity, k): plt.rcParams.update({'font.size': 16}) plt.show() + def kmeans(data, k, initial_centroids, maxiter=500, record_heterogeneity=None, verbose=False): '''This function runs k-means on given data and initial set of centroids. maxiter: maximum number of iterations to run.(default=500) @@ -129,45 +137,47 @@ def kmeans(data, k, initial_centroids, maxiter=500, record_heterogeneity=None, v verbose: if True, print how many data points changed their cluster labels in each iteration''' centroids = initial_centroids[:] prev_cluster_assignment = None - - for itr in range(maxiter): + + for itr in range(maxiter): if verbose: print(itr, end='') - + # 1. Make cluster assignments using nearest centroids - cluster_assignment = assign_clusters(data,centroids) - + cluster_assignment = assign_clusters(data, centroids) + # 2. Compute a new centroid for each of the k clusters, averaging all data points assigned to that cluster. 
- centroids = revise_centroids(data,k, cluster_assignment) - + centroids = revise_centroids(data, k, cluster_assignment) + # Check for convergence: if none of the assignments changed, stop if prev_cluster_assignment is not None and \ - (prev_cluster_assignment==cluster_assignment).all(): + (prev_cluster_assignment == cluster_assignment).all(): break - + # Print number of new assignments if prev_cluster_assignment is not None: - num_changed = np.sum(prev_cluster_assignment!=cluster_assignment) + num_changed = np.sum(prev_cluster_assignment != cluster_assignment) if verbose: - print(' {0:5d} elements changed their cluster assignment.'.format(num_changed)) - - # Record heterogeneity convergence metric + print(' {0:5d} elements changed their cluster assignment.'.format(num_changed)) + + # Record heterogeneity convergence metric if record_heterogeneity is not None: # YOUR CODE HERE - score = compute_heterogeneity(data,k,centroids,cluster_assignment) + score = compute_heterogeneity(data, k, centroids, cluster_assignment) record_heterogeneity.append(score) - + prev_cluster_assignment = cluster_assignment[:] - + return centroids, cluster_assignment + # Mock test below -if False: # change to true to run this test case. +if False: # change to true to run this test case. 
import sklearn.datasets as ds + dataset = ds.load_iris() k = 3 heterogeneity = [] initial_centroids = get_initial_centroids(dataset['data'], k, seed=0) centroids, cluster_assignment = kmeans(dataset['data'], k, initial_centroids, maxiter=400, - record_heterogeneity=heterogeneity, verbose=True) + record_heterogeneity=heterogeneity, verbose=True) plot_heterogeneity(heterogeneity, k) diff --git a/machine_learning/linear_regression.py b/machine_learning/linear_regression.py index 8c23f1f77908..e43fd171623f 100644 --- a/machine_learning/linear_regression.py +++ b/machine_learning/linear_regression.py @@ -9,8 +9,8 @@ """ from __future__ import print_function -import requests import numpy as np +import requests def collect_dataset(): diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index 71952e792e81..1c018f53dd56 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -12,12 +12,12 @@ ''' Implementing logistic regression for classification problem Helpful resources : 1.Coursera ML course 2.https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac''' -import numpy as np import matplotlib.pyplot as plt +import numpy as np +from sklearn import datasets -# get_ipython().run_line_magic('matplotlib', 'inline') -from sklearn import datasets +# get_ipython().run_line_magic('matplotlib', 'inline') # In[67]: @@ -35,11 +35,11 @@ def cost_function(h, y): # here alpha is the learning rate, X is the feature matrix,y is the target matrix def logistic_reg( - alpha, - X, - y, - max_iterations=70000, - ): + alpha, + X, + y, + max_iterations=70000, +): converged = False iterations = 0 theta = np.zeros(X.shape[1]) @@ -57,8 +57,8 @@ def logistic_reg( iterations += 1 # update iterations if iterations == max_iterations: - print ('Maximum iterations exceeded!') - print ('Minimal cost function J=', J) + print('Maximum iterations exceeded!') + print('Minimal cost function J=', 
J) converged = True return theta @@ -73,7 +73,7 @@ def logistic_reg( alpha = 0.1 theta = logistic_reg(alpha, X, y, max_iterations=70000) - print (theta) + print(theta) def predict_prob(X): @@ -96,6 +96,6 @@ def predict_prob(X): [0.5], linewidths=1, colors='black', - ) + ) plt.legend() diff --git a/machine_learning/perceptron.py b/machine_learning/perceptron.py index fe1032aff4af..f5aab282e876 100644 --- a/machine_learning/perceptron.py +++ b/machine_learning/perceptron.py @@ -30,7 +30,7 @@ def trannig(self): sample.insert(0, self.bias) for i in range(self.col_sample): - self.weight.append(random.random()) + self.weight.append(random.random()) self.weight.insert(0, self.bias) @@ -46,16 +46,15 @@ def trannig(self): if y != self.exit[i]: for j in range(self.col_sample + 1): - self.weight[j] = self.weight[j] + self.learn_rate * (self.exit[i] - y) * self.sample[i][j] erro = True - #print('Epoch: \n',epoch_count) + # print('Epoch: \n',epoch_count) epoch_count = epoch_count + 1 # if you want controle the epoch or just by erro if erro == False: - print(('\nEpoch:\n',epoch_count)) + print(('\nEpoch:\n', epoch_count)) print('------------------------\n') - #if epoch_count > self.epoch_number or not erro: + # if epoch_count > self.epoch_number or not erro: break def sort(self, sample): @@ -66,7 +65,7 @@ def sort(self, sample): y = self.sign(u) - if y == -1: + if y == -1: print(('Sample: ', sample)) print('classification: P1') else: @@ -113,7 +112,7 @@ def sign(self, u): exit = [-1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1] -network = Perceptron(sample=samples, exit = exit, learn_rate=0.01, epoch_number=1000, bias=-1) +network = Perceptron(sample=samples, exit=exit, learn_rate=0.01, epoch_number=1000, bias=-1) network.trannig() diff --git a/machine_learning/scoring_functions.py b/machine_learning/scoring_functions.py index a2d97b09ded2..4c01891f1747 100755 --- a/machine_learning/scoring_functions.py +++ 
b/machine_learning/scoring_functions.py @@ -14,7 +14,8 @@ and types of data """ -#Mean Absolute Error + +# Mean Absolute Error def mae(predict, actual): predict = np.array(predict) actual = np.array(actual) @@ -24,7 +25,8 @@ def mae(predict, actual): return score -#Mean Squared Error + +# Mean Squared Error def mse(predict, actual): predict = np.array(predict) actual = np.array(actual) @@ -35,7 +37,8 @@ def mse(predict, actual): score = square_diff.mean() return score -#Root Mean Squared Error + +# Root Mean Squared Error def rmse(predict, actual): predict = np.array(predict) actual = np.array(actual) @@ -46,13 +49,14 @@ def rmse(predict, actual): score = np.sqrt(mean_square_diff) return score -#Root Mean Square Logarithmic Error + +# Root Mean Square Logarithmic Error def rmsle(predict, actual): predict = np.array(predict) actual = np.array(actual) - log_predict = np.log(predict+1) - log_actual = np.log(actual+1) + log_predict = np.log(predict + 1) + log_actual = np.log(actual + 1) difference = log_predict - log_actual square_diff = np.square(difference) @@ -62,14 +66,15 @@ def rmsle(predict, actual): return score -#Mean Bias Deviation + +# Mean Bias Deviation def mbd(predict, actual): predict = np.array(predict) actual = np.array(actual) difference = predict - actual - numerator = np.sum(difference) / len(predict) - denumerator = np.sum(actual) / len(predict) + numerator = np.sum(difference) / len(predict) + denumerator = np.sum(actual) / len(predict) print(numerator) print(denumerator) diff --git a/maths/3n+1.py b/maths/3n+1.py index 6424fe0d8f15..af5cec728d1a 100644 --- a/maths/3n+1.py +++ b/maths/3n+1.py @@ -1,19 +1,21 @@ def main(): - def n31(a):# a = initial number + def n31(a): # a = initial number c = 0 l = [a] while a != 1: - if a % 2 == 0:#if even divide it by 2 + if a % 2 == 0: # if even divide it by 2 a = a // 2 - elif a % 2 == 1:#if odd 3n+1 - a = 3*a +1 - c += 1#counter + elif a % 2 == 1: # if odd 3n+1 + a = 3 * a + 1 + c += 1 # counter l += [a] - 
return l , c + return l, c + print(n31(43)) - print(n31(98)[0][-1])# = a - print("It took {0} steps.".format(n31(13)[1]))#optional finish + print(n31(98)[0][-1]) # = a + print("It took {0} steps.".format(n31(13)[1])) # optional finish + if __name__ == '__main__': main() diff --git a/maths/Binary_Exponentiation.py b/maths/Binary_Exponentiation.py index 2411cd58a76b..0b9d4560261a 100644 --- a/maths/Binary_Exponentiation.py +++ b/maths/Binary_Exponentiation.py @@ -1,25 +1,23 @@ -#Author : Junth Basnet -#Time Complexity : O(logn) +# Author : Junth Basnet +# Time Complexity : O(logn) def binary_exponentiation(a, n): - if (n == 0): return 1 - + elif (n % 2 == 1): return binary_exponentiation(a, n - 1) * a - + else: b = binary_exponentiation(a, n / 2) return b * b - + try: base = int(input('Enter Base : ')) power = int(input("Enter Power : ")) except ValueError: - print ("Invalid literal for integer") + print("Invalid literal for integer") result = binary_exponentiation(base, power) print("{}^({}) : {}".format(base, power, result)) - diff --git a/maths/Find_Max.py b/maths/Find_Max.py index 0ce49a68c348..b69872e7abc1 100644 --- a/maths/Find_Max.py +++ b/maths/Find_Max.py @@ -3,12 +3,14 @@ def find_max(nums): max = nums[0] for x in nums: - if x > max: - max = x + if x > max: + max = x print(max) + def main(): - find_max([2, 4, 9, 7, 19, 94, 5]) + find_max([2, 4, 9, 7, 19, 94, 5]) + if __name__ == '__main__': - main() + main() diff --git a/maths/Find_Min.py b/maths/Find_Min.py index 86207984e3da..f30e7a588d2a 100644 --- a/maths/Find_Min.py +++ b/maths/Find_Min.py @@ -6,7 +6,8 @@ def findMin(x): minNum = i return minNum - print(findMin([0,1,2,3,4,5,-3,24,-56])) # = -56 + print(findMin([0, 1, 2, 3, 4, 5, -3, 24, -56])) # = -56 + if __name__ == '__main__': main() diff --git a/maths/Hanoi.py b/maths/Hanoi.py index dd04d0fa58d8..40eafcc9709e 100644 --- a/maths/Hanoi.py +++ b/maths/Hanoi.py @@ -17,8 +17,8 @@ def Tower_Of_Hanoi(n, source, dest, by, mouvement): return mouvement 
else: - mouvement = mouvement + Tower_Of_Hanoi(n-1, source, by, dest, 0) + mouvement = mouvement + Tower_Of_Hanoi(n - 1, source, by, dest, 0) logging.debug('Move the plate from', source, 'to', dest) - mouvement = mouvement + 1 + Tower_Of_Hanoi(n-1, by, dest, source, 0) + mouvement = mouvement + 1 + Tower_Of_Hanoi(n - 1, by, dest, source, 0) return mouvement diff --git a/maths/Prime_Check.py b/maths/Prime_Check.py index 8c5c181689dd..84b5bf508586 100644 --- a/maths/Prime_Check.py +++ b/maths/Prime_Check.py @@ -37,11 +37,11 @@ def test_primes(self): def test_not_primes(self): self.assertFalse(primeCheck(-19), - "Negative numbers are not prime.") + "Negative numbers are not prime.") self.assertFalse(primeCheck(0), - "Zero doesn't have any divider, primes must have two") + "Zero doesn't have any divider, primes must have two") self.assertFalse(primeCheck(1), - "One just have 1 divider, primes must have two.") + "One just have 1 divider, primes must have two.") self.assertFalse(primeCheck(2 * 2)) self.assertFalse(primeCheck(2 * 3)) self.assertFalse(primeCheck(3 * 3)) @@ -51,4 +51,3 @@ def test_not_primes(self): if __name__ == '__main__': unittest.main() - diff --git a/maths/abs.py b/maths/abs.py index 6d0596478d5f..ba7b704fb340 100644 --- a/maths/abs.py +++ b/maths/abs.py @@ -11,8 +11,10 @@ def absVal(num): else: return num + def main(): - print(absVal(-34)) # = 34 + print(absVal(-34)) # = 34 + if __name__ == '__main__': main() diff --git a/maths/abs_Max.py b/maths/abs_Max.py index 7ff9e4d3ca09..0f30a0f57d90 100644 --- a/maths/abs_Max.py +++ b/maths/abs_Max.py @@ -5,7 +5,7 @@ def absMax(x): >>absMax([3,-10,-2]) -10 """ - j =x[0] + j = x[0] for i in x: if abs(i) > abs(j): j = i @@ -13,8 +13,8 @@ def absMax(x): def main(): - a = [1,2,-11] - print(absMax(a)) # = -11 + a = [1, 2, -11] + print(absMax(a)) # = -11 if __name__ == '__main__': diff --git a/maths/abs_Min.py b/maths/abs_Min.py index 67d510551907..07e5f873646b 100644 --- a/maths/abs_Min.py +++ b/maths/abs_Min.py @@ 
-1,4 +1,6 @@ from Maths.abs import absVal + + def absMin(x): """ # >>>absMin([0,5,1,11]) @@ -12,9 +14,11 @@ def absMin(x): j = i return j + def main(): - a = [-3,-1,2,-11] + a = [-3, -1, 2, -11] print(absMin(a)) # = -1 + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/maths/average.py b/maths/average.py index dc70836b5e83..ac497904badd 100644 --- a/maths/average.py +++ b/maths/average.py @@ -2,13 +2,15 @@ def average(nums): sum = 0 n = 0 for x in nums: - sum += x - n += 1 + sum += x + n += 1 avg = sum / n print(avg) + def main(): - average([2, 4, 6, 8, 20, 50, 70]) + average([2, 4, 6, 8, 20, 50, 70]) + if __name__ == '__main__': - main() + main() diff --git a/maths/basic_maths.py b/maths/basic_maths.py index 6e8c919a001d..8361ecabf6b3 100644 --- a/maths/basic_maths.py +++ b/maths/basic_maths.py @@ -1,74 +1,78 @@ -import math - -def primeFactors(n): - pf = [] - while n % 2 == 0: - pf.append(2) - n = int(n / 2) - - for i in range(3, int(math.sqrt(n))+1, 2): - while n % i == 0: - pf.append(i) - n = int(n / i) - - if n > 2: - pf.append(n) - - return pf - -def numberOfDivisors(n): - div = 1 - - temp = 1 - while n % 2 == 0: - temp += 1 - n = int(n / 2) - div = div * (temp) - - for i in range(3, int(math.sqrt(n))+1, 2): - temp = 1 - while n % i == 0: - temp += 1 - n = int(n / i) - div = div * (temp) - - return div - -def sumOfDivisors(n): - s = 1 - - temp = 1 - while n % 2 == 0: - temp += 1 - n = int(n / 2) - if temp > 1: - s *= (2**temp - 1) / (2 - 1) - - for i in range(3, int(math.sqrt(n))+1, 2): - temp = 1 - while n % i == 0: - temp += 1 - n = int(n / i) - if temp > 1: - s *= (i**temp - 1) / (i - 1) - - return s - -def eulerPhi(n): - l = primeFactors(n) - l = set(l) - s = n - for x in l: - s *= (x - 1)/x - return s - -def main(): - print(primeFactors(100)) - print(numberOfDivisors(100)) - print(sumOfDivisors(100)) - print(eulerPhi(100)) - -if __name__ == '__main__': - main() - - \ No newline at end of file +import math + + +def 
primeFactors(n): + pf = [] + while n % 2 == 0: + pf.append(2) + n = int(n / 2) + + for i in range(3, int(math.sqrt(n)) + 1, 2): + while n % i == 0: + pf.append(i) + n = int(n / i) + + if n > 2: + pf.append(n) + + return pf + + +def numberOfDivisors(n): + div = 1 + + temp = 1 + while n % 2 == 0: + temp += 1 + n = int(n / 2) + div = div * (temp) + + for i in range(3, int(math.sqrt(n)) + 1, 2): + temp = 1 + while n % i == 0: + temp += 1 + n = int(n / i) + div = div * (temp) + + return div + + +def sumOfDivisors(n): + s = 1 + + temp = 1 + while n % 2 == 0: + temp += 1 + n = int(n / 2) + if temp > 1: + s *= (2 ** temp - 1) / (2 - 1) + + for i in range(3, int(math.sqrt(n)) + 1, 2): + temp = 1 + while n % i == 0: + temp += 1 + n = int(n / i) + if temp > 1: + s *= (i ** temp - 1) / (i - 1) + + return s + + +def eulerPhi(n): + l = primeFactors(n) + l = set(l) + s = n + for x in l: + s *= (x - 1) / x + return s + + +def main(): + print(primeFactors(100)) + print(numberOfDivisors(100)) + print(sumOfDivisors(100)) + print(eulerPhi(100)) + + +if __name__ == '__main__': + main() diff --git a/maths/extended_euclidean_algorithm.py b/maths/extended_euclidean_algorithm.py index f5a3cc88e474..e77f161fe7c1 100644 --- a/maths/extended_euclidean_algorithm.py +++ b/maths/extended_euclidean_algorithm.py @@ -6,15 +6,22 @@ import sys + # Finds 2 numbers a and b such that it satisfies # the equation am + bn = gcd(m, n) (a.k.a Bezout's Identity) def extended_euclidean_algorithm(m, n): - a = 0; aprime = 1; b = 1; bprime = 0 - q = 0; r = 0 + a = 0; + aprime = 1; + b = 1; + bprime = 0 + q = 0; + r = 0 if m > n: - c = m; d = n + c = m; + d = n else: - c = n; d = m + c = n; + d = m while True: q = int(c / d) @@ -26,19 +33,20 @@ def extended_euclidean_algorithm(m, n): t = aprime aprime = a - a = t - q*a + a = t - q * a t = bprime bprime = b - b = t - q*b + b = t - q * b pair = None if m > n: - pair = (a,b) + pair = (a, b) else: - pair = (b,a) + pair = (b, a) return pair + def main(): if 
len(sys.argv) < 3: print('2 integer arguments required') @@ -47,5 +55,6 @@ def main(): n = int(sys.argv[2]) print(extended_euclidean_algorithm(m, n)) + if __name__ == '__main__': main() diff --git a/maths/factorial_python.py b/maths/factorial_python.py index 376983e08dab..62d8ee43f354 100644 --- a/maths/factorial_python.py +++ b/maths/factorial_python.py @@ -4,16 +4,16 @@ num = 10 # uncomment to take input from the user -#num = int(input("Enter a number: ")) +# num = int(input("Enter a number: ")) factorial = 1 # check if the number is negative, positive or zero if num < 0: - print("Sorry, factorial does not exist for negative numbers") + print("Sorry, factorial does not exist for negative numbers") elif num == 0: - print("The factorial of 0 is 1") + print("The factorial of 0 is 1") else: - for i in range(1,num + 1): - factorial = factorial*i - print("The factorial of",num,"is",factorial) + for i in range(1, num + 1): + factorial = factorial * i + print("The factorial of", num, "is", factorial) diff --git a/maths/factorial_recursive.py b/maths/factorial_recursive.py index 41391a2718f6..97eb25ea745d 100644 --- a/maths/factorial_recursive.py +++ b/maths/factorial_recursive.py @@ -1,13 +1,14 @@ def fact(n): - """ - Return 1, if n is 1 or below, - otherwise, return n * fact(n-1). - """ - return 1 if n <= 1 else n * fact(n-1) + """ + Return 1, if n is 1 or below, + otherwise, return n * fact(n-1). + """ + return 1 if n <= 1 else n * fact(n - 1) + """ Shown factorial for i, where i ranges from 1 to 20. 
""" -for i in range(1,21): - print(i, ": ", fact(i), sep='') +for i in range(1, 21): + print(i, ": ", fact(i), sep='') diff --git a/maths/fermat_little_theorem.py b/maths/fermat_little_theorem.py index 93af98684894..10d69ecf610e 100644 --- a/maths/fermat_little_theorem.py +++ b/maths/fermat_little_theorem.py @@ -5,13 +5,12 @@ def binary_exponentiation(a, n, mod): - if (n == 0): return 1 - + elif (n % 2 == 1): return (binary_exponentiation(a, n - 1, mod) * a) % mod - + else: b = binary_exponentiation(a, n / 2, mod) return (b * b) % mod diff --git a/maths/fibonacci.py b/maths/fibonacci.py index 0a0611f21379..de6ff5a5d7b9 100644 --- a/maths/fibonacci.py +++ b/maths/fibonacci.py @@ -1,120 +1,121 @@ -# fibonacci.py -""" -1. Calculates the iterative fibonacci sequence - -2. Calculates the fibonacci sequence with a formula - an = [ Phin - (phi)n ]/Sqrt[5] - reference-->Su, Francis E., et al. "Fibonacci Number Formula." Math Fun Facts. -""" -import math -import functools -import time -from decimal import getcontext, Decimal - -getcontext().prec = 100 - - -def timer_decorator(func): - @functools.wraps(func) - def timer_wrapper(*args, **kwargs): - start = time.time() - func(*args, **kwargs) - end = time.time() - if int(end - start) > 0: - print(f'Run time for {func.__name__}: {(end - start):0.2f}s') - else: - print(f'Run time for {func.__name__}: {(end - start)*1000:0.2f}ms') - return func(*args, **kwargs) - return timer_wrapper - - -# define Python user-defined exceptions -class Error(Exception): - """Base class for other exceptions""" - - -class ValueTooLargeError(Error): - """Raised when the input value is too large""" - - -class ValueTooSmallError(Error): - """Raised when the input value is not greater than one""" - - -class ValueLessThanZero(Error): - """Raised when the input value is less than zero""" - - -def _check_number_input(n, min_thresh, max_thresh=None): - """ - :param n: single integer - :type n: int - :param min_thresh: min threshold, single integer - :type 
min_thresh: int - :param max_thresh: max threshold, single integer - :type max_thresh: int - :return: boolean - """ - try: - if n >= min_thresh and max_thresh is None: - return True - elif min_thresh <= n <= max_thresh: - return True - elif n < 0: - raise ValueLessThanZero - elif n < min_thresh: - raise ValueTooSmallError - elif n > max_thresh: - raise ValueTooLargeError - except ValueLessThanZero: - print("Incorrect Input: number must not be less than 0") - except ValueTooSmallError: - print(f'Incorrect Input: input number must be > {min_thresh} for the recursive calculation') - except ValueTooLargeError: - print(f'Incorrect Input: input number must be < {max_thresh} for the recursive calculation') - return False - - -@timer_decorator -def fib_iterative(n): - """ - :param n: calculate Fibonacci to the nth integer - :type n:int - :return: Fibonacci sequence as a list - """ - n = int(n) - if _check_number_input(n, 2): - seq_out = [0, 1] - a, b = 0, 1 - for _ in range(n-len(seq_out)): - a, b = b, a+b - seq_out.append(b) - return seq_out - - -@timer_decorator -def fib_formula(n): - """ - :param n: calculate Fibonacci to the nth integer - :type n:int - :return: Fibonacci sequence as a list - """ - seq_out = [0, 1] - n = int(n) - if _check_number_input(n, 2, 1000000): - sqrt = Decimal(math.sqrt(5)) - phi_1 = Decimal(1 + sqrt) / Decimal(2) - phi_2 = Decimal(1 - sqrt) / Decimal(2) - for i in range(2, n): - temp_out = ((phi_1**Decimal(i)) - (phi_2**Decimal(i))) * (Decimal(sqrt) ** Decimal(-1)) - seq_out.append(int(temp_out)) - return seq_out - - -if __name__ == '__main__': - num = 20 - # print(f'{fib_recursive(num)}\n') - # print(f'{fib_iterative(num)}\n') - # print(f'{fib_formula(num)}\n') - fib_iterative(num) - fib_formula(num) +# fibonacci.py +""" +1. Calculates the iterative fibonacci sequence + +2. Calculates the fibonacci sequence with a formula + an = [ Phin - (phi)n ]/Sqrt[5] + reference-->Su, Francis E., et al. "Fibonacci Number Formula." Math Fun Facts. 
+""" +import functools +import math +import time +from decimal import getcontext, Decimal + +getcontext().prec = 100 + + +def timer_decorator(func): + @functools.wraps(func) + def timer_wrapper(*args, **kwargs): + start = time.time() + func(*args, **kwargs) + end = time.time() + if int(end - start) > 0: + print(f'Run time for {func.__name__}: {(end - start):0.2f}s') + else: + print(f'Run time for {func.__name__}: {(end - start) * 1000:0.2f}ms') + return func(*args, **kwargs) + + return timer_wrapper + + +# define Python user-defined exceptions +class Error(Exception): + """Base class for other exceptions""" + + +class ValueTooLargeError(Error): + """Raised when the input value is too large""" + + +class ValueTooSmallError(Error): + """Raised when the input value is not greater than one""" + + +class ValueLessThanZero(Error): + """Raised when the input value is less than zero""" + + +def _check_number_input(n, min_thresh, max_thresh=None): + """ + :param n: single integer + :type n: int + :param min_thresh: min threshold, single integer + :type min_thresh: int + :param max_thresh: max threshold, single integer + :type max_thresh: int + :return: boolean + """ + try: + if n >= min_thresh and max_thresh is None: + return True + elif min_thresh <= n <= max_thresh: + return True + elif n < 0: + raise ValueLessThanZero + elif n < min_thresh: + raise ValueTooSmallError + elif n > max_thresh: + raise ValueTooLargeError + except ValueLessThanZero: + print("Incorrect Input: number must not be less than 0") + except ValueTooSmallError: + print(f'Incorrect Input: input number must be > {min_thresh} for the recursive calculation') + except ValueTooLargeError: + print(f'Incorrect Input: input number must be < {max_thresh} for the recursive calculation') + return False + + +@timer_decorator +def fib_iterative(n): + """ + :param n: calculate Fibonacci to the nth integer + :type n:int + :return: Fibonacci sequence as a list + """ + n = int(n) + if _check_number_input(n, 2): + 
seq_out = [0, 1] + a, b = 0, 1 + for _ in range(n - len(seq_out)): + a, b = b, a + b + seq_out.append(b) + return seq_out + + +@timer_decorator +def fib_formula(n): + """ + :param n: calculate Fibonacci to the nth integer + :type n:int + :return: Fibonacci sequence as a list + """ + seq_out = [0, 1] + n = int(n) + if _check_number_input(n, 2, 1000000): + sqrt = Decimal(math.sqrt(5)) + phi_1 = Decimal(1 + sqrt) / Decimal(2) + phi_2 = Decimal(1 - sqrt) / Decimal(2) + for i in range(2, n): + temp_out = ((phi_1 ** Decimal(i)) - (phi_2 ** Decimal(i))) * (Decimal(sqrt) ** Decimal(-1)) + seq_out.append(int(temp_out)) + return seq_out + + +if __name__ == '__main__': + num = 20 + # print(f'{fib_recursive(num)}\n') + # print(f'{fib_iterative(num)}\n') + # print(f'{fib_formula(num)}\n') + fib_iterative(num) + fib_formula(num) diff --git a/maths/fibonacci_sequence_recursion.py b/maths/fibonacci_sequence_recursion.py index 9190e7fc7a40..c841167699c9 100644 --- a/maths/fibonacci_sequence_recursion.py +++ b/maths/fibonacci_sequence_recursion.py @@ -4,11 +4,13 @@ def recur_fibo(n): if n <= 1: return n else: - (recur_fibo(n-1) + recur_fibo(n-2)) + (recur_fibo(n - 1) + recur_fibo(n - 2)) + def isPositiveInteger(limit): return limit >= 0 + def main(): limit = int(input("How many terms to include in fibonacci series: ")) if isPositiveInteger(limit): @@ -17,5 +19,6 @@ def main(): else: print("Please enter a positive integer: ") + if __name__ == '__main__': main() diff --git a/maths/greater_common_divisor.py b/maths/greater_common_divisor.py index 15adaca1fb8d..343ab7963619 100644 --- a/maths/greater_common_divisor.py +++ b/maths/greater_common_divisor.py @@ -2,14 +2,16 @@ def gcd(a, b): return b if a == 0 else gcd(b % a, a) + def main(): try: nums = input("Enter two Integers separated by comma (,): ").split(',') - num1 = int(nums[0]); num2 = int(nums[1]) + num1 = int(nums[0]); + num2 = int(nums[1]) except (IndexError, UnboundLocalError, ValueError): print("Wrong Input") 
print(f"gcd({num1}, {num2}) = {gcd(num1, num2)}") + if __name__ == '__main__': main() - diff --git a/maths/lucasSeries.py b/maths/lucasSeries.py index 91ea1ba72a56..c9adc10f0986 100644 --- a/maths/lucasSeries.py +++ b/maths/lucasSeries.py @@ -1,13 +1,14 @@ # Lucas Sequence Using Recursion def recur_luc(n): - if n == 1: - return n - if n == 0: - return 2 - return (recur_luc(n-1) + recur_luc(n-2)) - + if n == 1: + return n + if n == 0: + return 2 + return (recur_luc(n - 1) + recur_luc(n - 2)) + + limit = int(input("How many terms to include in Lucas series:")) print("Lucas series:") for i in range(limit): - print(recur_luc(i)) + print(recur_luc(i)) diff --git a/maths/modular_exponential.py b/maths/modular_exponential.py index b3f4c00bd5d8..5971951c17bc 100644 --- a/maths/modular_exponential.py +++ b/maths/modular_exponential.py @@ -1,20 +1,20 @@ def modularExponential(base, power, mod): - if power < 0: - return -1 - base %= mod - result = 1 + if power < 0: + return -1 + base %= mod + result = 1 - while power > 0: - if power & 1: - result = (result * base) % mod - power = power >> 1 - base = (base * base) % mod - return result + while power > 0: + if power & 1: + result = (result * base) % mod + power = power >> 1 + base = (base * base) % mod + return result def main(): - print(modularExponential(3, 200, 13)) + print(modularExponential(3, 200, 13)) if __name__ == '__main__': - main() + main() diff --git a/maths/newton_raphson.py b/maths/newton_raphson.py index c08bcedc9a4d..c77a0c83668b 100644 --- a/maths/newton_raphson.py +++ b/maths/newton_raphson.py @@ -10,23 +10,24 @@ import math as m + def calc_derivative(f, a, h=0.001): ''' Calculates derivative at point a for function f using finite difference method ''' - return (f(a+h)-f(a-h))/(2*h) + return (f(a + h) - f(a - h)) / (2 * h) + -def newton_raphson(f, x0=0, maxiter=100, step=0.0001, maxerror=1e-6,logsteps=False): - - a = x0 #set the initial guess +def newton_raphson(f, x0=0, maxiter=100, step=0.0001, 
maxerror=1e-6, logsteps=False): + a = x0 # set the initial guess steps = [a] error = abs(f(a)) - f1 = lambda x:calc_derivative(f, x, h=step) #Derivative of f(x) + f1 = lambda x: calc_derivative(f, x, h=step) # Derivative of f(x) for _ in range(maxiter): if f1(a) == 0: raise ValueError("No converging solution found") - a = a - f(a)/f1(a) #Calculate the next estimate + a = a - f(a) / f1(a) # Calculate the next estimate if logsteps: steps.append(a) error = abs(f(a)) @@ -35,16 +36,18 @@ def newton_raphson(f, x0=0, maxiter=100, step=0.0001, maxerror=1e-6,logsteps=Fal else: raise ValueError("Itheration limit reached, no converging solution found") if logsteps: - #If logstep is true, then log intermediate steps + # If logstep is true, then log intermediate steps return a, error, steps return a, error - + + if __name__ == '__main__': import matplotlib.pyplot as plt - f = lambda x:m.tanh(x)**2-m.exp(3*x) + + f = lambda x: m.tanh(x) ** 2 - m.exp(3 * x) solution, error, steps = newton_raphson(f, x0=10, maxiter=1000, step=1e-6, logsteps=True) plt.plot([abs(f(x)) for x in steps]) plt.xlabel("step") plt.ylabel("error") plt.show() - print("solution = {%f}, error = {%f}" % (solution, error)) \ No newline at end of file + print("solution = {%f}, error = {%f}" % (solution, error)) diff --git a/maths/segmented_sieve.py b/maths/segmented_sieve.py index 52ca6fbe601d..0d00acf3b99c 100644 --- a/maths/segmented_sieve.py +++ b/maths/segmented_sieve.py @@ -1,46 +1,48 @@ -import math - -def sieve(n): - in_prime = [] - start = 2 - end = int(math.sqrt(n)) # Size of every segment - temp = [True] * (end + 1) - prime = [] - - while(start <= end): - if temp[start] == True: - in_prime.append(start) - for i in range(start*start, end+1, start): - if temp[i] == True: - temp[i] = False - start += 1 - prime += in_prime - - low = end + 1 - high = low + end - 1 - if high > n: - high = n - - while(low <= n): - temp = [True] * (high-low+1) - for each in in_prime: - - t = math.floor(low / each) * each - if t 
< low: - t += each - - for j in range(t, high+1, each): - temp[j - low] = False - - for j in range(len(temp)): - if temp[j] == True: - prime.append(j+low) - - low = high + 1 - high = low + end - 1 - if high > n: - high = n - - return prime - -print(sieve(10**6)) \ No newline at end of file +import math + + +def sieve(n): + in_prime = [] + start = 2 + end = int(math.sqrt(n)) # Size of every segment + temp = [True] * (end + 1) + prime = [] + + while (start <= end): + if temp[start] == True: + in_prime.append(start) + for i in range(start * start, end + 1, start): + if temp[i] == True: + temp[i] = False + start += 1 + prime += in_prime + + low = end + 1 + high = low + end - 1 + if high > n: + high = n + + while (low <= n): + temp = [True] * (high - low + 1) + for each in in_prime: + + t = math.floor(low / each) * each + if t < low: + t += each + + for j in range(t, high + 1, each): + temp[j - low] = False + + for j in range(len(temp)): + if temp[j] == True: + prime.append(j + low) + + low = high + 1 + high = low + end - 1 + if high > n: + high = n + + return prime + + +print(sieve(10 ** 6)) diff --git a/maths/sieve_of_eratosthenes.py b/maths/sieve_of_eratosthenes.py index 26c17fa6ffec..5a510e8b9f10 100644 --- a/maths/sieve_of_eratosthenes.py +++ b/maths/sieve_of_eratosthenes.py @@ -1,24 +1,26 @@ -import math -n = int(input("Enter n: ")) - -def sieve(n): - l = [True] * (n+1) - prime = [] - start = 2 - end = int(math.sqrt(n)) - while(start <= end): - if l[start] == True: - prime.append(start) - for i in range(start*start, n+1, start): - if l[i] == True: - l[i] = False - start += 1 - - for j in range(end+1,n+1): - if l[j] == True: - prime.append(j) - - return prime - -print(sieve(n)) - +import math + +n = int(input("Enter n: ")) + + +def sieve(n): + l = [True] * (n + 1) + prime = [] + start = 2 + end = int(math.sqrt(n)) + while (start <= end): + if l[start] == True: + prime.append(start) + for i in range(start * start, n + 1, start): + if l[i] == True: + l[i] = False + 
start += 1 + + for j in range(end + 1, n + 1): + if l[j] == True: + prime.append(j) + + return prime + + +print(sieve(n)) diff --git a/maths/simpson_rule.py b/maths/simpson_rule.py index 091c86c17f1b..27dde18b0a6f 100644 --- a/maths/simpson_rule.py +++ b/maths/simpson_rule.py @@ -1,4 +1,3 @@ - ''' Numerical integration or quadrature for a smooth function f with known values at x_i @@ -12,38 +11,42 @@ def method_2(boundary, steps): -# "Simpson Rule" -# int(f) = delta_x/2 * (b-a)/3*(f1 + 4f2 + 2f_3 + ... + fn) - h = (boundary[1] - boundary[0]) / steps - a = boundary[0] - b = boundary[1] - x_i = makePoints(a,b,h) - y = 0.0 - y += (h/3.0)*f(a) - cnt = 2 - for i in x_i: - y += (h/3)*(4-2*(cnt%2))*f(i) - cnt += 1 - y += (h/3.0)*f(b) - return y - -def makePoints(a,b,h): - x = a + h - while x < (b-h): - yield x - x = x + h - -def f(x): #enter your function here - y = (x-0)*(x-0) - return y + # "Simpson Rule" + # int(f) = delta_x/2 * (b-a)/3*(f1 + 4f2 + 2f_3 + ... + fn) + h = (boundary[1] - boundary[0]) / steps + a = boundary[0] + b = boundary[1] + x_i = makePoints(a, b, h) + y = 0.0 + y += (h / 3.0) * f(a) + cnt = 2 + for i in x_i: + y += (h / 3) * (4 - 2 * (cnt % 2)) * f(i) + cnt += 1 + y += (h / 3.0) * f(b) + return y + + +def makePoints(a, b, h): + x = a + h + while x < (b - h): + yield x + x = x + h + + +def f(x): # enter your function here + y = (x - 0) * (x - 0) + return y + def main(): - a = 0.0 #Lower bound of integration - b = 1.0 #Upper bound of integration - steps = 10.0 #define number of steps or resolution - boundary = [a, b] #define boundary of integration - y = method_2(boundary, steps) - print('y = {0}'.format(y)) + a = 0.0 # Lower bound of integration + b = 1.0 # Upper bound of integration + steps = 10.0 # define number of steps or resolution + boundary = [a, b] # define boundary of integration + y = method_2(boundary, steps) + print('y = {0}'.format(y)) + if __name__ == '__main__': - main() + main() diff --git a/maths/tests/__init__.py 
b/maths/tests/__init__.py index 2c4a6048556c..8b137891791f 100644 --- a/maths/tests/__init__.py +++ b/maths/tests/__init__.py @@ -1 +1 @@ -from .. import fibonacci + diff --git a/maths/tests/test_fibonacci.py b/maths/tests/test_fibonacci.py index 7d36c755e346..4254698b3c6d 100644 --- a/maths/tests/test_fibonacci.py +++ b/maths/tests/test_fibonacci.py @@ -1,34 +1,34 @@ -""" -To run with slash: -1. run pip install slash (may need to install C++ builds from Visual Studio website) -2. In the command prompt navigate to your project folder -3. then type--> slash run -vv -k tags:fibonacci .. - -vv indicates the level of verbosity (how much stuff you want the test to spit out after running) - -k is a way to select the tests you want to run. This becomes much more important in large scale projects. -""" - -import slash -from .. import fibonacci - -default_fib = [0, 1, 1, 2, 3, 5, 8] - - -@slash.tag('fibonacci') -@slash.parametrize(('n', 'seq'), [(2, [0, 1]), (3, [0, 1, 1]), (9, [0, 1, 1, 2, 3, 5, 8, 13, 21])]) -def test_different_sequence_lengths(n, seq): - """Test output of varying fibonacci sequence lengths""" - iterative = fibonacci.fib_iterative(n) - formula = fibonacci.fib_formula(n) - assert iterative == seq - assert formula == seq - - -@slash.tag('fibonacci') -@slash.parametrize('n', [7.3, 7.8, 7.0]) -def test_float_input_iterative(n): - """Test when user enters a float value""" - iterative = fibonacci.fib_iterative(n) - formula = fibonacci.fib_formula(n) - assert iterative == default_fib - assert formula == default_fib - +""" +To run with slash: +1. run pip install slash (may need to install C++ builds from Visual Studio website) +2. In the command prompt navigate to your project folder +3. then type--> slash run -vv -k tags:fibonacci .. + -vv indicates the level of verbosity (how much stuff you want the test to spit out after running) + -k is a way to select the tests you want to run. This becomes much more important in large scale projects. 
+""" + +import slash + +from .. import fibonacci + +default_fib = [0, 1, 1, 2, 3, 5, 8] + + +@slash.tag('fibonacci') +@slash.parametrize(('n', 'seq'), [(2, [0, 1]), (3, [0, 1, 1]), (9, [0, 1, 1, 2, 3, 5, 8, 13, 21])]) +def test_different_sequence_lengths(n, seq): + """Test output of varying fibonacci sequence lengths""" + iterative = fibonacci.fib_iterative(n) + formula = fibonacci.fib_formula(n) + assert iterative == seq + assert formula == seq + + +@slash.tag('fibonacci') +@slash.parametrize('n', [7.3, 7.8, 7.0]) +def test_float_input_iterative(n): + """Test when user enters a float value""" + iterative = fibonacci.fib_iterative(n) + formula = fibonacci.fib_formula(n) + assert iterative == default_fib + assert formula == default_fib diff --git a/maths/trapezoidal_rule.py b/maths/trapezoidal_rule.py index 52310c1ed3b0..a1d1668532c8 100644 --- a/maths/trapezoidal_rule.py +++ b/maths/trapezoidal_rule.py @@ -9,38 +9,43 @@ ''' from __future__ import print_function + def method_1(boundary, steps): -# "extended trapezoidal rule" -# int(f) = dx/2 * (f1 + 2f2 + ... + fn) - h = (boundary[1] - boundary[0]) / steps - a = boundary[0] - b = boundary[1] - x_i = makePoints(a,b,h) - y = 0.0 - y += (h/2.0)*f(a) - for i in x_i: - #print(i) - y += h*f(i) - y += (h/2.0)*f(b) - return y - -def makePoints(a,b,h): - x = a + h - while x < (b-h): - yield x - x = x + h - -def f(x): #enter your function here - y = (x-0)*(x-0) - return y + # "extended trapezoidal rule" + # int(f) = dx/2 * (f1 + 2f2 + ... 
+ fn) + h = (boundary[1] - boundary[0]) / steps + a = boundary[0] + b = boundary[1] + x_i = makePoints(a, b, h) + y = 0.0 + y += (h / 2.0) * f(a) + for i in x_i: + # print(i) + y += h * f(i) + y += (h / 2.0) * f(b) + return y + + +def makePoints(a, b, h): + x = a + h + while x < (b - h): + yield x + x = x + h + + +def f(x): # enter your function here + y = (x - 0) * (x - 0) + return y + def main(): - a = 0.0 #Lower bound of integration - b = 1.0 #Upper bound of integration - steps = 10.0 #define number of steps or resolution - boundary = [a, b] #define boundary of integration - y = method_1(boundary, steps) - print('y = {0}'.format(y)) + a = 0.0 # Lower bound of integration + b = 1.0 # Upper bound of integration + steps = 10.0 # define number of steps or resolution + boundary = [a, b] # define boundary of integration + y = method_1(boundary, steps) + print('y = {0}'.format(y)) + if __name__ == '__main__': - main() + main() diff --git a/matrix/matrix_multiplication_addition.py b/matrix/matrix_multiplication_addition.py index dd50db729e43..7cf0304f8db0 100644 --- a/matrix/matrix_multiplication_addition.py +++ b/matrix/matrix_multiplication_addition.py @@ -10,9 +10,11 @@ def add(matrix_a, matrix_b): matrix_c.append(list_1) return matrix_c -def scalarMultiply(matrix , n): + +def scalarMultiply(matrix, n): return [[x * n for x in row] for row in matrix] + def multiply(matrix_a, matrix_b): matrix_c = [] n = len(matrix_a) @@ -26,25 +28,30 @@ def multiply(matrix_a, matrix_b): matrix_c.append(list_1) return matrix_c + def identity(n): - return [[int(row == column) for column in range(n)] for row in range(n)] + return [[int(row == column) for column in range(n)] for row in range(n)] + def transpose(matrix): - return map(list , zip(*matrix)) + return map(list, zip(*matrix)) + def minor(matrix, row, column): minor = matrix[:row] + matrix[row + 1:] minor = [row[:column] + row[column + 1:] for row in minor] return minor + def determinant(matrix): if len(matrix) == 1: return 
matrix[0][0] - + res = 0 for x in range(len(matrix)): - res += matrix[0][x] * determinant(minor(matrix , 0 , x)) * (-1) ** x + res += matrix[0][x] * determinant(minor(matrix, 0, x)) * (-1) ** x return res + def inverse(matrix): det = determinant(matrix) if det == 0: return None @@ -52,11 +59,12 @@ def inverse(matrix): matrixMinor = [[] for _ in range(len(matrix))] for i in range(len(matrix)): for j in range(len(matrix)): - matrixMinor[i].append(determinant(minor(matrix , i , j))) - + matrixMinor[i].append(determinant(minor(matrix, i, j))) + cofactors = [[x * (-1) ** (row + col) for col, x in enumerate(matrixMinor[row])] for row in range(len(matrix))] adjugate = transpose(cofactors) - return scalarMultiply(adjugate , 1/det) + return scalarMultiply(adjugate, 1 / det) + def main(): matrix_a = [[12, 10], [3, 9]] @@ -67,9 +75,10 @@ def main(): print(add(matrix_a, matrix_b)) print(multiply(matrix_a, matrix_b)) print(identity(5)) - print(minor(matrix_c , 1 , 2)) + print(minor(matrix_c, 1, 2)) print(determinant(matrix_b)) print(inverse(matrix_d)) + if __name__ == '__main__': main() diff --git a/networking_flow/ford_fulkerson.py b/networking_flow/ford_fulkerson.py index d51f1f0661b3..a92af22677dd 100644 --- a/networking_flow/ford_fulkerson.py +++ b/networking_flow/ford_fulkerson.py @@ -4,14 +4,15 @@ (1) Start with initial flow as 0; (2) Choose augmenting path from source to sink and add path to flow; """ - + + def BFS(graph, s, t, parent): # Return True if there is node that has not iterated. 
- visited = [False]*len(graph) - queue=[] + visited = [False] * len(graph) + queue = [] queue.append(s) visited[s] = True - + while queue: u = queue.pop(0) for ind in range(len(graph[u])): @@ -21,36 +22,38 @@ def BFS(graph, s, t, parent): parent[ind] = u return True if visited[t] else False - + + def FordFulkerson(graph, source, sink): # This array is filled by BFS and to store path - parent = [-1]*(len(graph)) - max_flow = 0 - while BFS(graph, source, sink, parent) : + parent = [-1] * (len(graph)) + max_flow = 0 + while BFS(graph, source, sink, parent): path_flow = float("Inf") s = sink - while(s != source): + while (s != source): # Find the minimum value in select path - path_flow = min (path_flow, graph[parent[s]][s]) + path_flow = min(path_flow, graph[parent[s]][s]) s = parent[s] - max_flow += path_flow + max_flow += path_flow v = sink - while(v != source): + while (v != source): u = parent[v] graph[u][v] -= path_flow graph[v][u] += path_flow v = parent[v] return max_flow + graph = [[0, 16, 13, 0, 0, 0], - [0, 0, 10 ,12, 0, 0], + [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0]] source, sink = 0, 5 -print(FordFulkerson(graph, source, sink)) \ No newline at end of file +print(FordFulkerson(graph, source, sink)) diff --git a/networking_flow/minimum_cut.py b/networking_flow/minimum_cut.py index 8ad6e03b00c6..f1f995c0459d 100644 --- a/networking_flow/minimum_cut.py +++ b/networking_flow/minimum_cut.py @@ -1,12 +1,12 @@ # Minimum cut on Ford_Fulkerson algorithm. - + def BFS(graph, s, t, parent): # Return True if there is node that has not iterated. 
- visited = [False]*len(graph) - queue=[] + visited = [False] * len(graph) + queue = [] queue.append(s) visited[s] = True - + while queue: u = queue.pop(0) for ind in range(len(graph[u])): @@ -16,26 +16,27 @@ def BFS(graph, s, t, parent): parent[ind] = u return True if visited[t] else False - + + def mincut(graph, source, sink): # This array is filled by BFS and to store path - parent = [-1]*(len(graph)) - max_flow = 0 + parent = [-1] * (len(graph)) + max_flow = 0 res = [] - temp = [i[:] for i in graph] # Record orignial cut, copy. - while BFS(graph, source, sink, parent) : + temp = [i[:] for i in graph] # Record orignial cut, copy. + while BFS(graph, source, sink, parent): path_flow = float("Inf") s = sink - while(s != source): + while (s != source): # Find the minimum value in select path - path_flow = min (path_flow, graph[parent[s]][s]) + path_flow = min(path_flow, graph[parent[s]][s]) s = parent[s] - max_flow += path_flow + max_flow += path_flow v = sink - - while(v != source): + + while (v != source): u = parent[v] graph[u][v] -= path_flow graph[v][u] += path_flow @@ -44,16 +45,17 @@ def mincut(graph, source, sink): for i in range(len(graph)): for j in range(len(graph[0])): if graph[i][j] == 0 and temp[i][j] > 0: - res.append((i,j)) + res.append((i, j)) return res + graph = [[0, 16, 13, 0, 0, 0], - [0, 0, 10 ,12, 0, 0], + [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0]] source, sink = 0, 5 -print(mincut(graph, source, sink)) \ No newline at end of file +print(mincut(graph, source, sink)) diff --git a/neural_network/bpnn.py b/neural_network/bpnn.py index 92deaee19c6e..da017fa14ab0 100644 --- a/neural_network/bpnn.py +++ b/neural_network/bpnn.py @@ -19,18 +19,20 @@ ''' -import numpy as np import matplotlib.pyplot as plt +import numpy as np def sigmoid(x): return 1 / (1 + np.exp(-1 * x)) + class DenseLayer(): ''' Layers of BP neural network ''' - def 
__init__(self,units,activation=None,learning_rate=None,is_input_layer=False): + + def __init__(self, units, activation=None, learning_rate=None, is_input_layer=False): ''' common connected layer of bp network :param units: numbers of neural units @@ -47,21 +49,21 @@ def __init__(self,units,activation=None,learning_rate=None,is_input_layer=False) self.learn_rate = learning_rate self.is_input_layer = is_input_layer - def initializer(self,back_units): - self.weight = np.asmatrix(np.random.normal(0,0.5,(self.units,back_units))) - self.bias = np.asmatrix(np.random.normal(0,0.5,self.units)).T + def initializer(self, back_units): + self.weight = np.asmatrix(np.random.normal(0, 0.5, (self.units, back_units))) + self.bias = np.asmatrix(np.random.normal(0, 0.5, self.units)).T if self.activation is None: self.activation = sigmoid def cal_gradient(self): if self.activation == sigmoid: - gradient_mat = np.dot(self.output ,(1- self.output).T) + gradient_mat = np.dot(self.output, (1 - self.output).T) gradient_activation = np.diag(np.diag(gradient_mat)) else: gradient_activation = 1 return gradient_activation - def forward_propagation(self,xdata): + def forward_propagation(self, xdata): self.xdata = xdata if self.is_input_layer: # input layer @@ -69,22 +71,22 @@ def forward_propagation(self,xdata): self.output = xdata return xdata else: - self.wx_plus_b = np.dot(self.weight,self.xdata) - self.bias + self.wx_plus_b = np.dot(self.weight, self.xdata) - self.bias self.output = self.activation(self.wx_plus_b) return self.output - def back_propagation(self,gradient): + def back_propagation(self, gradient): - gradient_activation = self.cal_gradient() # i * i 维 - gradient = np.asmatrix(np.dot(gradient.T,gradient_activation)) + gradient_activation = self.cal_gradient() # i * i 维 + gradient = np.asmatrix(np.dot(gradient.T, gradient_activation)) self._gradient_weight = np.asmatrix(self.xdata) self._gradient_bias = -1 self._gradient_x = self.weight - self.gradient_weight = 
np.dot(gradient.T,self._gradient_weight.T) + self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T) self.gradient_bias = gradient * self._gradient_bias - self.gradient = np.dot(gradient,self._gradient_x).T + self.gradient = np.dot(gradient, self._gradient_x).T # ----------------------upgrade # -----------the Negative gradient direction -------- self.weight = self.weight - self.learn_rate * self.gradient_weight @@ -97,29 +99,30 @@ class BPNN(): ''' Back Propagation Neural Network model ''' + def __init__(self): self.layers = [] self.train_mse = [] self.fig_loss = plt.figure() - self.ax_loss = self.fig_loss.add_subplot(1,1,1) + self.ax_loss = self.fig_loss.add_subplot(1, 1, 1) - def add_layer(self,layer): + def add_layer(self, layer): self.layers.append(layer) def build(self): - for i,layer in enumerate(self.layers[:]): + for i, layer in enumerate(self.layers[:]): if i < 1: layer.is_input_layer = True else: - layer.initializer(self.layers[i-1].units) + layer.initializer(self.layers[i - 1].units) def summary(self): - for i,layer in enumerate(self.layers[:]): - print('------- layer %d -------'%i) - print('weight.shape ',np.shape(layer.weight)) - print('bias.shape ',np.shape(layer.bias)) + for i, layer in enumerate(self.layers[:]): + print('------- layer %d -------' % i) + print('weight.shape ', np.shape(layer.weight)) + print('bias.shape ', np.shape(layer.bias)) - def train(self,xdata,ydata,train_round,accuracy): + def train(self, xdata, ydata, train_round, accuracy): self.train_round = train_round self.accuracy = accuracy @@ -129,8 +132,8 @@ def train(self,xdata,ydata,train_round,accuracy): for round_i in range(train_round): all_loss = 0 for row in range(x_shape[0]): - _xdata = np.asmatrix(xdata[row,:]).T - _ydata = np.asmatrix(ydata[row,:]).T + _xdata = np.asmatrix(xdata[row, :]).T + _ydata = np.asmatrix(ydata[row, :]).T # forward propagation for layer in self.layers: @@ -144,7 +147,7 @@ def train(self,xdata,ydata,train_round,accuracy): for layer in 
self.layers[:0:-1]: gradient = layer.back_propagation(gradient) - mse = all_loss/x_shape[0] + mse = all_loss / x_shape[0] self.train_mse.append(mse) self.plot_loss() @@ -153,11 +156,11 @@ def train(self,xdata,ydata,train_round,accuracy): print('----达到精度----') return mse - def cal_loss(self,ydata,ydata_): - self.loss = np.sum(np.power((ydata - ydata_),2)) + def cal_loss(self, ydata, ydata_): + self.loss = np.sum(np.power((ydata - ydata_), 2)) self.loss_gradient = 2 * (ydata_ - ydata) # vector (shape is the same as _ydata.shape) - return self.loss,self.loss_gradient + return self.loss, self.loss_gradient def plot_loss(self): if self.ax_loss.lines: @@ -170,14 +173,11 @@ def plot_loss(self): plt.pause(0.1) - - def example(): - - x = np.random.randn(10,10) - y = np.asarray([[0.8,0.4],[0.4,0.3],[0.34,0.45],[0.67,0.32], - [0.88,0.67],[0.78,0.77],[0.55,0.66],[0.55,0.43],[0.54,0.1], - [0.1,0.5]]) + x = np.random.randn(10, 10) + y = np.asarray([[0.8, 0.4], [0.4, 0.3], [0.34, 0.45], [0.67, 0.32], + [0.88, 0.67], [0.78, 0.77], [0.55, 0.66], [0.55, 0.43], [0.54, 0.1], + [0.1, 0.5]]) model = BPNN() model.add_layer(DenseLayer(10)) @@ -189,7 +189,8 @@ def example(): model.summary() - model.train(xdata=x,ydata=y,train_round=100,accuracy=0.01) + model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01) + if __name__ == '__main__': example() diff --git a/neural_network/convolution_neural_network.py b/neural_network/convolution_neural_network.py index 0e72f0c0dca2..7df83aec88c2 100644 --- a/neural_network/convolution_neural_network.py +++ b/neural_network/convolution_neural_network.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- ''' - - - - - -- - - - - - - - - - - - - - - - - - - - - - - @@ -18,8 +18,10 @@ from __future__ import print_function import pickle -import numpy as np + import matplotlib.pyplot as plt +import numpy as np + class CNN(): @@ -41,42 +43,41 @@ def __init__(self, conv1_get, size_p1, bp_num1, bp_num2, bp_num3, rate_w=0.2, ra 
self.size_pooling1 = size_p1 self.rate_weight = rate_w self.rate_thre = rate_t - self.w_conv1 = [np.mat(-1*np.random.rand(self.conv1[0],self.conv1[0])+0.5) for i in range(self.conv1[1])] + self.w_conv1 = [np.mat(-1 * np.random.rand(self.conv1[0], self.conv1[0]) + 0.5) for i in range(self.conv1[1])] self.wkj = np.mat(-1 * np.random.rand(self.num_bp3, self.num_bp2) + 0.5) - self.vji = np.mat(-1*np.random.rand(self.num_bp2, self.num_bp1)+0.5) - self.thre_conv1 = -2*np.random.rand(self.conv1[1])+1 - self.thre_bp2 = -2*np.random.rand(self.num_bp2)+1 - self.thre_bp3 = -2*np.random.rand(self.num_bp3)+1 - + self.vji = np.mat(-1 * np.random.rand(self.num_bp2, self.num_bp1) + 0.5) + self.thre_conv1 = -2 * np.random.rand(self.conv1[1]) + 1 + self.thre_bp2 = -2 * np.random.rand(self.num_bp2) + 1 + self.thre_bp3 = -2 * np.random.rand(self.num_bp3) + 1 def save_model(self, save_path): - #save model dict with pickle - model_dic = {'num_bp1':self.num_bp1, - 'num_bp2':self.num_bp2, - 'num_bp3':self.num_bp3, - 'conv1':self.conv1, - 'step_conv1':self.step_conv1, - 'size_pooling1':self.size_pooling1, - 'rate_weight':self.rate_weight, - 'rate_thre':self.rate_thre, - 'w_conv1':self.w_conv1, - 'wkj':self.wkj, - 'vji':self.vji, - 'thre_conv1':self.thre_conv1, - 'thre_bp2':self.thre_bp2, - 'thre_bp3':self.thre_bp3} + # save model dict with pickle + model_dic = {'num_bp1': self.num_bp1, + 'num_bp2': self.num_bp2, + 'num_bp3': self.num_bp3, + 'conv1': self.conv1, + 'step_conv1': self.step_conv1, + 'size_pooling1': self.size_pooling1, + 'rate_weight': self.rate_weight, + 'rate_thre': self.rate_thre, + 'w_conv1': self.w_conv1, + 'wkj': self.wkj, + 'vji': self.vji, + 'thre_conv1': self.thre_conv1, + 'thre_bp2': self.thre_bp2, + 'thre_bp3': self.thre_bp3} with open(save_path, 'wb') as f: pickle.dump(model_dic, f) - print('Model saved: %s'% save_path) + print('Model saved: %s' % save_path) @classmethod def ReadModel(cls, model_path): - #read saved model + # read saved model with open(model_path, 
'rb') as f: model_dic = pickle.load(f) - conv_get= model_dic.get('conv1') + conv_get = model_dic.get('conv1') conv_get.append(model_dic.get('step_conv1')) size_p1 = model_dic.get('size_pooling1') bp1 = model_dic.get('num_bp1') @@ -84,9 +85,9 @@ def ReadModel(cls, model_path): bp3 = model_dic.get('num_bp3') r_w = model_dic.get('rate_weight') r_t = model_dic.get('rate_thre') - #create model instance - conv_ins = CNN(conv_get,size_p1,bp1,bp2,bp3,r_w,r_t) - #modify model parameter + # create model instance + conv_ins = CNN(conv_get, size_p1, bp1, bp2, bp3, r_w, r_t) + # modify model parameter conv_ins.w_conv1 = model_dic.get('w_conv1') conv_ins.wkj = model_dic.get('wkj') conv_ins.vji = model_dic.get('vji') @@ -95,25 +96,24 @@ def ReadModel(cls, model_path): conv_ins.thre_bp3 = model_dic.get('thre_bp3') return conv_ins - def sig(self, x): - return 1 / (1 + np.exp(-1*x)) + return 1 / (1 + np.exp(-1 * x)) def do_round(self, x): return round(x, 3) def convolute(self, data, convs, w_convs, thre_convs, conv_step): - #convolution process + # convolution process size_conv = convs[0] - num_conv =convs[1] + num_conv = convs[1] size_data = np.shape(data)[0] - #get the data slice of original image data, data_focus + # get the data slice of original image data, data_focus data_focus = [] for i_focus in range(0, size_data - size_conv + 1, conv_step): for j_focus in range(0, size_data - size_conv + 1, conv_step): focus = data[i_focus:i_focus + size_conv, j_focus:j_focus + size_conv] data_focus.append(focus) - #caculate the feature map of every single kernel, and saved as list of matrix + # caculate the feature map of every single kernel, and saved as list of matrix data_featuremap = [] Size_FeatureMap = int((size_data - size_conv) / conv_step + 1) for i_map in range(num_conv): @@ -124,53 +124,53 @@ def convolute(self, data, convs, w_convs, thre_convs, conv_step): featuremap = np.asmatrix(featuremap).reshape(Size_FeatureMap, Size_FeatureMap) data_featuremap.append(featuremap) - 
#expanding the data slice to One dimenssion + # expanding the data slice to One dimenssion focus1_list = [] for each_focus in data_focus: focus1_list.extend(self.Expand_Mat(each_focus)) focus_list = np.asarray(focus1_list) - return focus_list,data_featuremap + return focus_list, data_featuremap def pooling(self, featuremaps, size_pooling, type='average_pool'): - #pooling process + # pooling process size_map = len(featuremaps[0]) - size_pooled = int(size_map/size_pooling) + size_pooled = int(size_map / size_pooling) featuremap_pooled = [] for i_map in range(len(featuremaps)): map = featuremaps[i_map] map_pooled = [] - for i_focus in range(0,size_map,size_pooling): + for i_focus in range(0, size_map, size_pooling): for j_focus in range(0, size_map, size_pooling): focus = map[i_focus:i_focus + size_pooling, j_focus:j_focus + size_pooling] if type == 'average_pool': - #average pooling + # average pooling map_pooled.append(np.average(focus)) elif type == 'max_pooling': - #max pooling + # max pooling map_pooled.append(np.max(focus)) - map_pooled = np.asmatrix(map_pooled).reshape(size_pooled,size_pooled) + map_pooled = np.asmatrix(map_pooled).reshape(size_pooled, size_pooled) featuremap_pooled.append(map_pooled) return featuremap_pooled def _expand(self, datas): - #expanding three dimension data to one dimension list + # expanding three dimension data to one dimension list data_expanded = [] for i in range(len(datas)): shapes = np.shape(datas[i]) - data_listed = datas[i].reshape(1,shapes[0]*shapes[1]) + data_listed = datas[i].reshape(1, shapes[0] * shapes[1]) data_listed = data_listed.getA().tolist()[0] data_expanded.extend(data_listed) data_expanded = np.asarray(data_expanded) return data_expanded def _expand_mat(self, data_mat): - #expanding matrix to one dimension list + # expanding matrix to one dimension list data_mat = np.asarray(data_mat) shapes = np.shape(data_mat) - data_expanded = data_mat.reshape(1,shapes[0]*shapes[1]) + data_expanded = data_mat.reshape(1, 
shapes[0] * shapes[1]) return data_expanded - def _calculate_gradient_from_pool(self, out_map, pd_pool,num_map, size_map, size_pooling): + def _calculate_gradient_from_pool(self, out_map, pd_pool, num_map, size_map, size_pooling): ''' calcluate the gradient from the data slice of pool layer pd_pool: list of matrix @@ -185,28 +185,28 @@ def _calculate_gradient_from_pool(self, out_map, pd_pool,num_map, size_map, size for j in range(0, size_map, size_pooling): pd_conv1[i:i + size_pooling, j:j + size_pooling] = pd_pool[i_pool] i_pool = i_pool + 1 - pd_conv2 = np.multiply(pd_conv1,np.multiply(out_map[i_map],(1-out_map[i_map]))) + pd_conv2 = np.multiply(pd_conv1, np.multiply(out_map[i_map], (1 - out_map[i_map]))) pd_all.append(pd_conv2) return pd_all - def train(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e = bool): - #model traning + def train(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e=bool): + # model traning print('----------------------Start Training-------------------------') - print((' - - Shape: Train_Data ',np.shape(datas_train))) - print((' - - Shape: Teach_Data ',np.shape(datas_teach))) + print((' - - Shape: Train_Data ', np.shape(datas_train))) + print((' - - Shape: Teach_Data ', np.shape(datas_teach))) rp = 0 all_mse = [] - mse = 10000 + mse = 10000 while rp < n_repeat and mse >= error_accuracy: alle = 0 - print('-------------Learning Time %d--------------'%rp) + print('-------------Learning Time %d--------------' % rp) for p in range(len(datas_train)): - #print('------------Learning Image: %d--------------'%p) + # print('------------Learning Image: %d--------------'%p) data_train = np.asmatrix(datas_train[p]) data_teach = np.asarray(datas_teach[p]) - data_focus1,data_conved1 = self.convolute(data_train,self.conv1,self.w_conv1, - self.thre_conv1,conv_step=self.step_conv1) - data_pooled1 = self.pooling(data_conved1,self.size_pooling1) + data_focus1, data_conved1 = self.convolute(data_train, 
self.conv1, self.w_conv1, + self.thre_conv1, conv_step=self.step_conv1) + data_pooled1 = self.pooling(data_conved1, self.size_pooling1) shape_featuremap1 = np.shape(data_conved1) ''' print(' -----original shape ', np.shape(data_train)) @@ -216,31 +216,31 @@ def train(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, dr data_bp_input = self._expand(data_pooled1) bp_out1 = data_bp_input - bp_net_j = np.dot(bp_out1,self.vji.T) - self.thre_bp2 + bp_net_j = np.dot(bp_out1, self.vji.T) - self.thre_bp2 bp_out2 = self.sig(bp_net_j) - bp_net_k = np.dot(bp_out2 ,self.wkj.T) - self.thre_bp3 + bp_net_k = np.dot(bp_out2, self.wkj.T) - self.thre_bp3 bp_out3 = self.sig(bp_net_k) - #--------------Model Leaning ------------------------ + # --------------Model Leaning ------------------------ # calcluate error and gradient--------------- pd_k_all = np.multiply((data_teach - bp_out3), np.multiply(bp_out3, (1 - bp_out3))) - pd_j_all = np.multiply(np.dot(pd_k_all,self.wkj), np.multiply(bp_out2, (1 - bp_out2))) - pd_i_all = np.dot(pd_j_all,self.vji) + pd_j_all = np.multiply(np.dot(pd_k_all, self.wkj), np.multiply(bp_out2, (1 - bp_out2))) + pd_i_all = np.dot(pd_j_all, self.vji) - pd_conv1_pooled = pd_i_all / (self.size_pooling1*self.size_pooling1) + pd_conv1_pooled = pd_i_all / (self.size_pooling1 * self.size_pooling1) pd_conv1_pooled = pd_conv1_pooled.T.getA().tolist() - pd_conv1_all = self._calculate_gradient_from_pool(data_conved1,pd_conv1_pooled,shape_featuremap1[0], - shape_featuremap1[1],self.size_pooling1) - #weight and threshold learning process--------- - #convolution layer + pd_conv1_all = self._calculate_gradient_from_pool(data_conved1, pd_conv1_pooled, shape_featuremap1[0], + shape_featuremap1[1], self.size_pooling1) + # weight and threshold learning process--------- + # convolution layer for k_conv in range(self.conv1[1]): pd_conv_list = self._expand_mat(pd_conv1_all[k_conv]) - delta_w = self.rate_weight * np.dot(pd_conv_list,data_focus1) + delta_w = 
self.rate_weight * np.dot(pd_conv_list, data_focus1) - self.w_conv1[k_conv] = self.w_conv1[k_conv] + delta_w.reshape((self.conv1[0],self.conv1[0])) + self.w_conv1[k_conv] = self.w_conv1[k_conv] + delta_w.reshape((self.conv1[0], self.conv1[0])) self.thre_conv1[k_conv] = self.thre_conv1[k_conv] - np.sum(pd_conv1_all[k_conv]) * self.rate_thre - #all connected layer + # all connected layer self.wkj = self.wkj + pd_k_all.T * bp_out2 * self.rate_weight self.vji = self.vji + pd_j_all.T * bp_out1 * self.rate_weight self.thre_bp3 = self.thre_bp3 - pd_k_all * self.rate_thre @@ -248,11 +248,12 @@ def train(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, dr # calculate the sum error of all single image errors = np.sum(abs((data_teach - bp_out3))) alle = alle + errors - #print(' ----Teach ',data_teach) - #print(' ----BP_output ',bp_out3) + # print(' ----Teach ',data_teach) + # print(' ----BP_output ',bp_out3) rp = rp + 1 - mse = alle/patterns + mse = alle / patterns all_mse.append(mse) + def draw_error(): yplot = [error_accuracy for i in range(int(n_repeat * 1.2))] plt.plot(all_mse, '+-') @@ -261,6 +262,7 @@ def draw_error(): plt.ylabel('All_mse') plt.grid(True, alpha=0.5) plt.show() + print('------------------Training Complished---------------------') print((' - - Training epoch: ', rp, ' - - Mse: %.6f' % mse)) if draw_e: @@ -268,14 +270,14 @@ def draw_error(): return mse def predict(self, datas_test): - #model predict + # model predict produce_out = [] print('-------------------Start Testing-------------------------') - print((' - - Shape: Test_Data ',np.shape(datas_test))) + print((' - - Shape: Test_Data ', np.shape(datas_test))) for p in range(len(datas_test)): data_test = np.asmatrix(datas_test[p]) data_focus1, data_conved1 = self.convolute(data_test, self.conv1, self.w_conv1, - self.thre_conv1, conv_step=self.step_conv1) + self.thre_conv1, conv_step=self.step_conv1) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) data_bp_input = 
self._expand(data_pooled1) @@ -285,17 +287,17 @@ def predict(self, datas_test): bp_net_k = bp_out2 * self.wkj.T - self.thre_bp3 bp_out3 = self.sig(bp_net_k) produce_out.extend(bp_out3.getA().tolist()) - res = [list(map(self.do_round,each)) for each in produce_out] + res = [list(map(self.do_round, each)) for each in produce_out] return np.asarray(res) def convolution(self, data): - #return the data of image after convoluting process so we can check it out + # return the data of image after convoluting process so we can check it out data_test = np.asmatrix(data) data_focus1, data_conved1 = self.convolute(data_test, self.conv1, self.w_conv1, - self.thre_conv1, conv_step=self.step_conv1) + self.thre_conv1, conv_step=self.step_conv1) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) - return data_conved1,data_pooled1 + return data_conved1, data_pooled1 if __name__ == '__main__': diff --git a/neural_network/perceptron.py b/neural_network/perceptron.py index eb8b04e855d3..6955e65ae559 100644 --- a/neural_network/perceptron.py +++ b/neural_network/perceptron.py @@ -30,7 +30,7 @@ def training(self): sample.insert(0, self.bias) for i in range(self.col_sample): - self.weight.append(random.random()) + self.weight.append(random.random()) self.weight.insert(0, self.bias) @@ -46,16 +46,15 @@ def training(self): if y != self.exit[i]: for j in range(self.col_sample + 1): - self.weight[j] = self.weight[j] + self.learn_rate * (self.exit[i] - y) * self.sample[i][j] erro = True - #print('Epoch: \n',epoch_count) + # print('Epoch: \n',epoch_count) epoch_count = epoch_count + 1 # if you want controle the epoch or just by erro if erro == False: - print(('\nEpoch:\n',epoch_count)) + print(('\nEpoch:\n', epoch_count)) print('------------------------\n') - #if epoch_count > self.epoch_number or not erro: + # if epoch_count > self.epoch_number or not erro: break def sort(self, sample): @@ -66,7 +65,7 @@ def sort(self, sample): y = self.sign(u) - if y == -1: + if y == -1: 
print(('Sample: ', sample)) print('classification: P1') else: @@ -113,7 +112,7 @@ def sign(self, u): exit = [-1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1] -network = Perceptron(sample=samples, exit = exit, learn_rate=0.01, epoch_number=1000, bias=-1) +network = Perceptron(sample=samples, exit=exit, learn_rate=0.01, epoch_number=1000, bias=-1) network.training() diff --git a/other/anagrams.py b/other/anagrams.py index 29b34fbdc5d3..e6e5b7f3dbbc 100644 --- a/other/anagrams.py +++ b/other/anagrams.py @@ -1,5 +1,9 @@ from __future__ import print_function -import collections, pprint, time, os + +import collections +import os +import pprint +import time start_time = time.time() print('creating word list...') @@ -7,16 +11,20 @@ with open(path[0] + '/words') as f: word_list = sorted(list(set([word.strip().lower() for word in f]))) + def signature(word): return ''.join(sorted(word)) + word_bysig = collections.defaultdict(list) for word in word_list: word_bysig[signature(word)].append(word) + def anagram(myword): return word_bysig[signature(myword)] + print('finding anagrams...') all_anagrams = {word: anagram(word) for word in word_list if len(anagram(word)) > 1} diff --git a/other/binary_exponentiation.py b/other/binary_exponentiation.py index 1a30fb8fd266..dd4e70e74129 100644 --- a/other/binary_exponentiation.py +++ b/other/binary_exponentiation.py @@ -14,7 +14,7 @@ def b_expo(a, b): res = 1 while b > 0: - if b&1: + if b & 1: res *= a a *= a @@ -26,14 +26,15 @@ def b_expo(a, b): def b_expo_mod(a, b, c): res = 1 while b > 0: - if b&1: - res = ((res%c) * (a%c)) % c + if b & 1: + res = ((res % c) * (a % c)) % c a *= a b >>= 1 return res + """ * Wondering how this method works ! * It's pretty simple. 
diff --git a/other/binary_exponentiation_2.py b/other/binary_exponentiation_2.py index 217a616c99fb..51ec4baf2598 100644 --- a/other/binary_exponentiation_2.py +++ b/other/binary_exponentiation_2.py @@ -14,7 +14,7 @@ def b_expo(a, b): res = 0 while b > 0: - if b&1: + if b & 1: res += a a += a @@ -26,8 +26,8 @@ def b_expo(a, b): def b_expo_mod(a, b, c): res = 0 while b > 0: - if b&1: - res = ((res%c) + (a%c)) % c + if b & 1: + res = ((res % c) + (a % c)) % c a += a b >>= 1 diff --git a/other/detecting_english_programmatically.py b/other/detecting_english_programmatically.py index 005fd3c10ca3..a41fa06acd39 100644 --- a/other/detecting_english_programmatically.py +++ b/other/detecting_english_programmatically.py @@ -3,6 +3,7 @@ UPPERLETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' LETTERS_AND_SPACE = UPPERLETTERS + UPPERLETTERS.lower() + ' \t\n' + def loadDictionary(): path = os.path.split(os.path.realpath(__file__)) englishWords = {} @@ -11,8 +12,10 @@ def loadDictionary(): englishWords[word] = None return englishWords + ENGLISH_WORDS = loadDictionary() + def getEnglishCount(message): message = message.upper() message = removeNonLetters(message) @@ -28,6 +31,7 @@ def getEnglishCount(message): return float(matches) / len(possibleWords) + def removeNonLetters(message): lettersOnly = [] for symbol in message: @@ -35,7 +39,8 @@ def removeNonLetters(message): lettersOnly.append(symbol) return ''.join(lettersOnly) -def isEnglish(message, wordPercentage = 20, letterPercentage = 85): + +def isEnglish(message, wordPercentage=20, letterPercentage=85): """ >>> isEnglish('Hello World') True @@ -51,4 +56,5 @@ def isEnglish(message, wordPercentage = 20, letterPercentage = 85): import doctest + doctest.testmod() diff --git a/other/euclidean_gcd.py b/other/euclidean_gcd.py index 30853e172076..6fe269ecdc5e 100644 --- a/other/euclidean_gcd.py +++ b/other/euclidean_gcd.py @@ -1,4 +1,6 @@ from __future__ import print_function + + # https://en.wikipedia.org/wiki/Euclidean_algorithm def 
euclidean_gcd(a, b): @@ -8,6 +10,7 @@ def euclidean_gcd(a, b): a = t return a + def main(): print("GCD(3, 5) = " + str(euclidean_gcd(3, 5))) print("GCD(5, 3) = " + str(euclidean_gcd(5, 3))) @@ -15,5 +18,6 @@ def main(): print("GCD(3, 6) = " + str(euclidean_gcd(3, 6))) print("GCD(6, 3) = " + str(euclidean_gcd(6, 3))) + if __name__ == '__main__': main() diff --git a/other/finding_Primes.py b/other/finding_Primes.py index 035a14f4a335..055be67439fc 100644 --- a/other/finding_Primes.py +++ b/other/finding_Primes.py @@ -4,18 +4,19 @@ ''' from __future__ import print_function - from math import sqrt + + def SOE(n): - check = round(sqrt(n)) #Need not check for multiples past the square root of n - - sieve = [False if i <2 else True for i in range(n+1)] #Set every index to False except for index 0 and 1 - + check = round(sqrt(n)) # Need not check for multiples past the square root of n + + sieve = [False if i < 2 else True for i in range(n + 1)] # Set every index to False except for index 0 and 1 + for i in range(2, check): - if(sieve[i] == True): #If i is a prime - for j in range(i+i, n+1, i): #Step through the list in increments of i(the multiples of the prime) - sieve[j] = False #Sets every multiple of i to False - - for i in range(n+1): - if(sieve[i] == True): + if (sieve[i] == True): # If i is a prime + for j in range(i + i, n + 1, i): # Step through the list in increments of i(the multiples of the prime) + sieve[j] = False # Sets every multiple of i to False + + for i in range(n + 1): + if (sieve[i] == True): print(i, end=" ") diff --git a/other/fischer_yates_shuffle.py b/other/fischer_yates_shuffle.py index d87792f45558..9a6b9f76cc8e 100644 --- a/other/fischer_yates_shuffle.py +++ b/other/fischer_yates_shuffle.py @@ -7,16 +7,18 @@ """ import random + def FYshuffle(LIST): for i in range(len(LIST)): - a = random.randint(0, len(LIST)-1) - b = random.randint(0, len(LIST)-1) + a = random.randint(0, len(LIST) - 1) + b = random.randint(0, len(LIST) - 1) LIST[a], LIST[b] = 
LIST[b], LIST[a] return LIST + if __name__ == '__main__': - integers = [0,1,2,3,4,5,6,7] + integers = [0, 1, 2, 3, 4, 5, 6, 7] strings = ['python', 'says', 'hello', '!'] - print ('Fisher-Yates Shuffle:') - print ('List',integers, strings) - print ('FY Shuffle',FYshuffle(integers), FYshuffle(strings)) + print('Fisher-Yates Shuffle:') + print('List', integers, strings) + print('FY Shuffle', FYshuffle(integers), FYshuffle(strings)) diff --git a/other/frequency_finder.py b/other/frequency_finder.py index 6264b25bf303..c6f41bd3ef47 100644 --- a/other/frequency_finder.py +++ b/other/frequency_finder.py @@ -10,6 +10,7 @@ ETAOIN = 'ETAOINSHRDLCUMWFGYPBVKJXQZ' LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + def getLetterCount(message): letterCount = {'A': 0, 'B': 0, 'C': 0, 'D': 0, 'E': 0, 'F': 0, 'G': 0, 'H': 0, 'I': 0, 'J': 0, 'K': 0, 'L': 0, 'M': 0, 'N': 0, 'O': 0, 'P': 0, @@ -21,9 +22,11 @@ def getLetterCount(message): return letterCount + def getItemAtIndexZero(x): return x[0] + def getFrequencyOrder(message): letterToFreq = getLetterCount(message) freqToLetter = {} @@ -34,11 +37,11 @@ def getFrequencyOrder(message): freqToLetter[letterToFreq[letter]].append(letter) for freq in freqToLetter: - freqToLetter[freq].sort(key = ETAOIN.find, reverse = True) + freqToLetter[freq].sort(key=ETAOIN.find, reverse=True) freqToLetter[freq] = ''.join(freqToLetter[freq]) freqPairs = list(freqToLetter.items()) - freqPairs.sort(key = getItemAtIndexZero, reverse = True) + freqPairs.sort(key=getItemAtIndexZero, reverse=True) freqOrder = [] for freqPair in freqPairs: @@ -46,6 +49,7 @@ def getFrequencyOrder(message): return ''.join(freqOrder) + def englishFreqMatchScore(message): ''' >>> englishFreqMatchScore('Hello World') @@ -63,6 +67,8 @@ def englishFreqMatchScore(message): return matchScore + if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/other/game_of_life/game_o_life.py b/other/game_of_life/game_o_life.py index 1fdaa21b4a7b..cfaef5fc6c2a 100644 --- 
a/other/game_of_life/game_o_life.py +++ b/other/game_of_life/game_o_life.py @@ -27,24 +27,29 @@ Any dead cell with exactly three live neighbours be- comes a live cell, as if by reproduction. ''' +import random +import sys + import numpy as np -import random, sys from matplotlib import pyplot as plt from matplotlib.colors import ListedColormap -usage_doc='Usage of script: script_nama ' +usage_doc = 'Usage of script: script_nama ' -choice = [0]*100 + [1]*10 +choice = [0] * 100 + [1] * 10 random.shuffle(choice) + def create_canvas(size): - canvas = [ [False for i in range(size)] for j in range(size)] + canvas = [[False for i in range(size)] for j in range(size)] return canvas + def seed(canvas): - for i,row in enumerate(canvas): - for j,_ in enumerate(row): - canvas[i][j]=bool(random.getrandbits(1)) + for i, row in enumerate(canvas): + for j, _ in enumerate(row): + canvas[i][j] = bool(random.getrandbits(1)) + def run(canvas): ''' This function runs the rules of game through all points, and changes their status accordingly.(in the same canvas) @@ -61,57 +66,62 @@ def run(canvas): for r, row in enumerate(canvas): for c, pt in enumerate(row): # print(r-1,r+2,c-1,c+2) - next_gen_canvas[r][c] = __judge_point(pt,canvas[r-1:r+2,c-1:c+2]) - + next_gen_canvas[r][c] = __judge_point(pt, canvas[r - 1:r + 2, c - 1:c + 2]) + canvas = next_gen_canvas - del next_gen_canvas # cleaning memory as we move on. - return canvas.tolist() + del next_gen_canvas # cleaning memory as we move on. + return canvas.tolist() -def __judge_point(pt,neighbours): - dead = 0 + +def __judge_point(pt, neighbours): + dead = 0 alive = 0 # finding dead or alive neighbours count. for i in neighbours: for status in i: - if status: alive+=1 - else: dead+=1 + if status: + alive += 1 + else: + dead += 1 # handling duplicate entry for focus pt. - if pt : alive-=1 - else : dead-=1 - + if pt: + alive -= 1 + else: + dead -= 1 + # running the rules of game here. 
state = pt if pt: - if alive<2: - state=False - elif alive==2 or alive==3: - state=True - elif alive>3: - state=False + if alive < 2: + state = False + elif alive == 2 or alive == 3: + state = True + elif alive > 3: + state = False else: - if alive==3: - state=True + if alive == 3: + state = True return state -if __name__=='__main__': +if __name__ == '__main__': if len(sys.argv) != 2: raise Exception(usage_doc) - + canvas_size = int(sys.argv[1]) # main working structure of this module. - c=create_canvas(canvas_size) + c = create_canvas(canvas_size) seed(c) fig, ax = plt.subplots() - fig.show() - cmap = ListedColormap(['w','k']) + fig.show() + cmap = ListedColormap(['w', 'k']) try: while True: - c = run(c) - ax.matshow(c,cmap=cmap) + c = run(c) + ax.matshow(c, cmap=cmap) fig.canvas.draw() - ax.cla() + ax.cla() except KeyboardInterrupt: # do nothing. pass diff --git a/other/linear_congruential_generator.py b/other/linear_congruential_generator.py index 34abdf34eaf3..246f140078bd 100644 --- a/other/linear_congruential_generator.py +++ b/other/linear_congruential_generator.py @@ -1,14 +1,16 @@ from __future__ import print_function + __author__ = "Tobias Carryer" from time import time + class LinearCongruentialGenerator(object): """ A pseudorandom number generator. """ - - def __init__( self, multiplier, increment, modulo, seed=int(time()) ): + + def __init__(self, multiplier, increment, modulo, seed=int(time())): """ These parameters are saved and used when nextNumber() is called. @@ -19,8 +21,8 @@ def __init__( self, multiplier, increment, modulo, seed=int(time()) ): self.increment = increment self.modulo = modulo self.seed = seed - - def next_number( self ): + + def next_number(self): """ The smallest number that can be generated is zero. The largest number that can be generated is modulo-1. modulo is set in the constructor. 
@@ -28,8 +30,9 @@ def next_number( self ): self.seed = (self.multiplier * self.seed + self.increment) % self.modulo return self.seed + if __name__ == "__main__": # Show the LCG in action. - lcg = LinearCongruentialGenerator(1664525, 1013904223, 2<<31) - while True : - print(lcg.next_number()) \ No newline at end of file + lcg = LinearCongruentialGenerator(1664525, 1013904223, 2 << 31) + while True: + print(lcg.next_number()) diff --git a/other/n_queens.py b/other/n_queens.py index 0e80a0cff5e9..86522e5e2eed 100644 --- a/other/n_queens.py +++ b/other/n_queens.py @@ -1,77 +1,79 @@ #! /usr/bin/python3 import sys + def nqueens(board_width): - board = [0] - current_row = 0 - while True: - conflict = False - - for review_index in range(0, current_row): - left = board[review_index] - (current_row - review_index) - right = board[review_index] + (current_row - review_index); - if (board[current_row] == board[review_index] or (left >= 0 and left == board[current_row]) or (right < board_width and right == board[current_row])): - conflict = True; - break - - if (current_row == 0 and conflict == False): - board.append(0) - current_row = 1 - continue - - if (conflict == True): - board[current_row] += 1 - - if (current_row == 0 and board[current_row] == board_width): - print("No solution exists for specificed board size.") - return None - - while True: - if (board[current_row] == board_width): - board[current_row] = 0 - if (current_row == 0): - print("No solution exists for specificed board size.") - return None - - board.pop() - current_row -= 1 - board[current_row] += 1 - - if board[current_row] != board_width: - break - else: - current_row += 1 - if (current_row == board_width): - break - - board.append(0) - return board + board = [0] + current_row = 0 + while True: + conflict = False + + for review_index in range(0, current_row): + left = board[review_index] - (current_row - review_index) + right = board[review_index] + (current_row - review_index); + if (board[current_row] 
== board[review_index] or (left >= 0 and left == board[current_row]) or (right < board_width and right == board[current_row])): + conflict = True; + break + + if (current_row == 0 and conflict == False): + board.append(0) + current_row = 1 + continue + + if (conflict == True): + board[current_row] += 1 + + if (current_row == 0 and board[current_row] == board_width): + print("No solution exists for specificed board size.") + return None + + while True: + if (board[current_row] == board_width): + board[current_row] = 0 + if (current_row == 0): + print("No solution exists for specificed board size.") + return None + + board.pop() + current_row -= 1 + board[current_row] += 1 + + if board[current_row] != board_width: + break + else: + current_row += 1 + if (current_row == board_width): + break + + board.append(0) + return board + def print_board(board): - if (board == None): - return + if (board == None): + return - board_width = len(board) - for row in range(board_width): - line_print = [] - for column in range(board_width): - if column == board[row]: - line_print.append("Q") - else: - line_print.append(".") - print(line_print) + board_width = len(board) + for row in range(board_width): + line_print = [] + for column in range(board_width): + if column == board[row]: + line_print.append("Q") + else: + line_print.append(".") + print(line_print) if __name__ == '__main__': - default_width = 8 - for arg in sys.argv: - if (arg.isdecimal() and int(arg) > 3): - default_width = int(arg) - break - - if (default_width == 8): - print("Running algorithm with board size of 8. Specify an alternative Chess board size for N-Queens as a command line argument.") - - board = nqueens(default_width) - print(board) - print_board(board) + default_width = 8 + for arg in sys.argv: + if (arg.isdecimal() and int(arg) > 3): + default_width = int(arg) + break + + if (default_width == 8): + print("Running algorithm with board size of 8. 
Specify an alternative Chess board size for N-Queens as a command line argument.") + + board = nqueens(default_width) + print(board) + print_board(board) diff --git a/other/nested_brackets.py b/other/nested_brackets.py index 76677d56439a..1d991424167e 100644 --- a/other/nested_brackets.py +++ b/other/nested_brackets.py @@ -17,11 +17,10 @@ def is_balanced(S): - stack = [] open_brackets = set({'(', '[', '{'}) closed_brackets = set({')', ']', '}'}) - open_to_closed = dict({'{':'}', '[':']', '(':')'}) + open_to_closed = dict({'{': '}', '[': ']', '(': ')'}) for i in range(len(S)): @@ -36,7 +35,6 @@ def is_balanced(S): def main(): - S = input("Enter sequence of brackets: ") if is_balanced(S): diff --git a/other/password_generator.py b/other/password_generator.py index 8916079fc758..99a7881911f7 100644 --- a/other/password_generator.py +++ b/other/password_generator.py @@ -1,6 +1,7 @@ from __future__ import print_function -import string + import random +import string letters = [letter for letter in string.ascii_letters] digits = [digit for digit in string.digits] @@ -19,17 +20,17 @@ # ctbi= characters that must be in password # i= how many letters or characters the password length will be def password_generator(ctbi, i): - # Password generator = full boot with random_number, random_letters, and random_character FUNCTIONS - pass # Put your code here... + # Password generator = full boot with random_number, random_letters, and random_character FUNCTIONS + pass # Put your code here... def random_number(ctbi, i): - pass # Put your code here... + pass # Put your code here... def random_letters(ctbi, i): - pass # Put your code here... + pass # Put your code here... def random_characters(ctbi, i): - pass # Put your code here... + pass # Put your code here... 
diff --git a/other/primelib.py b/other/primelib.py index 19572f8611cb..d686a3f404e1 100644 --- a/other/primelib.py +++ b/other/primelib.py @@ -39,36 +39,38 @@ """ + def isPrime(number): """ input: positive integer 'number' returns true if 'number' is prime otherwise false. """ - import math # for function sqrt - + import math # for function sqrt + # precondition - assert isinstance(number,int) and (number >= 0) , \ - "'number' must been an int and positive" - + assert isinstance(number, int) and (number >= 0), \ + "'number' must been an int and positive" + status = True - + # 0 and 1 are none primes. if number <= 1: status = False - - for divisor in range(2,int(round(math.sqrt(number)))+1): - + + for divisor in range(2, int(round(math.sqrt(number))) + 1): + # if 'number' divisible by 'divisor' then sets 'status' # of false and break up the loop. if number % divisor == 0: status = False break - + # precondition - assert isinstance(status,bool), "'status' must been from type bool" - + assert isinstance(status, bool), "'status' must been from type bool" + return status + # ------------------------------------------ def sieveEr(N): @@ -80,32 +82,32 @@ def sieveEr(N): sieve of erathostenes. """ - + # precondition - assert isinstance(N,int) and (N > 2), "'N' must been an int and > 2" - + assert isinstance(N, int) and (N > 2), "'N' must been an int and > 2" + # beginList: conatins all natural numbers from 2 upt to N - beginList = [x for x in range(2,N+1)] + beginList = [x for x in range(2, N + 1)] + + ans = [] # this list will be returns. - ans = [] # this list will be returns. - # actual sieve of erathostenes for i in range(len(beginList)): - - for j in range(i+1,len(beginList)): - + + for j in range(i + 1, len(beginList)): + if (beginList[i] != 0) and \ - (beginList[j] % beginList[i] == 0): + (beginList[j] % beginList[i] == 0): beginList[j] = 0 - + # filters actual prime numbers. 
ans = [x for x in beginList if x != 0] - + # precondition - assert isinstance(ans,list), "'ans' must been from type list" - + assert isinstance(ans, list), "'ans' must been from type list" + return ans - + # -------------------------------- @@ -114,339 +116,339 @@ def getPrimeNumbers(N): input: positive integer 'N' > 2 returns a list of prime numbers from 2 up to N (inclusive) This function is more efficient as function 'sieveEr(...)' - """ - + """ + # precondition - assert isinstance(N,int) and (N > 2), "'N' must been an int and > 2" - - ans = [] - + assert isinstance(N, int) and (N > 2), "'N' must been an int and > 2" + + ans = [] + # iterates over all numbers between 2 up to N+1 # if a number is prime then appends to list 'ans' - for number in range(2,N+1): - + for number in range(2, N + 1): + if isPrime(number): - ans.append(number) - + # precondition - assert isinstance(ans,list), "'ans' must been from type list" - + assert isinstance(ans, list), "'ans' must been from type list" + return ans # ----------------------------------------- - + def primeFactorization(number): """ input: positive integer 'number' returns a list of the prime number factors of 'number' """ - import math # for function sqrt - # precondition - assert isinstance(number,int) and number >= 0, \ - "'number' must been an int and >= 0" - - ans = [] # this list will be returns of the function. + assert isinstance(number, int) and number >= 0, \ + "'number' must been an int and >= 0" + + ans = [] # this list will be returns of the function. # potential prime number factors. 
- factor = 2 + factor = 2 quotient = number - - + if number == 0 or number == 1: - + ans.append(number) - + # if 'number' not prime then builds the prime factorization of 'number' elif not isPrime(number): - + while (quotient != 1): - + if isPrime(factor) and (quotient % factor == 0): - ans.append(factor) - quotient /= factor + ans.append(factor) + quotient /= factor else: - factor += 1 - + factor += 1 + else: ans.append(number) - + # precondition - assert isinstance(ans,list), "'ans' must been from type list" - + assert isinstance(ans, list), "'ans' must been from type list" + return ans - + # ----------------------------------------- - + def greatestPrimeFactor(number): """ input: positive integer 'number' >= 0 returns the greatest prime number factor of 'number' """ - + # precondition - assert isinstance(number,int) and (number >= 0), \ - "'number' bust been an int and >= 0" - - ans = 0 - + assert isinstance(number, int) and (number >= 0), \ + "'number' bust been an int and >= 0" + + ans = 0 + # prime factorization of 'number' primeFactors = primeFactorization(number) - ans = max(primeFactors) - + ans = max(primeFactors) + # precondition - assert isinstance(ans,int), "'ans' must been from type int" - + assert isinstance(ans, int), "'ans' must been from type int" + return ans - + # ---------------------------------------------- - - + + def smallestPrimeFactor(number): """ input: integer 'number' >= 0 returns the smallest prime number factor of 'number' """ - + # precondition - assert isinstance(number,int) and (number >= 0), \ - "'number' bust been an int and >= 0" - - ans = 0 - + assert isinstance(number, int) and (number >= 0), \ + "'number' bust been an int and >= 0" + + ans = 0 + # prime factorization of 'number' primeFactors = primeFactorization(number) - + ans = min(primeFactors) # precondition - assert isinstance(ans,int), "'ans' must been from type int" - + assert isinstance(ans, int), "'ans' must been from type int" + return ans - - + + # 
---------------------- - + def isEven(number): """ input: integer 'number' returns true if 'number' is even, otherwise false. - """ + """ # precondition - assert isinstance(number, int), "'number' must been an int" + assert isinstance(number, int), "'number' must been an int" assert isinstance(number % 2 == 0, bool), "compare bust been from type bool" - + return number % 2 == 0 - + + # ------------------------ - + def isOdd(number): """ input: integer 'number' returns true if 'number' is odd, otherwise false. - """ + """ # precondition - assert isinstance(number, int), "'number' must been an int" + assert isinstance(number, int), "'number' must been an int" assert isinstance(number % 2 != 0, bool), "compare bust been from type bool" - + return number % 2 != 0 - + + # ------------------------ - - + + def goldbach(number): """ Goldbach's assumption input: a even positive integer 'number' > 2 returns a list of two prime numbers whose sum is equal to 'number' """ - + # precondition - assert isinstance(number,int) and (number > 2) and isEven(number), \ - "'number' must been an int, even and > 2" - - ans = [] # this list will returned - + assert isinstance(number, int) and (number > 2) and isEven(number), \ + "'number' must been an int, even and > 2" + + ans = [] # this list will returned + # creates a list of prime numbers between 2 up to 'number' primeNumbers = getPrimeNumbers(number) - lenPN = len(primeNumbers) + lenPN = len(primeNumbers) # run variable for while-loops. i = 0 j = 1 - + # exit variable. for break up the loops loop = True - + while (i < lenPN and loop): - - j = i+1 - - + + j = i + 1 + while (j < lenPN and loop): - + if primeNumbers[i] + primeNumbers[j] == number: loop = False ans.append(primeNumbers[i]) ans.append(primeNumbers[j]) - + j += 1 i += 1 - + # precondition - assert isinstance(ans,list) and (len(ans) == 2) and \ - (ans[0] + ans[1] == number) and isPrime(ans[0]) and isPrime(ans[1]), \ - "'ans' must contains two primes. 
And sum of elements must been eq 'number'" - + assert isinstance(ans, list) and (len(ans) == 2) and \ + (ans[0] + ans[1] == number) and isPrime(ans[0]) and isPrime(ans[1]), \ + "'ans' must contains two primes. And sum of elements must been eq 'number'" + return ans - + + # ---------------------------------------------- -def gcd(number1,number2): +def gcd(number1, number2): """ Greatest common divisor input: two positive integer 'number1' and 'number2' returns the greatest common divisor of 'number1' and 'number2' """ - + # precondition - assert isinstance(number1,int) and isinstance(number2,int) \ - and (number1 >= 0) and (number2 >= 0), \ - "'number1' and 'number2' must been positive integer." + assert isinstance(number1, int) and isinstance(number2, int) \ + and (number1 >= 0) and (number2 >= 0), \ + "'number1' and 'number2' must been positive integer." + + rest = 0 - rest = 0 - while number2 != 0: - rest = number1 % number2 number1 = number2 number2 = rest # precondition - assert isinstance(number1,int) and (number1 >= 0), \ - "'number' must been from type int and positive" - + assert isinstance(number1, int) and (number1 >= 0), \ + "'number' must been from type int and positive" + return number1 - + + # ---------------------------------------------------- - + def kgV(number1, number2): """ Least common multiple input: two positive integer 'number1' and 'number2' returns the least common multiple of 'number1' and 'number2' """ - + # precondition - assert isinstance(number1,int) and isinstance(number2,int) \ - and (number1 >= 1) and (number2 >= 1), \ - "'number1' and 'number2' must been positive integer." - - ans = 1 # actual answer that will be return. - + assert isinstance(number1, int) and isinstance(number2, int) \ + and (number1 >= 1) and (number2 >= 1), \ + "'number1' and 'number2' must been positive integer." + + ans = 1 # actual answer that will be return. 
+ # for kgV (x,1) if number1 > 1 and number2 > 1: - + # builds the prime factorization of 'number1' and 'number2' primeFac1 = primeFactorization(number1) primeFac2 = primeFactorization(number2) - + elif number1 == 1 or number2 == 1: - + primeFac1 = [] primeFac2 = [] - ans = max(number1,number2) - + ans = max(number1, number2) + count1 = 0 count2 = 0 - - done = [] # captured numbers int both 'primeFac1' and 'primeFac2' - + + done = [] # captured numbers int both 'primeFac1' and 'primeFac2' + # iterates through primeFac1 for n in primeFac1: - + if n not in done: - + if n in primeFac2: - + count1 = primeFac1.count(n) count2 = primeFac2.count(n) - - for i in range(max(count1,count2)): + + for i in range(max(count1, count2)): ans *= n - + else: - + count1 = primeFac1.count(n) - + for i in range(count1): ans *= n - + done.append(n) - + # iterates through primeFac2 for n in primeFac2: - + if n not in done: - + count2 = primeFac2.count(n) - + for i in range(count2): ans *= n - + done.append(n) - + # precondition - assert isinstance(ans,int) and (ans >= 0), \ - "'ans' must been from type int and positive" - + assert isinstance(ans, int) and (ans >= 0), \ + "'ans' must been from type int and positive" + return ans - + + # ---------------------------------- - + def getPrime(n): """ Gets the n-th prime number. input: positive integer 'n' >= 0 returns the n-th prime number, beginning at index 0 """ - + # precondition - assert isinstance(n,int) and (n >= 0), "'number' must been a positive int" - + assert isinstance(n, int) and (n >= 0), "'number' must been a positive int" + index = 0 - ans = 2 # this variable holds the answer - + ans = 2 # this variable holds the answer + while index < n: - + index += 1 - - ans += 1 # counts to the next number - + + ans += 1 # counts to the next number + # if ans not prime then # runs to the next prime number. 
while not isPrime(ans): ans += 1 - + # precondition - assert isinstance(ans,int) and isPrime(ans), \ - "'ans' must been a prime number and from type int" - + assert isinstance(ans, int) and isPrime(ans), \ + "'ans' must been a prime number and from type int" + return ans - + + # --------------------------------------------------- - + def getPrimesBetween(pNumber1, pNumber2): """ input: prime numbers 'pNumber1' and 'pNumber2' @@ -454,38 +456,39 @@ def getPrimesBetween(pNumber1, pNumber2): returns a list of all prime numbers between 'pNumber1' (exclusiv) and 'pNumber2' (exclusiv) """ - + # precondition assert isPrime(pNumber1) and isPrime(pNumber2) and (pNumber1 < pNumber2), \ - "The arguments must been prime numbers and 'pNumber1' < 'pNumber2'" - - number = pNumber1 + 1 # jump to the next number - - ans = [] # this list will be returns. - + "The arguments must been prime numbers and 'pNumber1' < 'pNumber2'" + + number = pNumber1 + 1 # jump to the next number + + ans = [] # this list will be returns. + # if number is not prime then # fetch the next prime number. while not isPrime(number): number += 1 - + while number < pNumber2: - + ans.append(number) - + number += 1 - + # fetch the next prime number. while not isPrime(number): number += 1 - + # precondition - assert isinstance(ans,list) and ans[0] != pNumber1 \ - and ans[len(ans)-1] != pNumber2, \ - "'ans' must been a list without the arguments" - + assert isinstance(ans, list) and ans[0] != pNumber1 \ + and ans[len(ans) - 1] != pNumber2, \ + "'ans' must been a list without the arguments" + # 'ans' contains not 'pNumber1' and 'pNumber2' ! 
return ans - + + # ---------------------------------------------------- def getDivisors(n): @@ -493,25 +496,21 @@ def getDivisors(n): input: positive integer 'n' >= 1 returns all divisors of n (inclusive 1 and 'n') """ - + # precondition - assert isinstance(n,int) and (n >= 1), "'n' must been int and >= 1" + assert isinstance(n, int) and (n >= 1), "'n' must been int and >= 1" + + ans = [] # will be returned. + + for divisor in range(1, n + 1): - from math import sqrt - - ans = [] # will be returned. - - for divisor in range(1,n+1): - if n % divisor == 0: ans.append(divisor) - - - #precondition - assert ans[0] == 1 and ans[len(ans)-1] == n, \ - "Error in function getDivisiors(...)" - - + + # precondition + assert ans[0] == 1 and ans[len(ans) - 1] == n, \ + "Error in function getDivisiors(...)" + return ans @@ -523,21 +522,22 @@ def isPerfectNumber(number): input: positive integer 'number' > 1 returns true if 'number' is a perfect number otherwise false. """ - + # precondition - assert isinstance(number,int) and (number > 1), \ - "'number' must been an int and >= 1" - + assert isinstance(number, int) and (number > 1), \ + "'number' must been an int and >= 1" + divisors = getDivisors(number) - + # precondition - assert isinstance(divisors,list) and(divisors[0] == 1) and \ - (divisors[len(divisors)-1] == number), \ - "Error in help-function getDivisiors(...)" - + assert isinstance(divisors, list) and (divisors[0] == 1) and \ + (divisors[len(divisors) - 1] == number), \ + "Error in help-function getDivisiors(...)" + # summed all divisors up to 'number' (exclusive), hence [:-1] return sum(divisors[:-1]) == number + # ------------------------------------------------------------ def simplifyFraction(numerator, denominator): @@ -545,60 +545,61 @@ def simplifyFraction(numerator, denominator): input: two integer 'numerator' and 'denominator' assumes: 'denominator' != 0 returns: a tuple with simplify numerator and denominator. 
- """ - + """ + # precondition - assert isinstance(numerator, int) and isinstance(denominator,int) \ - and (denominator != 0), \ - "The arguments must been from type int and 'denominator' != 0" - + assert isinstance(numerator, int) and isinstance(denominator, int) \ + and (denominator != 0), \ + "The arguments must been from type int and 'denominator' != 0" + # build the greatest common divisor of numerator and denominator. gcdOfFraction = gcd(abs(numerator), abs(denominator)) # precondition assert isinstance(gcdOfFraction, int) and (numerator % gcdOfFraction == 0) \ - and (denominator % gcdOfFraction == 0), \ - "Error in function gcd(...,...)" - + and (denominator % gcdOfFraction == 0), \ + "Error in function gcd(...,...)" + return (numerator // gcdOfFraction, denominator // gcdOfFraction) - + + # ----------------------------------------------------------------- - + def factorial(n): """ input: positive integer 'n' returns the factorial of 'n' (n!) """ - + # precondition - assert isinstance(n,int) and (n >= 0), "'n' must been a int and >= 0" - - ans = 1 # this will be return. - - for factor in range(1,n+1): + assert isinstance(n, int) and (n >= 0), "'n' must been a int and >= 0" + + ans = 1 # this will be return. 
+ + for factor in range(1, n + 1): ans *= factor - + return ans - + + # ------------------------------------------------------------------- - + def fib(n): """ input: positive integer 'n' returns the n-th fibonacci term , indexing by 0 - """ - + """ + # precondition assert isinstance(n, int) and (n >= 0), "'n' must been an int and >= 0" - + tmp = 0 fib1 = 1 - ans = 1 # this will be return - - for i in range(n-1): - + ans = 1 # this will be return + + for i in range(n - 1): tmp = ans ans += fib1 fib1 = tmp - + return ans diff --git a/other/sierpinski_triangle.py b/other/sierpinski_triangle.py index 329a8ce5c43f..1af411090806 100644 --- a/other/sierpinski_triangle.py +++ b/other/sierpinski_triangle.py @@ -24,10 +24,11 @@ Credits: This code was written by editing the code from http://www.riannetrujillo.com/blog/python-fractal/ ''' -import turtle import sys +import turtle + PROGNAME = 'Sierpinski Triangle' -if len(sys.argv) !=2: +if len(sys.argv) != 2: raise Exception('right format for using this script: $python fractals.py ') myPen = turtle.Turtle() @@ -35,33 +36,34 @@ myPen.speed(5) myPen.pencolor('red') -points = [[-175,-125],[0,175],[175,-125]] #size of triangle +points = [[-175, -125], [0, 175], [175, -125]] # size of triangle + -def getMid(p1,p2): - return ( (p1[0]+p2[0]) / 2, (p1[1] + p2[1]) / 2) #find midpoint +def getMid(p1, p2): + return ((p1[0] + p2[0]) / 2, (p1[1] + p2[1]) / 2) # find midpoint -def triangle(points,depth): +def triangle(points, depth): myPen.up() - myPen.goto(points[0][0],points[0][1]) + myPen.goto(points[0][0], points[0][1]) myPen.down() - myPen.goto(points[1][0],points[1][1]) - myPen.goto(points[2][0],points[2][1]) - myPen.goto(points[0][0],points[0][1]) + myPen.goto(points[1][0], points[1][1]) + myPen.goto(points[2][0], points[2][1]) + myPen.goto(points[0][0], points[0][1]) - if depth>0: + if depth > 0: triangle([points[0], - getMid(points[0], points[1]), - getMid(points[0], points[2])], - depth-1) + getMid(points[0], points[1]), + 
getMid(points[0], points[2])], + depth - 1) triangle([points[1], - getMid(points[0], points[1]), - getMid(points[1], points[2])], - depth-1) + getMid(points[0], points[1]), + getMid(points[1], points[2])], + depth - 1) triangle([points[2], - getMid(points[2], points[1]), - getMid(points[0], points[2])], - depth-1) + getMid(points[2], points[1]), + getMid(points[0], points[2])], + depth - 1) -triangle(points,int(sys.argv[1])) +triangle(points, int(sys.argv[1])) diff --git a/other/tower_of_hanoi.py b/other/tower_of_hanoi.py index dc15b2ce8e58..a6848b2e4913 100644 --- a/other/tower_of_hanoi.py +++ b/other/tower_of_hanoi.py @@ -1,5 +1,7 @@ from __future__ import print_function -def moveTower(height, fromPole, toPole, withPole): + + +def moveTower(height, fromPole, toPole, withPole): ''' >>> moveTower(3, 'A', 'B', 'C') moving disk from A to B @@ -11,16 +13,19 @@ def moveTower(height, fromPole, toPole, withPole): moving disk from A to B ''' if height >= 1: - moveTower(height-1, fromPole, withPole, toPole) + moveTower(height - 1, fromPole, withPole, toPole) moveDisk(fromPole, toPole) - moveTower(height-1, withPole, toPole, fromPole) + moveTower(height - 1, withPole, toPole, fromPole) + -def moveDisk(fp,tp): +def moveDisk(fp, tp): print(('moving disk from', fp, 'to', tp)) + def main(): height = int(input('Height of hanoi: ')) moveTower(height, 'A', 'B', 'C') + if __name__ == '__main__': main() diff --git a/other/two_sum.py b/other/two_sum.py index d4484aa85505..7c67ae97d801 100644 --- a/other/two_sum.py +++ b/other/two_sum.py @@ -11,6 +11,7 @@ """ from __future__ import print_function + def twoSum(nums, target): """ :type nums: List[int] @@ -19,11 +20,11 @@ def twoSum(nums, target): """ chk_map = {} for index, val in enumerate(nums): - compl = target - val - if compl in chk_map: - indices = [chk_map[compl], index] - print(indices) - return [indices] - else: - chk_map[val] = index + compl = target - val + if compl in chk_map: + indices = [chk_map[compl], index] + 
print(indices) + return [indices] + else: + chk_map[val] = index return False diff --git a/other/word_patterns.py b/other/word_patterns.py index c33d520087f7..87365f210625 100644 --- a/other/word_patterns.py +++ b/other/word_patterns.py @@ -1,5 +1,8 @@ from __future__ import print_function -import pprint, time + +import pprint +import time + def getWordPattern(word): word = word.upper() @@ -14,6 +17,7 @@ def getWordPattern(word): wordPattern.append(letterNums[letter]) return '.'.join(wordPattern) + def main(): startTime = time.time() allPatterns = {} @@ -35,5 +39,6 @@ def main(): totalTime = round(time.time() - startTime, 2) print(('Done! [', totalTime, 'seconds ]')) + if __name__ == '__main__': main() diff --git a/project_euler/problem_01/sol1.py b/project_euler/problem_01/sol1.py index c9a8c0f1ebeb..5058ca36c64e 100644 --- a/project_euler/problem_01/sol1.py +++ b/project_euler/problem_01/sol1.py @@ -1,13 +1,20 @@ -''' +""" Problem Statement: If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3,5,6 and 9. The sum of these multiples is 23. Find the sum of all the multiples of 3 or 5 below N. -''' +""" from __future__ import print_function -try: - raw_input # Python 2 -except NameError: - raw_input = input # Python 3 -n = int(raw_input().strip()) -print(sum([e for e in range(3, n) if e % 3 == 0 or e % 5 == 0])) + +N = 10 +N_limit = 101 +while N < N_limit: + # raw_input = input("请输入一个大于3的自然数:") + # n = int(filter(str.isdigit(), raw_input)) + n = N + sum_ = 0 + for e in range(3, n): + if e % 3 == 0 or e % 5 == 0: + sum_ += e + print(sum_) + N += 10 diff --git a/project_euler/problem_01/sol2.py b/project_euler/problem_01/sol2.py deleted file mode 100644 index 2b7760e0bfff..000000000000 --- a/project_euler/problem_01/sol2.py +++ /dev/null @@ -1,20 +0,0 @@ -''' -Problem Statement: -If we list all the natural numbers below 10 that are multiples of 3 or 5, -we get 3,5,6 and 9. The sum of these multiples is 23. 
-Find the sum of all the multiples of 3 or 5 below N. -''' -from __future__ import print_function -try: - raw_input # Python 2 -except NameError: - raw_input = input # Python 3 -n = int(raw_input().strip()) -sum = 0 -terms = (n-1)//3 -sum+= ((terms)*(6+(terms-1)*3))//2 #sum of an A.P. -terms = (n-1)//5 -sum+= ((terms)*(10+(terms-1)*5))//2 -terms = (n-1)//15 -sum-= ((terms)*(30+(terms-1)*15))//2 -print(sum) diff --git a/project_euler/problem_01/sol3.py b/project_euler/problem_01/sol3.py deleted file mode 100644 index f4f3aefcc5de..000000000000 --- a/project_euler/problem_01/sol3.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import print_function - -''' -Problem Statement: -If we list all the natural numbers below 10 that are multiples of 3 or 5, -we get 3,5,6 and 9. The sum of these multiples is 23. -Find the sum of all the multiples of 3 or 5 below N. -''' -''' -This solution is based on the pattern that the successive numbers in the series follow: 0+3,+2,+1,+3,+1,+2,+3. -''' - -try: - raw_input # Python 2 -except NameError: - raw_input = input # Python 3 -n = int(raw_input().strip()) -sum=0 -num=0 -while(1): - num+=3 - if(num>=n): - break - sum+=num - num+=2 - if(num>=n): - break - sum+=num - num+=1 - if(num>=n): - break - sum+=num - num+=3 - if(num>=n): - break - sum+=num - num+=1 - if(num>=n): - break - sum+=num - num+=2 - if(num>=n): - break - sum+=num - num+=3 - if(num>=n): - break - sum+=num - -print(sum); diff --git a/project_euler/problem_01/sol4.py b/project_euler/problem_01/sol4.py deleted file mode 100644 index 7941f5fcd3fe..000000000000 --- a/project_euler/problem_01/sol4.py +++ /dev/null @@ -1,30 +0,0 @@ -def mulitples(limit): - xmulti = [] - zmulti = [] - z = 3 - x = 5 - temp = 1 - while True: - result = z * temp - if (result < limit): - zmulti.append(result) - temp += 1 - else: - temp = 1 - break - while True: - result = x * temp - if (result < limit): - xmulti.append(result) - temp += 1 - else: - break - collection = list(set(xmulti+zmulti)) - 
return (sum(collection)) - - - - - - -print (mulitples(1000)) diff --git a/project_euler/problem_01/sol5.py b/project_euler/problem_01/sol5.py deleted file mode 100644 index e261cc8fc729..000000000000 --- a/project_euler/problem_01/sol5.py +++ /dev/null @@ -1,16 +0,0 @@ -''' -Problem Statement: -If we list all the natural numbers below 10 that are multiples of 3 or 5, -we get 3,5,6 and 9. The sum of these multiples is 23. -Find the sum of all the multiples of 3 or 5 below N. -''' -from __future__ import print_function -try: - input = raw_input #python3 -except NameError: - pass #python 2 - -"""A straightforward pythonic solution using list comprehension""" -n = int(input().strip()) -print(sum([i for i in range(n) if i%3==0 or i%5==0])) - diff --git a/project_euler/problem_01/sol6.py b/project_euler/problem_01/sol6.py deleted file mode 100644 index 54c3073f3897..000000000000 --- a/project_euler/problem_01/sol6.py +++ /dev/null @@ -1,9 +0,0 @@ -a = 3 -result = 0 -while a < 1000: - if(a % 3 == 0 or a % 5 == 0): - result += a - elif(a % 15 == 0): - result -= a - a += 1 -print(result) diff --git a/project_euler/problem_02/sol1.py b/project_euler/problem_02/sol1.py index 44ea980f2df0..456d56cb9238 100644 --- a/project_euler/problem_02/sol1.py +++ b/project_euler/problem_02/sol1.py @@ -1,24 +1,24 @@ -''' +""" Problem: Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be: 1,2,3,5,8,13,21,34,55,89,.. By considering the terms in the Fibonacci sequence whose values do not exceed n, find the sum of the even-valued terms. e.g. for n=10, we have {2,8}, sum is 10. 
-''' +""" from __future__ import print_function -try: - raw_input # Python 2 -except NameError: - raw_input = input # Python 3 - -n = int(raw_input().strip()) -i=1 -j=2 -sum=0 -while(j<=n): - if j%2 == 0: - sum+=j - i , j = j, i+j -print(sum) +N = 1 +N_limit = 10 +while N < N_limit: + n = N + sum_ = 0 + i = 1 + j = 2 + while j <= n: + if j % 2 == 0: + sum_ += j + # 二元赋值运算 + i, j = j, i + j + print(sum_) + N += 1 diff --git a/project_euler/problem_02/sol2.py b/project_euler/problem_02/sol2.py deleted file mode 100644 index a2772697bb79..000000000000 --- a/project_euler/problem_02/sol2.py +++ /dev/null @@ -1,15 +0,0 @@ -def fib(n): - """ - Returns a list of all the even terms in the Fibonacci sequence that are less than n. - """ - ls = [] - a, b = 0, 1 - while b < n: - if b % 2 == 0: - ls.append(b) - a, b = b, a+b - return ls - -if __name__ == '__main__': - n = int(input("Enter max number: ").strip()) - print(sum(fib(n))) diff --git a/project_euler/problem_02/sol3.py b/project_euler/problem_02/sol3.py deleted file mode 100644 index 0eb46d879704..000000000000 --- a/project_euler/problem_02/sol3.py +++ /dev/null @@ -1,18 +0,0 @@ -''' -Problem: -Each new term in the Fibonacci sequence is generated by adding the previous two terms. - 0,1,1,2,3,5,8,13,21,34,55,89,.. -Every third term from 0 is even So using this I have written a simple code -By considering the terms in the Fibonacci sequence whose values do not exceed n, find the sum of the even-valued terms. -e.g. for n=10, we have {2,8}, sum is 10. 
-''' -"""Python 3""" -n = int(input()) -a=0 -b=2 -count=0 -while 4*b+a1): - prime=n + +n = int(input()) +prime = 1 +i = 2 +while i * i <= n: + while n % i == 0: + prime = i + n //= i + i += 1 +if n > 1: + prime = n print(prime) diff --git a/project_euler/problem_04/sol1.py b/project_euler/problem_04/sol1.py index 05fdd9ebab55..30a2b0032bf0 100644 --- a/project_euler/problem_04/sol1.py +++ b/project_euler/problem_04/sol1.py @@ -4,26 +4,26 @@ Find the largest palindrome made from the product of two 3-digit numbers which is less than N. ''' from __future__ import print_function + limit = int(input("limit? ")) # fetchs the next number -for number in range(limit-1,10000,-1): +for number in range(limit - 1, 10000, -1): # converts number into string. strNumber = str(number) # checks whether 'strNumber' is a palindrome. - if(strNumber == strNumber[::-1]): + if (strNumber == strNumber[::-1]): divisor = 999 # if 'number' is a product of two 3-digit numbers # then number is the answer otherwise fetch next number. - while(divisor != 99): - - if((number % divisor == 0) and (len(str(number / divisor)) == 3)): + while (divisor != 99): + if ((number % divisor == 0) and (len(str(number / divisor)) == 3)): print(number) exit(0) - divisor -=1 + divisor -= 1 diff --git a/project_euler/problem_04/sol2.py b/project_euler/problem_04/sol2.py index 70810c38986f..e05f3773fc00 100644 --- a/project_euler/problem_04/sol2.py +++ b/project_euler/problem_04/sol2.py @@ -4,14 +4,13 @@ Find the largest palindrome made from the product of two 3-digit numbers which is less than N. 
''' from __future__ import print_function + n = int(input().strip()) answer = 0 -for i in range(999,99,-1): #3 digit nimbers range from 999 down to 100 - for j in range(999,99,-1): - t = str(i*j) - if t == t[::-1] and i*j < n: - answer = max(answer,i*j) +for i in range(999, 99, -1): # 3 digit nimbers range from 999 down to 100 + for j in range(999, 99, -1): + t = str(i * j) + if t == t[::-1] and i * j < n: + answer = max(answer, i * j) print(answer) exit(0) - - diff --git a/project_euler/problem_05/sol1.py b/project_euler/problem_05/sol1.py index 7896d75e3456..9dc912b5c208 100644 --- a/project_euler/problem_05/sol1.py +++ b/project_euler/problem_05/sol1.py @@ -8,14 +8,14 @@ n = int(input()) i = 0 while 1: - i+=n*(n-1) - nfound=0 - for j in range(2,n): - if (i%j != 0): - nfound=1 + i += n * (n - 1) + nfound = 0 + for j in range(2, n): + if (i % j != 0): + nfound = 1 break - if(nfound==0): - if(i==0): - i=1 + if (nfound == 0): + if (i == 0): + i = 1 print(i) break diff --git a/project_euler/problem_05/sol2.py b/project_euler/problem_05/sol2.py index cd11437f30db..11c8308b11f4 100644 --- a/project_euler/problem_05/sol2.py +++ b/project_euler/problem_05/sol2.py @@ -6,15 +6,21 @@ ''' """ Euclidean GCD Algorithm """ -def gcd(x,y): - return x if y==0 else gcd(y,x%y) + + +def gcd(x, y): + return x if y == 0 else gcd(y, x % y) + """ Using the property lcm*gcd of two numbers = product of them """ -def lcm(x,y): - return (x*y)//gcd(x,y) + + +def lcm(x, y): + return (x * y) // gcd(x, y) + n = int(input()) -g=1 -for i in range(1,n+1): - g=lcm(g,i) +g = 1 +for i in range(1, n + 1): + g = lcm(g, i) print(g) diff --git a/project_euler/problem_06/sol1.py b/project_euler/problem_06/sol1.py index 852d4e2f9fc4..135723b72bab 100644 --- a/project_euler/problem_06/sol1.py +++ b/project_euler/problem_06/sol1.py @@ -13,8 +13,8 @@ suma = 0 sumb = 0 n = int(input()) -for i in range(1,n+1): - suma += i**2 +for i in range(1, n + 1): + suma += i ** 2 sumb += i -sum = sumb**2 - suma +sum = sumb 
** 2 - suma print(sum) diff --git a/project_euler/problem_06/sol2.py b/project_euler/problem_06/sol2.py index aa8aea58fd7b..179947b40598 100644 --- a/project_euler/problem_06/sol2.py +++ b/project_euler/problem_06/sol2.py @@ -9,8 +9,9 @@ Find the difference between the sum of the squares of the first N natural numbers and the square of the sum. ''' from __future__ import print_function + n = int(input()) -suma = n*(n+1)/2 +suma = n * (n + 1) / 2 suma **= 2 -sumb = n*(n+1)*(2*n+1)/6 -print(suma-sumb) +sumb = n * (n + 1) * (2 * n + 1) / 6 +print(suma - sumb) diff --git a/project_euler/problem_06/sol3.py b/project_euler/problem_06/sol3.py index b2d9f444d9a9..cb65cf164a6d 100644 --- a/project_euler/problem_06/sol3.py +++ b/project_euler/problem_06/sol3.py @@ -8,13 +8,19 @@ Find the difference between the sum of the squares of the first N natural numbers and the square of the sum. ''' from __future__ import print_function + import math + + def problem6(number=100): - sum_of_squares = sum([i*i for i in range(1,number+1)]) - square_of_sum = int(math.pow(sum(range(1,number+1)),2)) + sum_of_squares = sum([i * i for i in range(1, number + 1)]) + square_of_sum = int(math.pow(sum(range(1, number + 1)), 2)) return square_of_sum - sum_of_squares + + def main(): print(problem6()) + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/project_euler/problem_07/sol1.py b/project_euler/problem_07/sol1.py index ea31d0b2bb2c..534314ad6cd6 100644 --- a/project_euler/problem_07/sol1.py +++ b/project_euler/problem_07/sol1.py @@ -4,27 +4,32 @@ What is the Nth prime number? 
''' from __future__ import print_function + from math import sqrt + + def isprime(n): - if (n==2): + if (n == 2): return True - elif (n%2==0): + elif (n % 2 == 0): return False else: - sq = int(sqrt(n))+1 - for i in range(3,sq,2): - if(n%i==0): + sq = int(sqrt(n)) + 1 + for i in range(3, sq, 2): + if (n % i == 0): return False return True + + n = int(input()) -i=0 -j=1 -while(i!=n and j<3): - j+=1 +i = 0 +j = 1 +while (i != n and j < 3): + j += 1 if (isprime(j)): - i+=1 -while(i!=n): - j+=2 - if(isprime(j)): - i+=1 + i += 1 +while (i != n): + j += 2 + if (isprime(j)): + i += 1 print(j) diff --git a/project_euler/problem_07/sol2.py b/project_euler/problem_07/sol2.py index fdf39cbc4d26..07f972efd850 100644 --- a/project_euler/problem_07/sol2.py +++ b/project_euler/problem_07/sol2.py @@ -1,16 +1,18 @@ # By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13. What is the Nth prime number? def isprime(number): - for i in range(2,int(number**0.5)+1): - if number%i==0: - return False - return True -n = int(input('Enter The N\'th Prime Number You Want To Get: ')) # Ask For The N'th Prime Number Wanted + for i in range(2, int(number ** 0.5) + 1): + if number % i == 0: + return False + return True + + +n = int(input('Enter The N\'th Prime Number You Want To Get: ')) # Ask For The N'th Prime Number Wanted primes = [] num = 2 while len(primes) < n: - if isprime(num): - primes.append(num) - num += 1 - else: - num += 1 + if isprime(num): + primes.append(num) + num += 1 + else: + num += 1 print(primes[len(primes) - 1]) diff --git a/project_euler/problem_07/sol3.py b/project_euler/problem_07/sol3.py index 0001e4318cc9..4f37dfb7f307 100644 --- a/project_euler/problem_07/sol3.py +++ b/project_euler/problem_07/sol3.py @@ -4,25 +4,30 @@ What is the Nth prime number? 
''' from __future__ import print_function + +import itertools # from Python.Math import PrimeCheck import math -import itertools + + def primeCheck(number): if number % 2 == 0 and number > 2: return False return all(number % i for i in range(3, int(math.sqrt(number)) + 1, 2)) + def prime_generator(): num = 2 while True: if primeCheck(num): yield num - num+=1 + num += 1 + def main(): n = int(input('Enter The N\'th Prime Number You Want To Get: ')) # Ask For The N'th Prime Number Wanted - print(next(itertools.islice(prime_generator(),n-1,n))) + print(next(itertools.islice(prime_generator(), n - 1, n))) if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/project_euler/problem_08/sol1.py b/project_euler/problem_08/sol1.py index 817fd3f87507..80b1ce4df9c1 100644 --- a/project_euler/problem_08/sol1.py +++ b/project_euler/problem_08/sol1.py @@ -1,11 +1,13 @@ import sys + + def main(): - LargestProduct = -sys.maxsize-1 - number=input().strip() - for i in range(len(number)-12): - product=1 + LargestProduct = -sys.maxsize - 1 + number = input().strip() + for i in range(len(number) - 12): + product = 1 for j in range(13): - product *= int(number[i+j]) + product *= int(number[i + j]) if product > LargestProduct: LargestProduct = product print(LargestProduct) diff --git a/project_euler/problem_08/sol2.py b/project_euler/problem_08/sol2.py index ae03f3ad0aa6..324b60f26767 100644 --- a/project_euler/problem_08/sol2.py +++ b/project_euler/problem_08/sol2.py @@ -1,8 +1,10 @@ from functools import reduce + def main(): - number=input().strip() - print(max([reduce(lambda x,y: int(x)*int(y),number[i:i+13]) for i in range(len(number)-12)])) - + number = input().strip() + print(max([reduce(lambda x, y: int(x) * int(y), number[i:i + 13]) for i in range(len(number) - 12)])) + + if __name__ == '__main__': main() diff --git a/project_euler/problem_09/sol1.py b/project_euler/problem_09/sol1.py index e54c543b4721..15dfd8b6fe81 100644 --- 
a/project_euler/problem_09/sol1.py +++ b/project_euler/problem_09/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + # Program to find the product of a,b,c which are Pythagorean Triplet that satisfice the following: # 1. a < b < c # 2. a**2 + b**2 = c**2 @@ -8,8 +9,8 @@ for a in range(300): for b in range(400): for c in range(500): - if(a < b < c): - if((a**2) + (b**2) == (c**2)): - if((a+b+c) == 1000): - print(("Product of",a,"*",b,"*",c,"=",(a*b*c))) + if (a < b < c): + if ((a ** 2) + (b ** 2) == (c ** 2)): + if ((a + b + c) == 1000): + print(("Product of", a, "*", b, "*", c, "=", (a * b * c))) break diff --git a/project_euler/problem_09/sol2.py b/project_euler/problem_09/sol2.py index 933f5c557d71..8eb89d184115 100644 --- a/project_euler/problem_09/sol2.py +++ b/project_euler/problem_09/sol2.py @@ -2,17 +2,17 @@ a^2+b^2=c^2 Given N, Check if there exists any Pythagorean triplet for which a+b+c=N Find maximum possible value of product of a,b,c among all such Pythagorean triplets, If there is no such Pythagorean triplet print -1.""" -#!/bin/python3 +# !/bin/python3 -product=-1 -d=0 +product = -1 +d = 0 N = int(input()) -for a in range(1,N//3): +for a in range(1, N // 3): """Solving the two equations a**2+b**2=c**2 and a+b+c=N eliminating c """ - b=(N*N-2*a*N)//(2*N-2*a) - c=N-a-b - if c*c==(a*a+b*b): - d=(a*b*c) - if d>=product: - product=d + b = (N * N - 2 * a * N) // (2 * N - 2 * a) + c = N - a - b + if c * c == (a * a + b * b): + d = (a * b * c) + if d >= product: + product = d print(product) diff --git a/project_euler/problem_09/sol3.py b/project_euler/problem_09/sol3.py index 5ebf38e76e1a..e11368b9a7db 100644 --- a/project_euler/problem_09/sol3.py +++ b/project_euler/problem_09/sol3.py @@ -1,6 +1,7 @@ def main(): - print([a*b*c for a in range(1,999) for b in range(a,999) for c in range(b,999) - if (a*a+b*b==c*c) and (a+b+c==1000 ) ][0]) - + print([a * b * c for a in range(1, 999) for b in range(a, 999) for c in range(b, 999) + if (a * a + b * b 
== c * c) and (a + b + c == 1000)][0]) + + if __name__ == '__main__': main() diff --git a/project_euler/problem_10/sol1.py b/project_euler/problem_10/sol1.py index 94e5b7362114..2435f1d1b3cb 100644 --- a/project_euler/problem_10/sol1.py +++ b/project_euler/problem_10/sol1.py @@ -1,38 +1,42 @@ from __future__ import print_function + from math import sqrt try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def is_prime(n): - for i in xrange(2, int(sqrt(n))+1): - if n%i == 0: - return False + for i in xrange(2, int(sqrt(n)) + 1): + if n % i == 0: + return False + + return True - return True def sum_of_primes(n): - if n > 2: - sumOfPrimes = 2 - else: - return 0 + if n > 2: + sumOfPrimes = 2 + else: + return 0 + + for i in xrange(3, n, 2): + if is_prime(i): + sumOfPrimes += i - for i in xrange(3, n, 2): - if is_prime(i): - sumOfPrimes += i + return sumOfPrimes - return sumOfPrimes if __name__ == '__main__': - import sys - - if len(sys.argv) == 1: - print(sum_of_primes(2000000)) - else: - try: - n = int(sys.argv[1]) - print(sum_of_primes(n)) - except ValueError: - print('Invalid entry - please enter a number.') + import sys + + if len(sys.argv) == 1: + print(sum_of_primes(2000000)) + else: + try: + n = int(sys.argv[1]) + print(sum_of_primes(n)) + except ValueError: + print('Invalid entry - please enter a number.') diff --git a/project_euler/problem_10/sol2.py b/project_euler/problem_10/sol2.py index 22df95c063e2..8b5aad7dc31a 100644 --- a/project_euler/problem_10/sol2.py +++ b/project_euler/problem_10/sol2.py @@ -1,22 +1,26 @@ -#from Python.Math import prime_generator -import math -from itertools import takewhile +# from Python.Math import prime_generator +import math +from itertools import takewhile + def primeCheck(number): if number % 2 == 0 and number > 2: return False return all(number % i for i in range(3, int(math.sqrt(number)) + 1, 2)) - + + def prime_generator(): num = 2 while True: if 
primeCheck(num): yield num - num+=1 - + num += 1 + + def main(): - n = int(input('Enter The upper limit of prime numbers: ')) - print(sum(takewhile(lambda x: x < n,prime_generator()))) - + n = int(input('Enter The upper limit of prime numbers: ')) + print(sum(takewhile(lambda x: x < n, prime_generator()))) + + if __name__ == '__main__': - main() + main() diff --git a/project_euler/problem_11/sol1.py b/project_euler/problem_11/sol1.py index b882dc449156..83337f42d0e9 100644 --- a/project_euler/problem_11/sol1.py +++ b/project_euler/problem_11/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' What is the greatest product of four adjacent numbers (horizontally, vertically, or diagonally) in this 20x20 array? @@ -25,44 +26,46 @@ ''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 2 + xrange = range # Python 2 + def largest_product(grid): - nColumns = len(grid[0]) - nRows = len(grid) + nColumns = len(grid[0]) + nRows = len(grid) + + largest = 0 + lrDiagProduct = 0 + rlDiagProduct = 0 - largest = 0 - lrDiagProduct = 0 - rlDiagProduct = 0 + # Check vertically, horizontally, diagonally at the same time (only works for nxn grid) + for i in xrange(nColumns): + for j in xrange(nRows - 3): + vertProduct = grid[j][i] * grid[j + 1][i] * grid[j + 2][i] * grid[j + 3][i] + horzProduct = grid[i][j] * grid[i][j + 1] * grid[i][j + 2] * grid[i][j + 3] - #Check vertically, horizontally, diagonally at the same time (only works for nxn grid) - for i in xrange(nColumns): - for j in xrange(nRows-3): - vertProduct = grid[j][i]*grid[j+1][i]*grid[j+2][i]*grid[j+3][i] - horzProduct = grid[i][j]*grid[i][j+1]*grid[i][j+2]*grid[i][j+3] + # Left-to-right diagonal (\) product + if (i < nColumns - 3): + lrDiagProduct = grid[i][j] * grid[i + 1][j + 1] * grid[i + 2][j + 2] * grid[i + 3][j + 3] - #Left-to-right diagonal (\) product - if (i < nColumns-3): - lrDiagProduct = grid[i][j]*grid[i+1][j+1]*grid[i+2][j+2]*grid[i+3][j+3] + # Right-to-left 
diagonal(/) product + if (i > 2): + rlDiagProduct = grid[i][j] * grid[i - 1][j + 1] * grid[i - 2][j + 2] * grid[i - 3][j + 3] - #Right-to-left diagonal(/) product - if (i > 2): - rlDiagProduct = grid[i][j]*grid[i-1][j+1]*grid[i-2][j+2]*grid[i-3][j+3] + maxProduct = max(vertProduct, horzProduct, lrDiagProduct, rlDiagProduct) + if maxProduct > largest: + largest = maxProduct - maxProduct = max(vertProduct, horzProduct, lrDiagProduct, rlDiagProduct) - if maxProduct > largest: - largest = maxProduct + return largest - return largest if __name__ == '__main__': - grid = [] - with open('grid.txt') as file: - for line in file: - grid.append(line.strip('\n').split(' ')) + grid = [] + with open('grid.txt') as file: + for line in file: + grid.append(line.strip('\n').split(' ')) - grid = [[int(i) for i in grid[j]] for j in xrange(len(grid))] + grid = [[int(i) for i in grid[j]] for j in xrange(len(grid))] - print(largest_product(grid)) \ No newline at end of file + print(largest_product(grid)) diff --git a/project_euler/problem_11/sol2.py b/project_euler/problem_11/sol2.py index b03395f01697..ebb845a3a34b 100644 --- a/project_euler/problem_11/sol2.py +++ b/project_euler/problem_11/sol2.py @@ -1,39 +1,40 @@ def main(): - with open ("grid.txt", "r") as f: - l = [] - for i in range(20): - l.append([int(x) for x in f.readline().split()]) + with open("grid.txt", "r") as f: + l = [] + for i in range(20): + l.append([int(x) for x in f.readline().split()]) - maximum = 0 + maximum = 0 - # right - for i in range(20): - for j in range(17): - temp = l[i][j] * l[i][j+1] * l[i][j+2] * l[i][j+3] - if temp > maximum: - maximum = temp + # right + for i in range(20): + for j in range(17): + temp = l[i][j] * l[i][j + 1] * l[i][j + 2] * l[i][j + 3] + if temp > maximum: + maximum = temp - # down - for i in range(17): - for j in range(20): - temp = l[i][j] * l[i+1][j] * l[i+2][j] * l[i+3][j] - if temp > maximum: - maximum = temp + # down + for i in range(17): + for j in range(20): + temp = l[i][j] * 
l[i + 1][j] * l[i + 2][j] * l[i + 3][j] + if temp > maximum: + maximum = temp + + # diagonal 1 + for i in range(17): + for j in range(17): + temp = l[i][j] * l[i + 1][j + 1] * l[i + 2][j + 2] * l[i + 3][j + 3] + if temp > maximum: + maximum = temp + + # diagonal 2 + for i in range(17): + for j in range(3, 20): + temp = l[i][j] * l[i + 1][j - 1] * l[i + 2][j - 2] * l[i + 3][j - 3] + if temp > maximum: + maximum = temp + print(maximum) - #diagonal 1 - for i in range(17): - for j in range(17): - temp = l[i][j] * l[i+1][j+1] * l[i+2][j+2] * l[i+3][j+3] - if temp > maximum: - maximum = temp - - #diagonal 2 - for i in range(17): - for j in range(3, 20): - temp = l[i][j] * l[i+1][j-1] * l[i+2][j-2] * l[i+3][j-3] - if temp > maximum: - maximum = temp - print(maximum) if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/project_euler/problem_12/sol1.py b/project_euler/problem_12/sol1.py index 73d48a2ec897..a62cce3b243e 100644 --- a/project_euler/problem_12/sol1.py +++ b/project_euler/problem_12/sol1.py @@ -1,5 +1,7 @@ from __future__ import print_function + from math import sqrt + ''' Highly divisible triangular numbers Problem 12 @@ -21,28 +23,30 @@ What is the value of the first triangle number to have over five hundred divisors? 
''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def count_divisors(n): - nDivisors = 0 - for i in xrange(1, int(sqrt(n))+1): - if n%i == 0: - nDivisors += 2 - #check if n is perfect square - if n**0.5 == int(n**0.5): - nDivisors -= 1 - return nDivisors + nDivisors = 0 + for i in xrange(1, int(sqrt(n)) + 1): + if n % i == 0: + nDivisors += 2 + # check if n is perfect square + if n ** 0.5 == int(n ** 0.5): + nDivisors -= 1 + return nDivisors + tNum = 1 i = 1 while True: - i += 1 - tNum += i + i += 1 + tNum += i - if count_divisors(tNum) > 500: - break + if count_divisors(tNum) > 500: + break print(tNum) diff --git a/project_euler/problem_12/sol2.py b/project_euler/problem_12/sol2.py index 479ab2b900cb..07cf0ddf5fe0 100644 --- a/project_euler/problem_12/sol2.py +++ b/project_euler/problem_12/sol2.py @@ -1,8 +1,10 @@ -def triangle_number_generator(): - for n in range(1,1000000): - yield n*(n+1)//2 - -def count_divisors(n): - return sum([2 for i in range(1,int(n**0.5)+1) if n%i==0 and i*i != n]) +def triangle_number_generator(): + for n in range(1, 1000000): + yield n * (n + 1) // 2 + + +def count_divisors(n): + return sum([2 for i in range(1, int(n ** 0.5) + 1) if n % i == 0 and i * i != n]) + print(next(i for i in triangle_number_generator() if count_divisors(i) > 500)) diff --git a/project_euler/problem_13/sol1.py b/project_euler/problem_13/sol1.py index faaaad5e88c1..4088f6580ead 100644 --- a/project_euler/problem_13/sol1.py +++ b/project_euler/problem_13/sol1.py @@ -11,4 +11,3 @@ array.append(int(input().strip())) print(str(sum(array))[:10]) - diff --git a/project_euler/problem_14/sol1.py b/project_euler/problem_14/sol1.py index 9037f6eb8bd5..148e5aff9a8f 100644 --- a/project_euler/problem_14/sol1.py +++ b/project_euler/problem_14/sol1.py @@ -1,21 +1,22 @@ from __future__ import print_function + largest_number = 0 pre_counter = 0 -for input1 in range(750000,1000000): +for input1 in 
range(750000, 1000000): counter = 1 number = input1 while number > 1: if number % 2 == 0: - number /=2 + number /= 2 counter += 1 else: - number = (3*number)+1 + number = (3 * number) + 1 counter += 1 if counter > pre_counter: largest_number = input1 pre_counter = counter -print(('Largest Number:',largest_number,'->',pre_counter,'digits')) +print(('Largest Number:', largest_number, '->', pre_counter, 'digits')) diff --git a/project_euler/problem_14/sol2.py b/project_euler/problem_14/sol2.py index b9de42be1108..981f97b2a52c 100644 --- a/project_euler/problem_14/sol2.py +++ b/project_euler/problem_14/sol2.py @@ -1,16 +1,17 @@ def collatz_sequence(n): - """Collatz conjecture: start with any positive integer n.Next termis obtained from the previous term as follows: - if the previous term is even, the next term is one half the previous term. - If the previous term is odd, the next term is 3 times the previous term plus 1. - The conjecture states the sequence will always reach 1 regaardess of starting n.""" - sequence = [n] - while n != 1: - if n % 2 == 0:# even - n //= 2 - else: - n = 3*n +1 - sequence.append(n) - return sequence + """Collatz conjecture: start with any positive integer n.Next termis obtained from the previous term as follows: + if the previous term is even, the next term is one half the previous term. + If the previous term is odd, the next term is 3 times the previous term plus 1. 
+ The conjecture states the sequence will always reach 1 regaardess of starting n.""" + sequence = [n] + while n != 1: + if n % 2 == 0: # even + n //= 2 + else: + n = 3 * n + 1 + sequence.append(n) + return sequence -answer = max([(len(collatz_sequence(i)), i) for i in range(1,1000000)]) -print("Longest Collatz sequence under one million is %d with length %d" % (answer[1],answer[0])) \ No newline at end of file + +answer = max([(len(collatz_sequence(i)), i) for i in range(1, 1000000)]) +print("Longest Collatz sequence under one million is %d with length %d" % (answer[1], answer[0])) diff --git a/project_euler/problem_15/sol1.py b/project_euler/problem_15/sol1.py index d24748011ef9..7b6ac2561f2c 100644 --- a/project_euler/problem_15/sol1.py +++ b/project_euler/problem_15/sol1.py @@ -1,20 +1,23 @@ from __future__ import print_function + from math import factorial + def lattice_paths(n): - n = 2*n #middle entry of odd rows starting at row 3 is the solution for n = 1, 2, 3,... - k = n/2 + n = 2 * n # middle entry of odd rows starting at row 3 is the solution for n = 1, 2, 3,... 
+ k = n / 2 + + return factorial(n) / (factorial(k) * factorial(n - k)) - return factorial(n)/(factorial(k)*factorial(n-k)) if __name__ == '__main__': - import sys + import sys - if len(sys.argv) == 1: - print(lattice_paths(20)) - else: - try: - n = int(sys.argv[1]) - print(lattice_paths(n)) - except ValueError: - print('Invalid entry - please enter a number.') + if len(sys.argv) == 1: + print(lattice_paths(20)) + else: + try: + n = int(sys.argv[1]) + print(lattice_paths(n)) + except ValueError: + print('Invalid entry - please enter a number.') diff --git a/project_euler/problem_16/sol1.py b/project_euler/problem_16/sol1.py index 05c7916bd10a..e0d08b5fb2b9 100644 --- a/project_euler/problem_16/sol1.py +++ b/project_euler/problem_16/sol1.py @@ -1,5 +1,5 @@ power = int(input("Enter the power of 2: ")) -num = 2**power +num = 2 ** power string_num = str(num) @@ -7,9 +7,9 @@ sum_of_num = 0 -print("2 ^",power,"=",num) +print("2 ^", power, "=", num) for i in list_num: sum_of_num += int(i) -print("Sum of the digits are:",sum_of_num) +print("Sum of the digits are:", sum_of_num) diff --git a/project_euler/problem_17/sol1.py b/project_euler/problem_17/sol1.py index 8dd6f1af2093..702224724b2b 100644 --- a/project_euler/problem_17/sol1.py +++ b/project_euler/problem_17/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Number letter counts Problem 17 @@ -12,24 +13,24 @@ contains 20 letters. The use of "and" when writing out numbers is in compliance with British usage. 
''' -ones_counts = [0, 3, 3, 5, 4, 4, 3, 5, 5, 4, 3, 6, 6, 8, 8, 7, 7, 9, 8, 8] #number of letters in zero, one, two, ..., nineteen (0 for zero since it's never said aloud) -tens_counts = [0, 0, 6, 6, 5, 5, 5, 7, 6, 6] #number of letters in twenty, thirty, ..., ninety (0 for numbers less than 20 due to inconsistency in teens) +ones_counts = [0, 3, 3, 5, 4, 4, 3, 5, 5, 4, 3, 6, 6, 8, 8, 7, 7, 9, 8, 8] # number of letters in zero, one, two, ..., nineteen (0 for zero since it's never said aloud) +tens_counts = [0, 0, 6, 6, 5, 5, 5, 7, 6, 6] # number of letters in twenty, thirty, ..., ninety (0 for numbers less than 20 due to inconsistency in teens) count = 0 for i in range(1, 1001): - if i < 1000: - if i >= 100: - count += ones_counts[i/100] + 7 #add number of letters for "n hundred" - - if i%100 != 0: - count += 3 #add number of letters for "and" if number is not multiple of 100 - - if 0 < i%100 < 20: - count += ones_counts[i%100] #add number of letters for one, two, three, ..., nineteen (could be combined with below if not for inconsistency in teens) - else: - count += ones_counts[i%10] + tens_counts[(i%100-i%10)/10] #add number of letters for twenty, twenty one, ..., ninety nine - else: - count += ones_counts[i/1000] + 8 + if i < 1000: + if i >= 100: + count += ones_counts[i / 100] + 7 # add number of letters for "n hundred" + + if i % 100 != 0: + count += 3 # add number of letters for "and" if number is not multiple of 100 + + if 0 < i % 100 < 20: + count += ones_counts[i % 100] # add number of letters for one, two, three, ..., nineteen (could be combined with below if not for inconsistency in teens) + else: + count += ones_counts[i % 10] + tens_counts[(i % 100 - i % 10) / 10] # add number of letters for twenty, twenty one, ..., ninety nine + else: + count += ones_counts[i / 1000] + 8 print(count) diff --git a/project_euler/problem_19/sol1.py b/project_euler/problem_19/sol1.py index 13e520ca76e4..614f1426fada 100644 --- a/project_euler/problem_19/sol1.py +++ 
b/project_euler/problem_19/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Counting Sundays Problem 19 @@ -27,25 +28,25 @@ sundays = 0 while year < 2001: - day += 7 - - if (year%4 == 0 and not year%100 == 0) or (year%400 == 0): - if day > days_per_month[month-1] and month != 2: - month += 1 - day = day-days_per_month[month-2] - elif day > 29 and month == 2: - month += 1 - day = day-29 - else: - if day > days_per_month[month-1]: - month += 1 - day = day-days_per_month[month-2] - - if month > 12: - year += 1 - month = 1 - - if year < 2001 and day == 1: - sundays += 1 + day += 7 + + if (year % 4 == 0 and not year % 100 == 0) or (year % 400 == 0): + if day > days_per_month[month - 1] and month != 2: + month += 1 + day = day - days_per_month[month - 2] + elif day > 29 and month == 2: + month += 1 + day = day - 29 + else: + if day > days_per_month[month - 1]: + month += 1 + day = day - days_per_month[month - 2] + + if month > 12: + year += 1 + month = 1 + + if year < 2001 and day == 1: + sundays += 1 print(sundays) diff --git a/project_euler/problem_20/sol1.py b/project_euler/problem_20/sol1.py index 73e41d5cc8fa..21687afccd2e 100644 --- a/project_euler/problem_20/sol1.py +++ b/project_euler/problem_20/sol1.py @@ -1,19 +1,21 @@ # Finding the factorial. def factorial(n): fact = 1 - for i in range(1,n+1): + for i in range(1, n + 1): fact *= i return fact + # Spliting the digits and adding it. def split_and_add(number): sum_of_digits = 0 - while(number>0): + while (number > 0): last_digit = number % 10 sum_of_digits += last_digit - number = int(number/10) # Removing the last_digit from the given number. + number = int(number / 10) # Removing the last_digit from the given number. return sum_of_digits + # Taking the user input. 
number = int(input("Enter the Number: ")) diff --git a/project_euler/problem_20/sol2.py b/project_euler/problem_20/sol2.py index bca9af9cb9ef..c2fbed02f763 100644 --- a/project_euler/problem_20/sol2.py +++ b/project_euler/problem_20/sol2.py @@ -1,5 +1,9 @@ from math import factorial + + def main(): - print(sum([int(x) for x in str(factorial(100))])) + print(sum([int(x) for x in str(factorial(100))])) + + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/project_euler/problem_21/sol1.py b/project_euler/problem_21/sol1.py index da29a5c7b631..d209a6e57a80 100644 --- a/project_euler/problem_21/sol1.py +++ b/project_euler/problem_21/sol1.py @@ -1,6 +1,8 @@ -#-.- coding: latin-1 -.- +# -.- coding: latin-1 -.- from __future__ import print_function + from math import sqrt + ''' Amicable Numbers Problem 21 @@ -13,18 +15,20 @@ Evaluate the sum of all the amicable numbers under 10000. ''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def sum_of_divisors(n): - total = 0 - for i in xrange(1, int(sqrt(n)+1)): - if n%i == 0 and i != sqrt(n): - total += i + n//i - elif i == sqrt(n): - total += i - return total-n - -total = [i for i in range(1,10000) if sum_of_divisors(sum_of_divisors(i)) == i and sum_of_divisors(i) != i] + total = 0 + for i in xrange(1, int(sqrt(n) + 1)): + if n % i == 0 and i != sqrt(n): + total += i + n // i + elif i == sqrt(n): + total += i + return total - n + + +total = [i for i in range(1, 10000) if sum_of_divisors(sum_of_divisors(i)) == i and sum_of_divisors(i) != i] print(sum(total)) diff --git a/project_euler/problem_22/sol1.py b/project_euler/problem_22/sol1.py index 7754306583dc..45c0460a0dbf 100644 --- a/project_euler/problem_22/sol1.py +++ b/project_euler/problem_22/sol1.py @@ -1,5 +1,6 @@ # -*- coding: latin-1 -*- from __future__ import print_function + ''' Name scores Problem 22 @@ -14,13 +15,13 @@ What is the total of all the name scores in 
the file? ''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 with open('p022_names.txt') as file: - names = str(file.readlines()[0]) - names = names.replace('"', '').split(',') + names = str(file.readlines()[0]) + names = names.replace('"', '').split(',') names.sort() @@ -28,10 +29,10 @@ total_score = 0 for i, name in enumerate(names): - for letter in name: - name_score += ord(letter) - 64 + for letter in name: + name_score += ord(letter) - 64 - total_score += (i+1)*name_score - name_score = 0 + total_score += (i + 1) * name_score + name_score = 0 -print(total_score) \ No newline at end of file +print(total_score) diff --git a/project_euler/problem_22/sol2.py b/project_euler/problem_22/sol2.py index d7f9abf09d49..c8bb33d07b18 100644 --- a/project_euler/problem_22/sol2.py +++ b/project_euler/problem_22/sol2.py @@ -1,533 +1,533 @@ def main(): - name = [ - "MARY", "PATRICIA", "LINDA", "BARBARA", "ELIZABETH", "JENNIFER", "MARIA", "SUSAN", "MARGARET", "DOROTHY", - "LISA", "NANCY", "KAREN", "BETTY", "HELEN", "SANDRA", "DONNA", "CAROL", "RUTH", "SHARON", - "MICHELLE", "LAURA", "SARAH", "KIMBERLY", "DEBORAH", "JESSICA", "SHIRLEY", "CYNTHIA", "ANGELA", "MELISSA", - "BRENDA", "AMY", "ANNA", "REBECCA", "VIRGINIA", "KATHLEEN", "PAMELA", "MARTHA", "DEBRA", "AMANDA", - "STEPHANIE", "CAROLYN", "CHRISTINE", "MARIE", "JANET", "CATHERINE", "FRANCES", "ANN", "JOYCE", "DIANE", - "ALICE", "JULIE", "HEATHER", "TERESA", "DORIS", "GLORIA", "EVELYN", "JEAN", "CHERYL", "MILDRED", - "KATHERINE", "JOAN", "ASHLEY", "JUDITH", "ROSE", "JANICE", "KELLY", "NICOLE", "JUDY", "CHRISTINA", - "KATHY", "THERESA", "BEVERLY", "DENISE", "TAMMY", "IRENE", "JANE", "LORI", "RACHEL", "MARILYN", - "ANDREA", "KATHRYN", "LOUISE", "SARA", "ANNE", "JACQUELINE", "WANDA", "BONNIE", "JULIA", "RUBY", - "LOIS", "TINA", "PHYLLIS", "NORMA", "PAULA", "DIANA", "ANNIE", "LILLIAN", "EMILY", "ROBIN", - "PEGGY", "CRYSTAL", "GLADYS", "RITA", "DAWN", "CONNIE", 
"FLORENCE", "TRACY", "EDNA", "TIFFANY", - "CARMEN", "ROSA", "CINDY", "GRACE", "WENDY", "VICTORIA", "EDITH", "KIM", "SHERRY", "SYLVIA", - "JOSEPHINE", "THELMA", "SHANNON", "SHEILA", "ETHEL", "ELLEN", "ELAINE", "MARJORIE", "CARRIE", "CHARLOTTE", - "MONICA", "ESTHER", "PAULINE", "EMMA", "JUANITA", "ANITA", "RHONDA", "HAZEL", "AMBER", "EVA", - "DEBBIE", "APRIL", "LESLIE", "CLARA", "LUCILLE", "JAMIE", "JOANNE", "ELEANOR", "VALERIE", "DANIELLE", - "MEGAN", "ALICIA", "SUZANNE", "MICHELE", "GAIL", "BERTHA", "DARLENE", "VERONICA", "JILL", "ERIN", - "GERALDINE", "LAUREN", "CATHY", "JOANN", "LORRAINE", "LYNN", "SALLY", "REGINA", "ERICA", "BEATRICE", - "DOLORES", "BERNICE", "AUDREY", "YVONNE", "ANNETTE", "JUNE", "SAMANTHA", "MARION", "DANA", "STACY", - "ANA", "RENEE", "IDA", "VIVIAN", "ROBERTA", "HOLLY", "BRITTANY", "MELANIE", "LORETTA", "YOLANDA", - "JEANETTE", "LAURIE", "KATIE", "KRISTEN", "VANESSA", "ALMA", "SUE", "ELSIE", "BETH", "JEANNE", - "VICKI", "CARLA", "TARA", "ROSEMARY", "EILEEN", "TERRI", "GERTRUDE", "LUCY", "TONYA", "ELLA", - "STACEY", "WILMA", "GINA", "KRISTIN", "JESSIE", "NATALIE", "AGNES", "VERA", "WILLIE", "CHARLENE", - "BESSIE", "DELORES", "MELINDA", "PEARL", "ARLENE", "MAUREEN", "COLLEEN", "ALLISON", "TAMARA", "JOY", - "GEORGIA", "CONSTANCE", "LILLIE", "CLAUDIA", "JACKIE", "MARCIA", "TANYA", "NELLIE", "MINNIE", "MARLENE", - "HEIDI", "GLENDA", "LYDIA", "VIOLA", "COURTNEY", "MARIAN", "STELLA", "CAROLINE", "DORA", "JO", - "VICKIE", "MATTIE", "TERRY", "MAXINE", "IRMA", "MABEL", "MARSHA", "MYRTLE", "LENA", "CHRISTY", - "DEANNA", "PATSY", "HILDA", "GWENDOLYN", "JENNIE", "NORA", "MARGIE", "NINA", "CASSANDRA", "LEAH", - "PENNY", "KAY", "PRISCILLA", "NAOMI", "CAROLE", "BRANDY", "OLGA", "BILLIE", "DIANNE", "TRACEY", - "LEONA", "JENNY", "FELICIA", "SONIA", "MIRIAM", "VELMA", "BECKY", "BOBBIE", "VIOLET", "KRISTINA", - "TONI", "MISTY", "MAE", "SHELLY", "DAISY", "RAMONA", "SHERRI", "ERIKA", "KATRINA", "CLAIRE", - "LINDSEY", "LINDSAY", "GENEVA", "GUADALUPE", "BELINDA", 
"MARGARITA", "SHERYL", "CORA", "FAYE", "ADA", - "NATASHA", "SABRINA", "ISABEL", "MARGUERITE", "HATTIE", "HARRIET", "MOLLY", "CECILIA", "KRISTI", "BRANDI", - "BLANCHE", "SANDY", "ROSIE", "JOANNA", "IRIS", "EUNICE", "ANGIE", "INEZ", "LYNDA", "MADELINE", - "AMELIA", "ALBERTA", "GENEVIEVE", "MONIQUE", "JODI", "JANIE", "MAGGIE", "KAYLA", "SONYA", "JAN", - "LEE", "KRISTINE", "CANDACE", "FANNIE", "MARYANN", "OPAL", "ALISON", "YVETTE", "MELODY", "LUZ", - "SUSIE", "OLIVIA", "FLORA", "SHELLEY", "KRISTY", "MAMIE", "LULA", "LOLA", "VERNA", "BEULAH", - "ANTOINETTE", "CANDICE", "JUANA", "JEANNETTE", "PAM", "KELLI", "HANNAH", "WHITNEY", "BRIDGET", "KARLA", - "CELIA", "LATOYA", "PATTY", "SHELIA", "GAYLE", "DELLA", "VICKY", "LYNNE", "SHERI", "MARIANNE", - "KARA", "JACQUELYN", "ERMA", "BLANCA", "MYRA", "LETICIA", "PAT", "KRISTA", "ROXANNE", "ANGELICA", - "JOHNNIE", "ROBYN", "FRANCIS", "ADRIENNE", "ROSALIE", "ALEXANDRA", "BROOKE", "BETHANY", "SADIE", "BERNADETTE", - "TRACI", "JODY", "KENDRA", "JASMINE", "NICHOLE", "RACHAEL", "CHELSEA", "MABLE", "ERNESTINE", "MURIEL", - "MARCELLA", "ELENA", "KRYSTAL", "ANGELINA", "NADINE", "KARI", "ESTELLE", "DIANNA", "PAULETTE", "LORA", - "MONA", "DOREEN", "ROSEMARIE", "ANGEL", "DESIREE", "ANTONIA", "HOPE", "GINGER", "JANIS", "BETSY", - "CHRISTIE", "FREDA", "MERCEDES", "MEREDITH", "LYNETTE", "TERI", "CRISTINA", "EULA", "LEIGH", "MEGHAN", - "SOPHIA", "ELOISE", "ROCHELLE", "GRETCHEN", "CECELIA", "RAQUEL", "HENRIETTA", "ALYSSA", "JANA", "KELLEY", - "GWEN", "KERRY", "JENNA", "TRICIA", "LAVERNE", "OLIVE", "ALEXIS", "TASHA", "SILVIA", "ELVIRA", - "CASEY", "DELIA", "SOPHIE", "KATE", "PATTI", "LORENA", "KELLIE", "SONJA", "LILA", "LANA", - "DARLA", "MAY", "MINDY", "ESSIE", "MANDY", "LORENE", "ELSA", "JOSEFINA", "JEANNIE", "MIRANDA", - "DIXIE", "LUCIA", "MARTA", "FAITH", "LELA", "JOHANNA", "SHARI", "CAMILLE", "TAMI", "SHAWNA", - "ELISA", "EBONY", "MELBA", "ORA", "NETTIE", "TABITHA", "OLLIE", "JAIME", "WINIFRED", "KRISTIE", - "MARINA", "ALISHA", "AIMEE", 
"RENA", "MYRNA", "MARLA", "TAMMIE", "LATASHA", "BONITA", "PATRICE", - "RONDA", "SHERRIE", "ADDIE", "FRANCINE", "DELORIS", "STACIE", "ADRIANA", "CHERI", "SHELBY", "ABIGAIL", - "CELESTE", "JEWEL", "CARA", "ADELE", "REBEKAH", "LUCINDA", "DORTHY", "CHRIS", "EFFIE", "TRINA", - "REBA", "SHAWN", "SALLIE", "AURORA", "LENORA", "ETTA", "LOTTIE", "KERRI", "TRISHA", "NIKKI", - "ESTELLA", "FRANCISCA", "JOSIE", "TRACIE", "MARISSA", "KARIN", "BRITTNEY", "JANELLE", "LOURDES", "LAUREL", - "HELENE", "FERN", "ELVA", "CORINNE", "KELSEY", "INA", "BETTIE", "ELISABETH", "AIDA", "CAITLIN", - "INGRID", "IVA", "EUGENIA", "CHRISTA", "GOLDIE", "CASSIE", "MAUDE", "JENIFER", "THERESE", "FRANKIE", - "DENA", "LORNA", "JANETTE", "LATONYA", "CANDY", "MORGAN", "CONSUELO", "TAMIKA", "ROSETTA", "DEBORA", - "CHERIE", "POLLY", "DINA", "JEWELL", "FAY", "JILLIAN", "DOROTHEA", "NELL", "TRUDY", "ESPERANZA", - "PATRICA", "KIMBERLEY", "SHANNA", "HELENA", "CAROLINA", "CLEO", "STEFANIE", "ROSARIO", "OLA", "JANINE", - "MOLLIE", "LUPE", "ALISA", "LOU", "MARIBEL", "SUSANNE", "BETTE", "SUSANA", "ELISE", "CECILE", - "ISABELLE", "LESLEY", "JOCELYN", "PAIGE", "JONI", "RACHELLE", "LEOLA", "DAPHNE", "ALTA", "ESTER", - "PETRA", "GRACIELA", "IMOGENE", "JOLENE", "KEISHA", "LACEY", "GLENNA", "GABRIELA", "KERI", "URSULA", - "LIZZIE", "KIRSTEN", "SHANA", "ADELINE", "MAYRA", "JAYNE", "JACLYN", "GRACIE", "SONDRA", "CARMELA", - "MARISA", "ROSALIND", "CHARITY", "TONIA", "BEATRIZ", "MARISOL", "CLARICE", "JEANINE", "SHEENA", "ANGELINE", - "FRIEDA", "LILY", "ROBBIE", "SHAUNA", "MILLIE", "CLAUDETTE", "CATHLEEN", "ANGELIA", "GABRIELLE", "AUTUMN", - "KATHARINE", "SUMMER", "JODIE", "STACI", "LEA", "CHRISTI", "JIMMIE", "JUSTINE", "ELMA", "LUELLA", - "MARGRET", "DOMINIQUE", "SOCORRO", "RENE", "MARTINA", "MARGO", "MAVIS", "CALLIE", "BOBBI", "MARITZA", - "LUCILE", "LEANNE", "JEANNINE", "DEANA", "AILEEN", "LORIE", "LADONNA", "WILLA", "MANUELA", "GALE", - "SELMA", "DOLLY", "SYBIL", "ABBY", "LARA", "DALE", "IVY", "DEE", "WINNIE", "MARCY", - 
"LUISA", "JERI", "MAGDALENA", "OFELIA", "MEAGAN", "AUDRA", "MATILDA", "LEILA", "CORNELIA", "BIANCA", - "SIMONE", "BETTYE", "RANDI", "VIRGIE", "LATISHA", "BARBRA", "GEORGINA", "ELIZA", "LEANN", "BRIDGETTE", - "RHODA", "HALEY", "ADELA", "NOLA", "BERNADINE", "FLOSSIE", "ILA", "GRETA", "RUTHIE", "NELDA", - "MINERVA", "LILLY", "TERRIE", "LETHA", "HILARY", "ESTELA", "VALARIE", "BRIANNA", "ROSALYN", "EARLINE", - "CATALINA", "AVA", "MIA", "CLARISSA", "LIDIA", "CORRINE", "ALEXANDRIA", "CONCEPCION", "TIA", "SHARRON", - "RAE", "DONA", "ERICKA", "JAMI", "ELNORA", "CHANDRA", "LENORE", "NEVA", "MARYLOU", "MELISA", - "TABATHA", "SERENA", "AVIS", "ALLIE", "SOFIA", "JEANIE", "ODESSA", "NANNIE", "HARRIETT", "LORAINE", - "PENELOPE", "MILAGROS", "EMILIA", "BENITA", "ALLYSON", "ASHLEE", "TANIA", "TOMMIE", "ESMERALDA", "KARINA", - "EVE", "PEARLIE", "ZELMA", "MALINDA", "NOREEN", "TAMEKA", "SAUNDRA", "HILLARY", "AMIE", "ALTHEA", - "ROSALINDA", "JORDAN", "LILIA", "ALANA", "GAY", "CLARE", "ALEJANDRA", "ELINOR", "MICHAEL", "LORRIE", - "JERRI", "DARCY", "EARNESTINE", "CARMELLA", "TAYLOR", "NOEMI", "MARCIE", "LIZA", "ANNABELLE", "LOUISA", - "EARLENE", "MALLORY", "CARLENE", "NITA", "SELENA", "TANISHA", "KATY", "JULIANNE", "JOHN", "LAKISHA", - "EDWINA", "MARICELA", "MARGERY", "KENYA", "DOLLIE", "ROXIE", "ROSLYN", "KATHRINE", "NANETTE", "CHARMAINE", - "LAVONNE", "ILENE", "KRIS", "TAMMI", "SUZETTE", "CORINE", "KAYE", "JERRY", "MERLE", "CHRYSTAL", - "LINA", "DEANNE", "LILIAN", "JULIANA", "ALINE", "LUANN", "KASEY", "MARYANNE", "EVANGELINE", "COLETTE", - "MELVA", "LAWANDA", "YESENIA", "NADIA", "MADGE", "KATHIE", "EDDIE", "OPHELIA", "VALERIA", "NONA", - "MITZI", "MARI", "GEORGETTE", "CLAUDINE", "FRAN", "ALISSA", "ROSEANN", "LAKEISHA", "SUSANNA", "REVA", - "DEIDRE", "CHASITY", "SHEREE", "CARLY", "JAMES", "ELVIA", "ALYCE", "DEIRDRE", "GENA", "BRIANA", - "ARACELI", "KATELYN", "ROSANNE", "WENDI", "TESSA", "BERTA", "MARVA", "IMELDA", "MARIETTA", "MARCI", - "LEONOR", "ARLINE", "SASHA", "MADELYN", "JANNA", 
"JULIETTE", "DEENA", "AURELIA", "JOSEFA", "AUGUSTA", - "LILIANA", "YOUNG", "CHRISTIAN", "LESSIE", "AMALIA", "SAVANNAH", "ANASTASIA", "VILMA", "NATALIA", "ROSELLA", - "LYNNETTE", "CORINA", "ALFREDA", "LEANNA", "CAREY", "AMPARO", "COLEEN", "TAMRA", "AISHA", "WILDA", - "KARYN", "CHERRY", "QUEEN", "MAURA", "MAI", "EVANGELINA", "ROSANNA", "HALLIE", "ERNA", "ENID", - "MARIANA", "LACY", "JULIET", "JACKLYN", "FREIDA", "MADELEINE", "MARA", "HESTER", "CATHRYN", "LELIA", - "CASANDRA", "BRIDGETT", "ANGELITA", "JANNIE", "DIONNE", "ANNMARIE", "KATINA", "BERYL", "PHOEBE", "MILLICENT", - "KATHERYN", "DIANN", "CARISSA", "MARYELLEN", "LIZ", "LAURI", "HELGA", "GILDA", "ADRIAN", "RHEA", - "MARQUITA", "HOLLIE", "TISHA", "TAMERA", "ANGELIQUE", "FRANCESCA", "BRITNEY", "KAITLIN", "LOLITA", "FLORINE", - "ROWENA", "REYNA", "TWILA", "FANNY", "JANELL", "INES", "CONCETTA", "BERTIE", "ALBA", "BRIGITTE", - "ALYSON", "VONDA", "PANSY", "ELBA", "NOELLE", "LETITIA", "KITTY", "DEANN", "BRANDIE", "LOUELLA", - "LETA", "FELECIA", "SHARLENE", "LESA", "BEVERLEY", "ROBERT", "ISABELLA", "HERMINIA", "TERRA", "CELINA", - "TORI", "OCTAVIA", "JADE", "DENICE", "GERMAINE", "SIERRA", "MICHELL", "CORTNEY", "NELLY", "DORETHA", - "SYDNEY", "DEIDRA", "MONIKA", "LASHONDA", "JUDI", "CHELSEY", "ANTIONETTE", "MARGOT", "BOBBY", "ADELAIDE", - "NAN", "LEEANN", "ELISHA", "DESSIE", "LIBBY", "KATHI", "GAYLA", "LATANYA", "MINA", "MELLISA", - "KIMBERLEE", "JASMIN", "RENAE", "ZELDA", "ELDA", "MA", "JUSTINA", "GUSSIE", "EMILIE", "CAMILLA", - "ABBIE", "ROCIO", "KAITLYN", "JESSE", "EDYTHE", "ASHLEIGH", "SELINA", "LAKESHA", "GERI", "ALLENE", - "PAMALA", "MICHAELA", "DAYNA", "CARYN", "ROSALIA", "SUN", "JACQULINE", "REBECA", "MARYBETH", "KRYSTLE", - "IOLA", "DOTTIE", "BENNIE", "BELLE", "AUBREY", "GRISELDA", "ERNESTINA", "ELIDA", "ADRIANNE", "DEMETRIA", - "DELMA", "CHONG", "JAQUELINE", "DESTINY", "ARLEEN", "VIRGINA", "RETHA", "FATIMA", "TILLIE", "ELEANORE", - "CARI", "TREVA", "BIRDIE", "WILHELMINA", "ROSALEE", "MAURINE", "LATRICE", 
"YONG", "JENA", "TARYN", - "ELIA", "DEBBY", "MAUDIE", "JEANNA", "DELILAH", "CATRINA", "SHONDA", "HORTENCIA", "THEODORA", "TERESITA", - "ROBBIN", "DANETTE", "MARYJANE", "FREDDIE", "DELPHINE", "BRIANNE", "NILDA", "DANNA", "CINDI", "BESS", - "IONA", "HANNA", "ARIEL", "WINONA", "VIDA", "ROSITA", "MARIANNA", "WILLIAM", "RACHEAL", "GUILLERMINA", - "ELOISA", "CELESTINE", "CAREN", "MALISSA", "LONA", "CHANTEL", "SHELLIE", "MARISELA", "LEORA", "AGATHA", - "SOLEDAD", "MIGDALIA", "IVETTE", "CHRISTEN", "ATHENA", "JANEL", "CHLOE", "VEDA", "PATTIE", "TESSIE", - "TERA", "MARILYNN", "LUCRETIA", "KARRIE", "DINAH", "DANIELA", "ALECIA", "ADELINA", "VERNICE", "SHIELA", - "PORTIA", "MERRY", "LASHAWN", "DEVON", "DARA", "TAWANA", "OMA", "VERDA", "CHRISTIN", "ALENE", - "ZELLA", "SANDI", "RAFAELA", "MAYA", "KIRA", "CANDIDA", "ALVINA", "SUZAN", "SHAYLA", "LYN", - "LETTIE", "ALVA", "SAMATHA", "ORALIA", "MATILDE", "MADONNA", "LARISSA", "VESTA", "RENITA", "INDIA", - "DELOIS", "SHANDA", "PHILLIS", "LORRI", "ERLINDA", "CRUZ", "CATHRINE", "BARB", "ZOE", "ISABELL", - "IONE", "GISELA", "CHARLIE", "VALENCIA", "ROXANNA", "MAYME", "KISHA", "ELLIE", "MELLISSA", "DORRIS", - "DALIA", "BELLA", "ANNETTA", "ZOILA", "RETA", "REINA", "LAURETTA", "KYLIE", "CHRISTAL", "PILAR", - "CHARLA", "ELISSA", "TIFFANI", "TANA", "PAULINA", "LEOTA", "BREANNA", "JAYME", "CARMEL", "VERNELL", - "TOMASA", "MANDI", "DOMINGA", "SANTA", "MELODIE", "LURA", "ALEXA", "TAMELA", "RYAN", "MIRNA", - "KERRIE", "VENUS", "NOEL", "FELICITA", "CRISTY", "CARMELITA", "BERNIECE", "ANNEMARIE", "TIARA", "ROSEANNE", - "MISSY", "CORI", "ROXANA", "PRICILLA", "KRISTAL", "JUNG", "ELYSE", "HAYDEE", "ALETHA", "BETTINA", - "MARGE", "GILLIAN", "FILOMENA", "CHARLES", "ZENAIDA", "HARRIETTE", "CARIDAD", "VADA", "UNA", "ARETHA", - "PEARLINE", "MARJORY", "MARCELA", "FLOR", "EVETTE", "ELOUISE", "ALINA", "TRINIDAD", "DAVID", "DAMARIS", - "CATHARINE", "CARROLL", "BELVA", "NAKIA", "MARLENA", "LUANNE", "LORINE", "KARON", "DORENE", "DANITA", - "BRENNA", "TATIANA", 
"SAMMIE", "LOUANN", "LOREN", "JULIANNA", "ANDRIA", "PHILOMENA", "LUCILA", "LEONORA", - "DOVIE", "ROMONA", "MIMI", "JACQUELIN", "GAYE", "TONJA", "MISTI", "JOE", "GENE", "CHASTITY", - "STACIA", "ROXANN", "MICAELA", "NIKITA", "MEI", "VELDA", "MARLYS", "JOHNNA", "AURA", "LAVERN", - "IVONNE", "HAYLEY", "NICKI", "MAJORIE", "HERLINDA", "GEORGE", "ALPHA", "YADIRA", "PERLA", "GREGORIA", - "DANIEL", "ANTONETTE", "SHELLI", "MOZELLE", "MARIAH", "JOELLE", "CORDELIA", "JOSETTE", "CHIQUITA", "TRISTA", - "LOUIS", "LAQUITA", "GEORGIANA", "CANDI", "SHANON", "LONNIE", "HILDEGARD", "CECIL", "VALENTINA", "STEPHANY", - "MAGDA", "KAROL", "GERRY", "GABRIELLA", "TIANA", "ROMA", "RICHELLE", "RAY", "PRINCESS", "OLETA", - "JACQUE", "IDELLA", "ALAINA", "SUZANNA", "JOVITA", "BLAIR", "TOSHA", "RAVEN", "NEREIDA", "MARLYN", - "KYLA", "JOSEPH", "DELFINA", "TENA", "STEPHENIE", "SABINA", "NATHALIE", "MARCELLE", "GERTIE", "DARLEEN", - "THEA", "SHARONDA", "SHANTEL", "BELEN", "VENESSA", "ROSALINA", "ONA", "GENOVEVA", "COREY", "CLEMENTINE", - "ROSALBA", "RENATE", "RENATA", "MI", "IVORY", "GEORGIANNA", "FLOY", "DORCAS", "ARIANA", "TYRA", - "THEDA", "MARIAM", "JULI", "JESICA", "DONNIE", "VIKKI", "VERLA", "ROSELYN", "MELVINA", "JANNETTE", - "GINNY", "DEBRAH", "CORRIE", "ASIA", "VIOLETA", "MYRTIS", "LATRICIA", "COLLETTE", "CHARLEEN", "ANISSA", - "VIVIANA", "TWYLA", "PRECIOUS", "NEDRA", "LATONIA", "LAN", "HELLEN", "FABIOLA", "ANNAMARIE", "ADELL", - "SHARYN", "CHANTAL", "NIKI", "MAUD", "LIZETTE", "LINDY", "KIA", "KESHA", "JEANA", "DANELLE", - "CHARLINE", "CHANEL", "CARROL", "VALORIE", "LIA", "DORTHA", "CRISTAL", "SUNNY", "LEONE", "LEILANI", - "GERRI", "DEBI", "ANDRA", "KESHIA", "IMA", "EULALIA", "EASTER", "DULCE", "NATIVIDAD", "LINNIE", - "KAMI", "GEORGIE", "CATINA", "BROOK", "ALDA", "WINNIFRED", "SHARLA", "RUTHANN", "MEAGHAN", "MAGDALENE", - "LISSETTE", "ADELAIDA", "VENITA", "TRENA", "SHIRLENE", "SHAMEKA", "ELIZEBETH", "DIAN", "SHANTA", "MICKEY", - "LATOSHA", "CARLOTTA", "WINDY", "SOON", "ROSINA", "MARIANN", 
"LEISA", "JONNIE", "DAWNA", "CATHIE", - "BILLY", "ASTRID", "SIDNEY", "LAUREEN", "JANEEN", "HOLLI", "FAWN", "VICKEY", "TERESSA", "SHANTE", - "RUBYE", "MARCELINA", "CHANDA", "CARY", "TERESE", "SCARLETT", "MARTY", "MARNIE", "LULU", "LISETTE", - "JENIFFER", "ELENOR", "DORINDA", "DONITA", "CARMAN", "BERNITA", "ALTAGRACIA", "ALETA", "ADRIANNA", "ZORAIDA", - "RONNIE", "NICOLA", "LYNDSEY", "KENDALL", "JANINA", "CHRISSY", "AMI", "STARLA", "PHYLIS", "PHUONG", - "KYRA", "CHARISSE", "BLANCH", "SANJUANITA", "RONA", "NANCI", "MARILEE", "MARANDA", "CORY", "BRIGETTE", - "SANJUANA", "MARITA", "KASSANDRA", "JOYCELYN", "IRA", "FELIPA", "CHELSIE", "BONNY", "MIREYA", "LORENZA", - "KYONG", "ILEANA", "CANDELARIA", "TONY", "TOBY", "SHERIE", "OK", "MARK", "LUCIE", "LEATRICE", - "LAKESHIA", "GERDA", "EDIE", "BAMBI", "MARYLIN", "LAVON", "HORTENSE", "GARNET", "EVIE", "TRESSA", - "SHAYNA", "LAVINA", "KYUNG", "JEANETTA", "SHERRILL", "SHARA", "PHYLISS", "MITTIE", "ANABEL", "ALESIA", - "THUY", "TAWANDA", "RICHARD", "JOANIE", "TIFFANIE", "LASHANDA", "KARISSA", "ENRIQUETA", "DARIA", "DANIELLA", - "CORINNA", "ALANNA", "ABBEY", "ROXANE", "ROSEANNA", "MAGNOLIA", "LIDA", "KYLE", "JOELLEN", "ERA", - "CORAL", "CARLEEN", "TRESA", "PEGGIE", "NOVELLA", "NILA", "MAYBELLE", "JENELLE", "CARINA", "NOVA", - "MELINA", "MARQUERITE", "MARGARETTE", "JOSEPHINA", "EVONNE", "DEVIN", "CINTHIA", "ALBINA", "TOYA", "TAWNYA", - "SHERITA", "SANTOS", "MYRIAM", "LIZABETH", "LISE", "KEELY", "JENNI", "GISELLE", "CHERYLE", "ARDITH", - "ARDIS", "ALESHA", "ADRIANE", "SHAINA", "LINNEA", "KAROLYN", "HONG", "FLORIDA", "FELISHA", "DORI", - "DARCI", "ARTIE", "ARMIDA", "ZOLA", "XIOMARA", "VERGIE", "SHAMIKA", "NENA", "NANNETTE", "MAXIE", - "LOVIE", "JEANE", "JAIMIE", "INGE", "FARRAH", "ELAINA", "CAITLYN", "STARR", "FELICITAS", "CHERLY", - "CARYL", "YOLONDA", "YASMIN", "TEENA", "PRUDENCE", "PENNIE", "NYDIA", "MACKENZIE", "ORPHA", "MARVEL", - "LIZBETH", "LAURETTE", "JERRIE", "HERMELINDA", "CAROLEE", "TIERRA", "MIRIAN", "META", "MELONY", 
"KORI", - "JENNETTE", "JAMILA", "ENA", "ANH", "YOSHIKO", "SUSANNAH", "SALINA", "RHIANNON", "JOLEEN", "CRISTINE", - "ASHTON", "ARACELY", "TOMEKA", "SHALONDA", "MARTI", "LACIE", "KALA", "JADA", "ILSE", "HAILEY", - "BRITTANI", "ZONA", "SYBLE", "SHERRYL", "RANDY", "NIDIA", "MARLO", "KANDICE", "KANDI", "DEB", - "DEAN", "AMERICA", "ALYCIA", "TOMMY", "RONNA", "NORENE", "MERCY", "JOSE", "INGEBORG", "GIOVANNA", - "GEMMA", "CHRISTEL", "AUDRY", "ZORA", "VITA", "VAN", "TRISH", "STEPHAINE", "SHIRLEE", "SHANIKA", - "MELONIE", "MAZIE", "JAZMIN", "INGA", "HOA", "HETTIE", "GERALYN", "FONDA", "ESTRELLA", "ADELLA", - "SU", "SARITA", "RINA", "MILISSA", "MARIBETH", "GOLDA", "EVON", "ETHELYN", "ENEDINA", "CHERISE", - "CHANA", "VELVA", "TAWANNA", "SADE", "MIRTA", "LI", "KARIE", "JACINTA", "ELNA", "DAVINA", - "CIERRA", "ASHLIE", "ALBERTHA", "TANESHA", "STEPHANI", "NELLE", "MINDI", "LU", "LORINDA", "LARUE", - "FLORENE", "DEMETRA", "DEDRA", "CIARA", "CHANTELLE", "ASHLY", "SUZY", "ROSALVA", "NOELIA", "LYDA", - "LEATHA", "KRYSTYNA", "KRISTAN", "KARRI", "DARLINE", "DARCIE", "CINDA", "CHEYENNE", "CHERRIE", "AWILDA", - "ALMEDA", "ROLANDA", "LANETTE", "JERILYN", "GISELE", "EVALYN", "CYNDI", "CLETA", "CARIN", "ZINA", - "ZENA", "VELIA", "TANIKA", "PAUL", "CHARISSA", "THOMAS", "TALIA", "MARGARETE", "LAVONDA", "KAYLEE", - "KATHLENE", "JONNA", "IRENA", "ILONA", "IDALIA", "CANDIS", "CANDANCE", "BRANDEE", "ANITRA", "ALIDA", - "SIGRID", "NICOLETTE", "MARYJO", "LINETTE", "HEDWIG", "CHRISTIANA", "CASSIDY", "ALEXIA", "TRESSIE", "MODESTA", - "LUPITA", "LITA", "GLADIS", "EVELIA", "DAVIDA", "CHERRI", "CECILY", "ASHELY", "ANNABEL", "AGUSTINA", - "WANITA", "SHIRLY", "ROSAURA", "HULDA", "EUN", "BAILEY", "YETTA", "VERONA", "THOMASINA", "SIBYL", - "SHANNAN", "MECHELLE", "LUE", "LEANDRA", "LANI", "KYLEE", "KANDY", "JOLYNN", "FERNE", "EBONI", - "CORENE", "ALYSIA", "ZULA", "NADA", "MOIRA", "LYNDSAY", "LORRETTA", "JUAN", "JAMMIE", "HORTENSIA", - "GAYNELL", "CAMERON", "ADRIA", "VINA", "VICENTA", "TANGELA", "STEPHINE", 
"NORINE", "NELLA", "LIANA", - "LESLEE", "KIMBERELY", "ILIANA", "GLORY", "FELICA", "EMOGENE", "ELFRIEDE", "EDEN", "EARTHA", "CARMA", - "BEA", "OCIE", "MARRY", "LENNIE", "KIARA", "JACALYN", "CARLOTA", "ARIELLE", "YU", "STAR", - "OTILIA", "KIRSTIN", "KACEY", "JOHNETTA", "JOEY", "JOETTA", "JERALDINE", "JAUNITA", "ELANA", "DORTHEA", - "CAMI", "AMADA", "ADELIA", "VERNITA", "TAMAR", "SIOBHAN", "RENEA", "RASHIDA", "OUIDA", "ODELL", - "NILSA", "MERYL", "KRISTYN", "JULIETA", "DANICA", "BREANNE", "AUREA", "ANGLEA", "SHERRON", "ODETTE", - "MALIA", "LORELEI", "LIN", "LEESA", "KENNA", "KATHLYN", "FIONA", "CHARLETTE", "SUZIE", "SHANTELL", - "SABRA", "RACQUEL", "MYONG", "MIRA", "MARTINE", "LUCIENNE", "LAVADA", "JULIANN", "JOHNIE", "ELVERA", - "DELPHIA", "CLAIR", "CHRISTIANE", "CHAROLETTE", "CARRI", "AUGUSTINE", "ASHA", "ANGELLA", "PAOLA", "NINFA", - "LEDA", "LAI", "EDA", "SUNSHINE", "STEFANI", "SHANELL", "PALMA", "MACHELLE", "LISSA", "KECIA", - "KATHRYNE", "KARLENE", "JULISSA", "JETTIE", "JENNIFFER", "HUI", "CORRINA", "CHRISTOPHER", "CAROLANN", "ALENA", - "TESS", "ROSARIA", "MYRTICE", "MARYLEE", "LIANE", "KENYATTA", "JUDIE", "JANEY", "IN", "ELMIRA", - "ELDORA", "DENNA", "CRISTI", "CATHI", "ZAIDA", "VONNIE", "VIVA", "VERNIE", "ROSALINE", "MARIELA", - "LUCIANA", "LESLI", "KARAN", "FELICE", "DENEEN", "ADINA", "WYNONA", "TARSHA", "SHERON", "SHASTA", - "SHANITA", "SHANI", "SHANDRA", "RANDA", "PINKIE", "PARIS", "NELIDA", "MARILOU", "LYLA", "LAURENE", - "LACI", "JOI", "JANENE", "DOROTHA", "DANIELE", "DANI", "CAROLYNN", "CARLYN", "BERENICE", "AYESHA", - "ANNELIESE", "ALETHEA", "THERSA", "TAMIKO", "RUFINA", "OLIVA", "MOZELL", "MARYLYN", "MADISON", "KRISTIAN", - "KATHYRN", "KASANDRA", "KANDACE", "JANAE", "GABRIEL", "DOMENICA", "DEBBRA", "DANNIELLE", "CHUN", "BUFFY", - "BARBIE", "ARCELIA", "AJA", "ZENOBIA", "SHAREN", "SHAREE", "PATRICK", "PAGE", "MY", "LAVINIA", - "KUM", "KACIE", "JACKELINE", "HUONG", "FELISA", "EMELIA", "ELEANORA", "CYTHIA", "CRISTIN", "CLYDE", - "CLARIBEL", "CARON", 
"ANASTACIA", "ZULMA", "ZANDRA", "YOKO", "TENISHA", "SUSANN", "SHERILYN", "SHAY", - "SHAWANDA", "SABINE", "ROMANA", "MATHILDA", "LINSEY", "KEIKO", "JOANA", "ISELA", "GRETTA", "GEORGETTA", - "EUGENIE", "DUSTY", "DESIRAE", "DELORA", "CORAZON", "ANTONINA", "ANIKA", "WILLENE", "TRACEE", "TAMATHA", - "REGAN", "NICHELLE", "MICKIE", "MAEGAN", "LUANA", "LANITA", "KELSIE", "EDELMIRA", "BREE", "AFTON", - "TEODORA", "TAMIE", "SHENA", "MEG", "LINH", "KELI", "KACI", "DANYELLE", "BRITT", "ARLETTE", - "ALBERTINE", "ADELLE", "TIFFINY", "STORMY", "SIMONA", "NUMBERS", "NICOLASA", "NICHOL", "NIA", "NAKISHA", - "MEE", "MAIRA", "LOREEN", "KIZZY", "JOHNNY", "JAY", "FALLON", "CHRISTENE", "BOBBYE", "ANTHONY", - "YING", "VINCENZA", "TANJA", "RUBIE", "RONI", "QUEENIE", "MARGARETT", "KIMBERLI", "IRMGARD", "IDELL", - "HILMA", "EVELINA", "ESTA", "EMILEE", "DENNISE", "DANIA", "CARL", "CARIE", "ANTONIO", "WAI", - "SANG", "RISA", "RIKKI", "PARTICIA", "MUI", "MASAKO", "MARIO", "LUVENIA", "LOREE", "LONI", - "LIEN", "KEVIN", "GIGI", "FLORENCIA", "DORIAN", "DENITA", "DALLAS", "CHI", "BILLYE", "ALEXANDER", - "TOMIKA", "SHARITA", "RANA", "NIKOLE", "NEOMA", "MARGARITE", "MADALYN", "LUCINA", "LAILA", "KALI", - "JENETTE", "GABRIELE", "EVELYNE", "ELENORA", "CLEMENTINA", "ALEJANDRINA", "ZULEMA", "VIOLETTE", "VANNESSA", "THRESA", - "RETTA", "PIA", "PATIENCE", "NOELLA", "NICKIE", "JONELL", "DELTA", "CHUNG", "CHAYA", "CAMELIA", - "BETHEL", "ANYA", "ANDREW", "THANH", "SUZANN", "SPRING", "SHU", "MILA", "LILLA", "LAVERNA", - "KEESHA", "KATTIE", "GIA", "GEORGENE", "EVELINE", "ESTELL", "ELIZBETH", "VIVIENNE", "VALLIE", "TRUDIE", - "STEPHANE", "MICHEL", "MAGALY", "MADIE", "KENYETTA", "KARREN", "JANETTA", "HERMINE", "HARMONY", "DRUCILLA", - "DEBBI", "CELESTINA", "CANDIE", "BRITNI", "BECKIE", "AMINA", "ZITA", "YUN", "YOLANDE", "VIVIEN", - "VERNETTA", "TRUDI", "SOMMER", "PEARLE", "PATRINA", "OSSIE", "NICOLLE", "LOYCE", "LETTY", "LARISA", - "KATHARINA", "JOSELYN", "JONELLE", "JENELL", "IESHA", "HEIDE", "FLORINDA", 
"FLORENTINA", "FLO", "ELODIA", - "DORINE", "BRUNILDA", "BRIGID", "ASHLI", "ARDELLA", "TWANA", "THU", "TARAH", "SUNG", "SHEA", - "SHAVON", "SHANE", "SERINA", "RAYNA", "RAMONITA", "NGA", "MARGURITE", "LUCRECIA", "KOURTNEY", "KATI", - "JESUS", "JESENIA", "DIAMOND", "CRISTA", "AYANA", "ALICA", "ALIA", "VINNIE", "SUELLEN", "ROMELIA", - "RACHELL", "PIPER", "OLYMPIA", "MICHIKO", "KATHALEEN", "JOLIE", "JESSI", "JANESSA", "HANA", "HA", - "ELEASE", "CARLETTA", "BRITANY", "SHONA", "SALOME", "ROSAMOND", "REGENA", "RAINA", "NGOC", "NELIA", - "LOUVENIA", "LESIA", "LATRINA", "LATICIA", "LARHONDA", "JINA", "JACKI", "HOLLIS", "HOLLEY", "EMMY", - "DEEANN", "CORETTA", "ARNETTA", "VELVET", "THALIA", "SHANICE", "NETA", "MIKKI", "MICKI", "LONNA", - "LEANA", "LASHUNDA", "KILEY", "JOYE", "JACQULYN", "IGNACIA", "HYUN", "HIROKO", "HENRY", "HENRIETTE", - "ELAYNE", "DELINDA", "DARNELL", "DAHLIA", "COREEN", "CONSUELA", "CONCHITA", "CELINE", "BABETTE", "AYANNA", - "ANETTE", "ALBERTINA", "SKYE", "SHAWNEE", "SHANEKA", "QUIANA", "PAMELIA", "MIN", "MERRI", "MERLENE", - "MARGIT", "KIESHA", "KIERA", "KAYLENE", "JODEE", "JENISE", "ERLENE", "EMMIE", "ELSE", "DARYL", - "DALILA", "DAISEY", "CODY", "CASIE", "BELIA", "BABARA", "VERSIE", "VANESA", "SHELBA", "SHAWNDA", - "SAM", "NORMAN", "NIKIA", "NAOMA", "MARNA", "MARGERET", "MADALINE", "LAWANA", "KINDRA", "JUTTA", - "JAZMINE", "JANETT", "HANNELORE", "GLENDORA", "GERTRUD", "GARNETT", "FREEDA", "FREDERICA", "FLORANCE", "FLAVIA", - "DENNIS", "CARLINE", "BEVERLEE", "ANJANETTE", "VALDA", "TRINITY", "TAMALA", "STEVIE", "SHONNA", "SHA", - "SARINA", "ONEIDA", "MICAH", "MERILYN", "MARLEEN", "LURLINE", "LENNA", "KATHERIN", "JIN", "JENI", - "HAE", "GRACIA", "GLADY", "FARAH", "ERIC", "ENOLA", "EMA", "DOMINQUE", "DEVONA", "DELANA", - "CECILA", "CAPRICE", "ALYSHA", "ALI", "ALETHIA", "VENA", "THERESIA", "TAWNY", "SONG", "SHAKIRA", - "SAMARA", "SACHIKO", "RACHELE", "PAMELLA", "NICKY", "MARNI", "MARIEL", "MAREN", "MALISA", "LIGIA", - "LERA", "LATORIA", "LARAE", "KIMBER", 
"KATHERN", "KAREY", "JENNEFER", "JANETH", "HALINA", "FREDIA", - "DELISA", "DEBROAH", "CIERA", "CHIN", "ANGELIKA", "ANDREE", "ALTHA", "YEN", "VIVAN", "TERRESA", - "TANNA", "SUK", "SUDIE", "SOO", "SIGNE", "SALENA", "RONNI", "REBBECCA", "MYRTIE", "MCKENZIE", - "MALIKA", "MAIDA", "LOAN", "LEONARDA", "KAYLEIGH", "FRANCE", "ETHYL", "ELLYN", "DAYLE", "CAMMIE", - "BRITTNI", "BIRGIT", "AVELINA", "ASUNCION", "ARIANNA", "AKIKO", "VENICE", "TYESHA", "TONIE", "TIESHA", - "TAKISHA", "STEFFANIE", "SINDY", "SANTANA", "MEGHANN", "MANDA", "MACIE", "LADY", "KELLYE", "KELLEE", - "JOSLYN", "JASON", "INGER", "INDIRA", "GLINDA", "GLENNIS", "FERNANDA", "FAUSTINA", "ENEIDA", "ELICIA", - "DOT", "DIGNA", "DELL", "ARLETTA", "ANDRE", "WILLIA", "TAMMARA", "TABETHA", "SHERRELL", "SARI", - "REFUGIO", "REBBECA", "PAULETTA", "NIEVES", "NATOSHA", "NAKITA", "MAMMIE", "KENISHA", "KAZUKO", "KASSIE", - "GARY", "EARLEAN", "DAPHINE", "CORLISS", "CLOTILDE", "CAROLYNE", "BERNETTA", "AUGUSTINA", "AUDREA", "ANNIS", - "ANNABELL", "YAN", "TENNILLE", "TAMICA", "SELENE", "SEAN", "ROSANA", "REGENIA", "QIANA", "MARKITA", - "MACY", "LEEANNE", "LAURINE", "KYM", "JESSENIA", "JANITA", "GEORGINE", "GENIE", "EMIKO", "ELVIE", - "DEANDRA", "DAGMAR", "CORIE", "COLLEN", "CHERISH", "ROMAINE", "PORSHA", "PEARLENE", "MICHELINE", "MERNA", - "MARGORIE", "MARGARETTA", "LORE", "KENNETH", "JENINE", "HERMINA", "FREDERICKA", "ELKE", "DRUSILLA", "DORATHY", - "DIONE", "DESIRE", "CELENA", "BRIGIDA", "ANGELES", "ALLEGRA", "THEO", "TAMEKIA", "SYNTHIA", "STEPHEN", - "SOOK", "SLYVIA", "ROSANN", "REATHA", "RAYE", "MARQUETTA", "MARGART", "LING", "LAYLA", "KYMBERLY", - "KIANA", "KAYLEEN", "KATLYN", "KARMEN", "JOELLA", "IRINA", "EMELDA", "ELENI", "DETRA", "CLEMMIE", - "CHERYLL", "CHANTELL", "CATHEY", "ARNITA", "ARLA", "ANGLE", "ANGELIC", "ALYSE", "ZOFIA", "THOMASINE", - "TENNIE", "SON", "SHERLY", "SHERLEY", "SHARYL", "REMEDIOS", "PETRINA", "NICKOLE", "MYUNG", "MYRLE", - "MOZELLA", "LOUANNE", "LISHA", "LATIA", "LANE", "KRYSTA", "JULIENNE", 
"JOEL", "JEANENE", "JACQUALINE", - "ISAURA", "GWENDA", "EARLEEN", "DONALD", "CLEOPATRA", "CARLIE", "AUDIE", "ANTONIETTA", "ALISE", "ALEX", - "VERDELL", "VAL", "TYLER", "TOMOKO", "THAO", "TALISHA", "STEVEN", "SO", "SHEMIKA", "SHAUN", - "SCARLET", "SAVANNA", "SANTINA", "ROSIA", "RAEANN", "ODILIA", "NANA", "MINNA", "MAGAN", "LYNELLE", - "LE", "KARMA", "JOEANN", "IVANA", "INELL", "ILANA", "HYE", "HONEY", "HEE", "GUDRUN", - "FRANK", "DREAMA", "CRISSY", "CHANTE", "CARMELINA", "ARVILLA", "ARTHUR", "ANNAMAE", "ALVERA", "ALEIDA", - "AARON", "YEE", "YANIRA", "VANDA", "TIANNA", "TAM", "STEFANIA", "SHIRA", "PERRY", "NICOL", - "NANCIE", "MONSERRATE", "MINH", "MELYNDA", "MELANY", "MATTHEW", "LOVELLA", "LAURE", "KIRBY", "KACY", - "JACQUELYNN", "HYON", "GERTHA", "FRANCISCO", "ELIANA", "CHRISTENA", "CHRISTEEN", "CHARISE", "CATERINA", "CARLEY", - "CANDYCE", "ARLENA", "AMMIE", "YANG", "WILLETTE", "VANITA", "TUYET", "TINY", "SYREETA", "SILVA", - "SCOTT", "RONALD", "PENNEY", "NYLA", "MICHAL", "MAURICE", "MARYAM", "MARYA", "MAGEN", "LUDIE", - "LOMA", "LIVIA", "LANELL", "KIMBERLIE", "JULEE", "DONETTA", "DIEDRA", "DENISHA", "DEANE", "DAWNE", - "CLARINE", "CHERRYL", "BRONWYN", "BRANDON", "ALLA", "VALERY", "TONDA", "SUEANN", "SORAYA", "SHOSHANA", - "SHELA", "SHARLEEN", "SHANELLE", "NERISSA", "MICHEAL", "MERIDITH", "MELLIE", "MAYE", "MAPLE", "MAGARET", - "LUIS", "LILI", "LEONILA", "LEONIE", "LEEANNA", "LAVONIA", "LAVERA", "KRISTEL", "KATHEY", "KATHE", - "JUSTIN", "JULIAN", "JIMMY", "JANN", "ILDA", "HILDRED", "HILDEGARDE", "GENIA", "FUMIKO", "EVELIN", - "ERMELINDA", "ELLY", "DUNG", "DOLORIS", "DIONNA", "DANAE", "BERNEICE", "ANNICE", "ALIX", "VERENA", - "VERDIE", "TRISTAN", "SHAWNNA", "SHAWANA", "SHAUNNA", "ROZELLA", "RANDEE", "RANAE", "MILAGRO", "LYNELL", - "LUISE", "LOUIE", "LOIDA", "LISBETH", "KARLEEN", "JUNITA", "JONA", "ISIS", "HYACINTH", "HEDY", - "GWENN", "ETHELENE", "ERLINE", "EDWARD", "DONYA", "DOMONIQUE", "DELICIA", "DANNETTE", "CICELY", "BRANDA", - "BLYTHE", "BETHANN", "ASHLYN", 
"ANNALEE", "ALLINE", "YUKO", "VELLA", "TRANG", "TOWANDA", "TESHA", - "SHERLYN", "NARCISA", "MIGUELINA", "MERI", "MAYBELL", "MARLANA", "MARGUERITA", "MADLYN", "LUNA", "LORY", - "LORIANN", "LIBERTY", "LEONORE", "LEIGHANN", "LAURICE", "LATESHA", "LARONDA", "KATRICE", "KASIE", "KARL", - "KALEY", "JADWIGA", "GLENNIE", "GEARLDINE", "FRANCINA", "EPIFANIA", "DYAN", "DORIE", "DIEDRE", "DENESE", - "DEMETRICE", "DELENA", "DARBY", "CRISTIE", "CLEORA", "CATARINA", "CARISA", "BERNIE", "BARBERA", "ALMETA", - "TRULA", "TEREASA", "SOLANGE", "SHEILAH", "SHAVONNE", "SANORA", "ROCHELL", "MATHILDE", "MARGARETA", "MAIA", - "LYNSEY", "LAWANNA", "LAUNA", "KENA", "KEENA", "KATIA", "JAMEY", "GLYNDA", "GAYLENE", "ELVINA", - "ELANOR", "DANUTA", "DANIKA", "CRISTEN", "CORDIE", "COLETTA", "CLARITA", "CARMON", "BRYNN", "AZUCENA", - "AUNDREA", "ANGELE", "YI", "WALTER", "VERLIE", "VERLENE", "TAMESHA", "SILVANA", "SEBRINA", "SAMIRA", - "REDA", "RAYLENE", "PENNI", "PANDORA", "NORAH", "NOMA", "MIREILLE", "MELISSIA", "MARYALICE", "LARAINE", - "KIMBERY", "KARYL", "KARINE", "KAM", "JOLANDA", "JOHANA", "JESUSA", "JALEESA", "JAE", "JACQUELYNE", - "IRISH", "ILUMINADA", "HILARIA", "HANH", "GENNIE", "FRANCIE", "FLORETTA", "EXIE", "EDDA", "DREMA", - "DELPHA", "BEV", "BARBAR", "ASSUNTA", "ARDELL", "ANNALISA", "ALISIA", "YUKIKO", "YOLANDO", "WONDA", - "WEI", "WALTRAUD", "VETA", "TEQUILA", "TEMEKA", "TAMEIKA", "SHIRLEEN", "SHENITA", "PIEDAD", "OZELLA", - "MIRTHA", "MARILU", "KIMIKO", "JULIANE", "JENICE", "JEN", "JANAY", "JACQUILINE", "HILDE", "FE", - "FAE", "EVAN", "EUGENE", "ELOIS", "ECHO", "DEVORAH", "CHAU", "BRINDA", "BETSEY", "ARMINDA", - "ARACELIS", "APRYL", "ANNETT", "ALISHIA", "VEOLA", "USHA", "TOSHIKO", "THEOLA", "TASHIA", "TALITHA", - "SHERY", "RUDY", "RENETTA", "REIKO", "RASHEEDA", "OMEGA", "OBDULIA", "MIKA", "MELAINE", "MEGGAN", - "MARTIN", "MARLEN", "MARGET", "MARCELINE", "MANA", "MAGDALEN", "LIBRADA", "LEZLIE", "LEXIE", "LATASHIA", - "LASANDRA", "KELLE", "ISIDRA", "ISA", "INOCENCIA", "GWYN", 
"FRANCOISE", "ERMINIA", "ERINN", "DIMPLE", - "DEVORA", "CRISELDA", "ARMANDA", "ARIE", "ARIANE", "ANGELO", "ANGELENA", "ALLEN", "ALIZA", "ADRIENE", - "ADALINE", "XOCHITL", "TWANNA", "TRAN", "TOMIKO", "TAMISHA", "TAISHA", "SUSY", "SIU", "RUTHA", - "ROXY", "RHONA", "RAYMOND", "OTHA", "NORIKO", "NATASHIA", "MERRIE", "MELVIN", "MARINDA", "MARIKO", - "MARGERT", "LORIS", "LIZZETTE", "LEISHA", "KAILA", "KA", "JOANNIE", "JERRICA", "JENE", "JANNET", - "JANEE", "JACINDA", "HERTA", "ELENORE", "DORETTA", "DELAINE", "DANIELL", "CLAUDIE", "CHINA", "BRITTA", - "APOLONIA", "AMBERLY", "ALEASE", "YURI", "YUK", "WEN", "WANETA", "UTE", "TOMI", "SHARRI", - "SANDIE", "ROSELLE", "REYNALDA", "RAGUEL", "PHYLICIA", "PATRIA", "OLIMPIA", "ODELIA", "MITZIE", "MITCHELL", - "MISS", "MINDA", "MIGNON", "MICA", "MENDY", "MARIVEL", "MAILE", "LYNETTA", "LAVETTE", "LAURYN", - "LATRISHA", "LAKIESHA", "KIERSTEN", "KARY", "JOSPHINE", "JOLYN", "JETTA", "JANISE", "JACQUIE", "IVELISSE", - "GLYNIS", "GIANNA", "GAYNELLE", "EMERALD", "DEMETRIUS", "DANYELL", "DANILLE", "DACIA", "CORALEE", "CHER", - "CEOLA", "BRETT", "BELL", "ARIANNE", "ALESHIA", "YUNG", "WILLIEMAE", "TROY", "TRINH", "THORA", - "TAI", "SVETLANA", "SHERIKA", "SHEMEKA", "SHAUNDA", "ROSELINE", "RICKI", "MELDA", "MALLIE", "LAVONNA", - "LATINA", "LARRY", "LAQUANDA", "LALA", "LACHELLE", "KLARA", "KANDIS", "JOHNA", "JEANMARIE", "JAYE", - "HANG", "GRAYCE", "GERTUDE", "EMERITA", "EBONIE", "CLORINDA", "CHING", "CHERY", "CAROLA", "BREANN", - "BLOSSOM", "BERNARDINE", "BECKI", "ARLETHA", "ARGELIA", "ARA", "ALITA", "YULANDA", "YON", "YESSENIA", - "TOBI", "TASIA", "SYLVIE", "SHIRL", "SHIRELY", "SHERIDAN", "SHELLA", "SHANTELLE", "SACHA", "ROYCE", - "REBECKA", "REAGAN", "PROVIDENCIA", "PAULENE", "MISHA", "MIKI", "MARLINE", "MARICA", "LORITA", "LATOYIA", - "LASONYA", "KERSTIN", "KENDA", "KEITHA", "KATHRIN", "JAYMIE", "JACK", "GRICELDA", "GINETTE", "ERYN", - "ELINA", "ELFRIEDA", "DANYEL", "CHEREE", "CHANELLE", "BARRIE", "AVERY", "AURORE", "ANNAMARIA", "ALLEEN", - 
"AILENE", "AIDE", "YASMINE", "VASHTI", "VALENTINE", "TREASA", "TORY", "TIFFANEY", "SHERYLL", "SHARIE", - "SHANAE", "SAU", "RAISA", "PA", "NEDA", "MITSUKO", "MIRELLA", "MILDA", "MARYANNA", "MARAGRET", - "MABELLE", "LUETTA", "LORINA", "LETISHA", "LATARSHA", "LANELLE", "LAJUANA", "KRISSY", "KARLY", "KARENA", - "JON", "JESSIKA", "JERICA", "JEANELLE", "JANUARY", "JALISA", "JACELYN", "IZOLA", "IVEY", "GREGORY", - "EUNA", "ETHA", "DREW", "DOMITILA", "DOMINICA", "DAINA", "CREOLA", "CARLI", "CAMIE", "BUNNY", - "BRITTNY", "ASHANTI", "ANISHA", "ALEEN", "ADAH", "YASUKO", "WINTER", "VIKI", "VALRIE", "TONA", - "TINISHA", "THI", "TERISA", "TATUM", "TANEKA", "SIMONNE", "SHALANDA", "SERITA", "RESSIE", "REFUGIA", - "PAZ", "OLENE", "NA", "MERRILL", "MARGHERITA", "MANDIE", "MAN", "MAIRE", "LYNDIA", "LUCI", - "LORRIANE", "LORETA", "LEONIA", "LAVONA", "LASHAWNDA", "LAKIA", "KYOKO", "KRYSTINA", "KRYSTEN", "KENIA", - "KELSI", "JUDE", "JEANICE", "ISOBEL", "GEORGIANN", "GENNY", "FELICIDAD", "EILENE", "DEON", "DELOISE", - "DEEDEE", "DANNIE", "CONCEPTION", "CLORA", "CHERILYN", "CHANG", "CALANDRA", "BERRY", "ARMANDINA", "ANISA", - "ULA", "TIMOTHY", "TIERA", "THERESSA", "STEPHANIA", "SIMA", "SHYLA", "SHONTA", "SHERA", "SHAQUITA", - "SHALA", "SAMMY", "ROSSANA", "NOHEMI", "NERY", "MORIAH", "MELITA", "MELIDA", "MELANI", "MARYLYNN", - "MARISHA", "MARIETTE", "MALORIE", "MADELENE", "LUDIVINA", "LORIA", "LORETTE", "LORALEE", "LIANNE", "LEON", - "LAVENIA", "LAURINDA", "LASHON", "KIT", "KIMI", "KEILA", "KATELYNN", "KAI", "JONE", "JOANE", - "JI", "JAYNA", "JANELLA", "JA", "HUE", "HERTHA", "FRANCENE", "ELINORE", "DESPINA", "DELSIE", - "DEEDRA", "CLEMENCIA", "CARRY", "CAROLIN", "CARLOS", "BULAH", "BRITTANIE", "BOK", "BLONDELL", "BIBI", - "BEAULAH", "BEATA", "ANNITA", "AGRIPINA", "VIRGEN", "VALENE", "UN", "TWANDA", "TOMMYE", "TOI", - "TARRA", "TARI", "TAMMERA", "SHAKIA", "SADYE", "RUTHANNE", "ROCHEL", "RIVKA", "PURA", "NENITA", - "NATISHA", "MING", "MERRILEE", "MELODEE", "MARVIS", "LUCILLA", "LEENA", 
"LAVETA", "LARITA", "LANIE", - "KEREN", "ILEEN", "GEORGEANN", "GENNA", "GENESIS", "FRIDA", "EWA", "EUFEMIA", "EMELY", "ELA", - "EDYTH", "DEONNA", "DEADRA", "DARLENA", "CHANELL", "CHAN", "CATHERN", "CASSONDRA", "CASSAUNDRA", "BERNARDA", - "BERNA", "ARLINDA", "ANAMARIA", "ALBERT", "WESLEY", "VERTIE", "VALERI", "TORRI", "TATYANA", "STASIA", - "SHERISE", "SHERILL", "SEASON", "SCOTTIE", "SANDA", "RUTHE", "ROSY", "ROBERTO", "ROBBI", "RANEE", - "QUYEN", "PEARLY", "PALMIRA", "ONITA", "NISHA", "NIESHA", "NIDA", "NEVADA", "NAM", "MERLYN", - "MAYOLA", "MARYLOUISE", "MARYLAND", "MARX", "MARTH", "MARGENE", "MADELAINE", "LONDA", "LEONTINE", "LEOMA", - "LEIA", "LAWRENCE", "LAURALEE", "LANORA", "LAKITA", "KIYOKO", "KETURAH", "KATELIN", "KAREEN", "JONIE", - "JOHNETTE", "JENEE", "JEANETT", "IZETTA", "HIEDI", "HEIKE", "HASSIE", "HAROLD", "GIUSEPPINA", "GEORGANN", - "FIDELA", "FERNANDE", "ELWANDA", "ELLAMAE", "ELIZ", "DUSTI", "DOTTY", "CYNDY", "CORALIE", "CELESTA", - "ARGENTINA", "ALVERTA", "XENIA", "WAVA", "VANETTA", "TORRIE", "TASHINA", "TANDY", "TAMBRA", "TAMA", - "STEPANIE", "SHILA", "SHAUNTA", "SHARAN", "SHANIQUA", "SHAE", "SETSUKO", "SERAFINA", "SANDEE", "ROSAMARIA", - "PRISCILA", "OLINDA", "NADENE", "MUOI", "MICHELINA", "MERCEDEZ", "MARYROSE", "MARIN", "MARCENE", "MAO", - "MAGALI", "MAFALDA", "LOGAN", "LINN", "LANNIE", "KAYCE", "KAROLINE", "KAMILAH", "KAMALA", "JUSTA", - "JOLINE", "JENNINE", "JACQUETTA", "IRAIDA", "GERALD", "GEORGEANNA", "FRANCHESCA", "FAIRY", "EMELINE", "ELANE", - "EHTEL", "EARLIE", "DULCIE", "DALENE", "CRIS", "CLASSIE", "CHERE", "CHARIS", "CAROYLN", "CARMINA", - "CARITA", "BRIAN", "BETHANIE", "AYAKO", "ARICA", "AN", "ALYSA", "ALESSANDRA", "AKILAH", "ADRIEN", - "ZETTA", "YOULANDA", "YELENA", "YAHAIRA", "XUAN", "WENDOLYN", "VICTOR", "TIJUANA", "TERRELL", "TERINA", - "TERESIA", "SUZI", "SUNDAY", "SHERELL", "SHAVONDA", "SHAUNTE", "SHARDA", "SHAKITA", "SENA", "RYANN", - "RUBI", "RIVA", "REGINIA", "REA", "RACHAL", "PARTHENIA", "PAMULA", "MONNIE", "MONET", 
"MICHAELE", - "MELIA", "MARINE", "MALKA", "MAISHA", "LISANDRA", "LEO", "LEKISHA", "LEAN", "LAURENCE", "LAKENDRA", - "KRYSTIN", "KORTNEY", "KIZZIE", "KITTIE", "KERA", "KENDAL", "KEMBERLY", "KANISHA", "JULENE", "JULE", - "JOSHUA", "JOHANNE", "JEFFREY", "JAMEE", "HAN", "HALLEY", "GIDGET", "GALINA", "FREDRICKA", "FLETA", - "FATIMAH", "EUSEBIA", "ELZA", "ELEONORE", "DORTHEY", "DORIA", "DONELLA", "DINORAH", "DELORSE", "CLARETHA", - "CHRISTINIA", "CHARLYN", "BONG", "BELKIS", "AZZIE", "ANDERA", "AIKO", "ADENA", "YER", "YAJAIRA", - "WAN", "VANIA", "ULRIKE", "TOSHIA", "TIFANY", "STEFANY", "SHIZUE", "SHENIKA", "SHAWANNA", "SHAROLYN", - "SHARILYN", "SHAQUANA", "SHANTAY", "SEE", "ROZANNE", "ROSELEE", "RICKIE", "REMONA", "REANNA", "RAELENE", - "QUINN", "PHUNG", "PETRONILA", "NATACHA", "NANCEY", "MYRL", "MIYOKO", "MIESHA", "MERIDETH", "MARVELLA", - "MARQUITTA", "MARHTA", "MARCHELLE", "LIZETH", "LIBBIE", "LAHOMA", "LADAWN", "KINA", "KATHELEEN", "KATHARYN", - "KARISA", "KALEIGH", "JUNIE", "JULIEANN", "JOHNSIE", "JANEAN", "JAIMEE", "JACKQUELINE", "HISAKO", "HERMA", - "HELAINE", "GWYNETH", "GLENN", "GITA", "EUSTOLIA", "EMELINA", "ELIN", "EDRIS", "DONNETTE", "DONNETTA", - "DIERDRE", "DENAE", "DARCEL", "CLAUDE", "CLARISA", "CINDERELLA", "CHIA", "CHARLESETTA", "CHARITA", "CELSA", - "CASSY", "CASSI", "CARLEE", "BRUNA", "BRITTANEY", "BRANDE", "BILLI", "BAO", "ANTONETTA", "ANGLA", - "ANGELYN", "ANALISA", "ALANE", "WENONA", "WENDIE", "VERONIQUE", "VANNESA", "TOBIE", "TEMPIE", "SUMIKO", - "SULEMA", "SPARKLE", "SOMER", "SHEBA", "SHAYNE", "SHARICE", "SHANEL", "SHALON", "SAGE", "ROY", - "ROSIO", "ROSELIA", "RENAY", "REMA", "REENA", "PORSCHE", "PING", "PEG", "OZIE", "ORETHA", - "ORALEE", "ODA", "NU", "NGAN", "NAKESHA", "MILLY", "MARYBELLE", "MARLIN", "MARIS", "MARGRETT", - "MARAGARET", "MANIE", "LURLENE", "LILLIA", "LIESELOTTE", "LAVELLE", "LASHAUNDA", "LAKEESHA", "KEITH", "KAYCEE", - "KALYN", "JOYA", "JOETTE", "JENAE", "JANIECE", "ILLA", "GRISEL", "GLAYDS", "GENEVIE", "GALA", - "FREDDA", 
"FRED", "ELMER", "ELEONOR", "DEBERA", "DEANDREA", "DAN", "CORRINNE", "CORDIA", "CONTESSA", - "COLENE", "CLEOTILDE", "CHARLOTT", "CHANTAY", "CECILLE", "BEATRIS", "AZALEE", "ARLEAN", "ARDATH", "ANJELICA", - "ANJA", "ALFREDIA", "ALEISHA", "ADAM", "ZADA", "YUONNE", "XIAO", "WILLODEAN", "WHITLEY", "VENNIE", - "VANNA", "TYISHA", "TOVA", "TORIE", "TONISHA", "TILDA", "TIEN", "TEMPLE", "SIRENA", "SHERRIL", - "SHANTI", "SHAN", "SENAIDA", "SAMELLA", "ROBBYN", "RENDA", "REITA", "PHEBE", "PAULITA", "NOBUKO", - "NGUYET", "NEOMI", "MOON", "MIKAELA", "MELANIA", "MAXIMINA", "MARG", "MAISIE", "LYNNA", "LILLI", - "LAYNE", "LASHAUN", "LAKENYA", "LAEL", "KIRSTIE", "KATHLINE", "KASHA", "KARLYN", "KARIMA", "JOVAN", - "JOSEFINE", "JENNELL", "JACQUI", "JACKELYN", "HYO", "HIEN", "GRAZYNA", "FLORRIE", "FLORIA", "ELEONORA", - "DWANA", "DORLA", "DONG", "DELMY", "DEJA", "DEDE", "DANN", "CRYSTA", "CLELIA", "CLARIS", - "CLARENCE", "CHIEKO", "CHERLYN", "CHERELLE", "CHARMAIN", "CHARA", "CAMMY", "BEE", "ARNETTE", "ARDELLE", - "ANNIKA", "AMIEE", "AMEE", "ALLENA", "YVONE", "YUKI", "YOSHIE", "YEVETTE", "YAEL", "WILLETTA", - "VONCILE", "VENETTA", "TULA", "TONETTE", "TIMIKA", "TEMIKA", "TELMA", "TEISHA", "TAREN", "TA", - "STACEE", "SHIN", "SHAWNTA", "SATURNINA", "RICARDA", "POK", "PASTY", "ONIE", "NUBIA", "MORA", - "MIKE", "MARIELLE", "MARIELLA", "MARIANELA", "MARDELL", "MANY", "LUANNA", "LOISE", "LISABETH", "LINDSY", - "LILLIANA", "LILLIAM", "LELAH", "LEIGHA", "LEANORA", "LANG", "KRISTEEN", "KHALILAH", "KEELEY", "KANDRA", - "JUNKO", "JOAQUINA", "JERLENE", "JANI", "JAMIKA", "JAME", "HSIU", "HERMILA", "GOLDEN", "GENEVIVE", - "EVIA", "EUGENA", "EMMALINE", "ELFREDA", "ELENE", "DONETTE", "DELCIE", "DEEANNA", "DARCEY", "CUC", - "CLARINDA", "CIRA", "CHAE", "CELINDA", "CATHERYN", "CATHERIN", "CASIMIRA", "CARMELIA", "CAMELLIA", "BREANA", - "BOBETTE", "BERNARDINA", "BEBE", "BASILIA", "ARLYNE", "AMAL", "ALAYNA", "ZONIA", "ZENIA", "YURIKO", - "YAEKO", "WYNELL", "WILLOW", "WILLENA", "VERNIA", "TU", "TRAVIS", "TORA", 
"TERRILYN", "TERICA", - "TENESHA", "TAWNA", "TAJUANA", "TAINA", "STEPHNIE", "SONA", "SOL", "SINA", "SHONDRA", "SHIZUKO", - "SHERLENE", "SHERICE", "SHARIKA", "ROSSIE", "ROSENA", "RORY", "RIMA", "RIA", "RHEBA", "RENNA", - "PETER", "NATALYA", "NANCEE", "MELODI", "MEDA", "MAXIMA", "MATHA", "MARKETTA", "MARICRUZ", "MARCELENE", - "MALVINA", "LUBA", "LOUETTA", "LEIDA", "LECIA", "LAURAN", "LASHAWNA", "LAINE", "KHADIJAH", "KATERINE", - "KASI", "KALLIE", "JULIETTA", "JESUSITA", "JESTINE", "JESSIA", "JEREMY", "JEFFIE", "JANYCE", "ISADORA", - "GEORGIANNE", "FIDELIA", "EVITA", "EURA", "EULAH", "ESTEFANA", "ELSY", "ELIZABET", "ELADIA", "DODIE", - "DION", "DIA", "DENISSE", "DELORAS", "DELILA", "DAYSI", "DAKOTA", "CURTIS", "CRYSTLE", "CONCHA", - "COLBY", "CLARETTA", "CHU", "CHRISTIA", "CHARLSIE", "CHARLENA", "CARYLON", "BETTYANN", "ASLEY", "ASHLEA", - "AMIRA", "AI", "AGUEDA", "AGNUS", "YUETTE", "VINITA", "VICTORINA", "TYNISHA", "TREENA", "TOCCARA", - "TISH", "THOMASENA", "TEGAN", "SOILA", "SHILOH", "SHENNA", "SHARMAINE", "SHANTAE", "SHANDI", "SEPTEMBER", - "SARAN", "SARAI", "SANA", "SAMUEL", "SALLEY", "ROSETTE", "ROLANDE", "REGINE", "OTELIA", "OSCAR", - "OLEVIA", "NICHOLLE", "NECOLE", "NAIDA", "MYRTA", "MYESHA", "MITSUE", "MINTA", "MERTIE", "MARGY", - "MAHALIA", "MADALENE", "LOVE", "LOURA", "LOREAN", "LEWIS", "LESHA", "LEONIDA", "LENITA", "LAVONE", - "LASHELL", "LASHANDRA", "LAMONICA", "KIMBRA", "KATHERINA", "KARRY", "KANESHA", "JULIO", "JONG", "JENEVA", - "JAQUELYN", "HWA", "GILMA", "GHISLAINE", "GERTRUDIS", "FRANSISCA", "FERMINA", "ETTIE", "ETSUKO", "ELLIS", - "ELLAN", "ELIDIA", "EDRA", "DORETHEA", "DOREATHA", "DENYSE", "DENNY", "DEETTA", "DAINE", "CYRSTAL", - "CORRIN", "CAYLA", "CARLITA", "CAMILA", "BURMA", "BULA", "BUENA", "BLAKE", "BARABARA", "AVRIL", - "AUSTIN", "ALAINE", "ZANA", "WILHEMINA", "WANETTA", "VIRGIL", "VI", "VERONIKA", "VERNON", "VERLINE", - "VASILIKI", "TONITA", "TISA", "TEOFILA", "TAYNA", "TAUNYA", "TANDRA", "TAKAKO", "SUNNI", "SUANNE", - "SIXTA", "SHARELL", 
"SEEMA", "RUSSELL", "ROSENDA", "ROBENA", "RAYMONDE", "PEI", "PAMILA", "OZELL", - "NEIDA", "NEELY", "MISTIE", "MICHA", "MERISSA", "MAURITA", "MARYLN", "MARYETTA", "MARSHALL", "MARCELL", - "MALENA", "MAKEDA", "MADDIE", "LOVETTA", "LOURIE", "LORRINE", "LORILEE", "LESTER", "LAURENA", "LASHAY", - "LARRAINE", "LAREE", "LACRESHA", "KRISTLE", "KRISHNA", "KEVA", "KEIRA", "KAROLE", "JOIE", "JINNY", - "JEANNETTA", "JAMA", "HEIDY", "GILBERTE", "GEMA", "FAVIOLA", "EVELYNN", "ENDA", "ELLI", "ELLENA", - "DIVINA", "DAGNY", "COLLENE", "CODI", "CINDIE", "CHASSIDY", "CHASIDY", "CATRICE", "CATHERINA", "CASSEY", - "CAROLL", "CARLENA", "CANDRA", "CALISTA", "BRYANNA", "BRITTENY", "BEULA", "BARI", "AUDRIE", "AUDRIA", - "ARDELIA", "ANNELLE", "ANGILA", "ALONA", "ALLYN", "DOUGLAS", "ROGER", "JONATHAN", "RALPH", "NICHOLAS", - "BENJAMIN", "BRUCE", "HARRY", "WAYNE", "STEVE", "HOWARD", "ERNEST", "PHILLIP", "TODD", "CRAIG", - "ALAN", "PHILIP", "EARL", "DANNY", "BRYAN", "STANLEY", "LEONARD", "NATHAN", "MANUEL", "RODNEY", - "MARVIN", "VINCENT", "JEFFERY", "JEFF", "CHAD", "JACOB", "ALFRED", "BRADLEY", "HERBERT", "FREDERICK", - "EDWIN", "DON", "RICKY", "RANDALL", "BARRY", "BERNARD", "LEROY", "MARCUS", "THEODORE", "CLIFFORD", - "MIGUEL", "JIM", "TOM", "CALVIN", "BILL", "LLOYD", "DEREK", "WARREN", "DARRELL", "JEROME", - "FLOYD", "ALVIN", "TIM", "GORDON", "GREG", "JORGE", "DUSTIN", "PEDRO", "DERRICK", "ZACHARY", - "HERMAN", "GLEN", "HECTOR", "RICARDO", "RICK", "BRENT", "RAMON", "GILBERT", "MARC", "REGINALD", - "RUBEN", "NATHANIEL", "RAFAEL", "EDGAR", "MILTON", "RAUL", "BEN", "CHESTER", "DUANE", "FRANKLIN", - "BRAD", "RON", "ROLAND", "ARNOLD", "HARVEY", "JARED", "ERIK", "DARRYL", "NEIL", "JAVIER", - "FERNANDO", "CLINTON", "TED", "MATHEW", "TYRONE", "DARREN", "LANCE", "KURT", "ALLAN", "NELSON", - "GUY", "CLAYTON", "HUGH", "MAX", "DWAYNE", "DWIGHT", "ARMANDO", "FELIX", "EVERETT", "IAN", - "WALLACE", "KEN", "BOB", "ALFREDO", "ALBERTO", "DAVE", "IVAN", "BYRON", "ISAAC", "MORRIS", - "CLIFTON", "WILLARD", 
"ROSS", "ANDY", "SALVADOR", "KIRK", "SERGIO", "SETH", "KENT", "TERRANCE", - "EDUARDO", "TERRENCE", "ENRIQUE", "WADE", "STUART", "FREDRICK", "ARTURO", "ALEJANDRO", "NICK", "LUTHER", - "WENDELL", "JEREMIAH", "JULIUS", "OTIS", "TREVOR", "OLIVER", "LUKE", "HOMER", "GERARD", "DOUG", - "KENNY", "HUBERT", "LYLE", "MATT", "ALFONSO", "ORLANDO", "REX", "CARLTON", "ERNESTO", "NEAL", - "PABLO", "LORENZO", "OMAR", "WILBUR", "GRANT", "HORACE", "RODERICK", "ABRAHAM", "WILLIS", "RICKEY", - "ANDRES", "CESAR", "JOHNATHAN", "MALCOLM", "RUDOLPH", "DAMON", "KELVIN", "PRESTON", "ALTON", "ARCHIE", - "MARCO", "WM", "PETE", "RANDOLPH", "GARRY", "GEOFFREY", "JONATHON", "FELIPE", "GERARDO", "ED", - "DOMINIC", "DELBERT", "COLIN", "GUILLERMO", "EARNEST", "LUCAS", "BENNY", "SPENCER", "RODOLFO", "MYRON", - "EDMUND", "GARRETT", "SALVATORE", "CEDRIC", "LOWELL", "GREGG", "SHERMAN", "WILSON", "SYLVESTER", "ROOSEVELT", - "ISRAEL", "JERMAINE", "FORREST", "WILBERT", "LELAND", "SIMON", "CLARK", "IRVING", "BRYANT", "OWEN", - "RUFUS", "WOODROW", "KRISTOPHER", "MACK", "LEVI", "MARCOS", "GUSTAVO", "JAKE", "LIONEL", "GILBERTO", - "CLINT", "NICOLAS", "ISMAEL", "ORVILLE", "ERVIN", "DEWEY", "AL", "WILFRED", "JOSH", "HUGO", - "IGNACIO", "CALEB", "TOMAS", "SHELDON", "ERICK", "STEWART", "DOYLE", "DARREL", "ROGELIO", "TERENCE", - "SANTIAGO", "ALONZO", "ELIAS", "BERT", "ELBERT", "RAMIRO", "CONRAD", "NOAH", "GRADY", "PHIL", - "CORNELIUS", "LAMAR", "ROLANDO", "CLAY", "PERCY", "DEXTER", "BRADFORD", "DARIN", "AMOS", "MOSES", - "IRVIN", "SAUL", "ROMAN", "RANDAL", "TIMMY", "DARRIN", "WINSTON", "BRENDAN", "ABEL", "DOMINICK", - "BOYD", "EMILIO", "ELIJAH", "DOMINGO", "EMMETT", "MARLON", "EMANUEL", "JERALD", "EDMOND", "EMIL", - "DEWAYNE", "WILL", "OTTO", "TEDDY", "REYNALDO", "BRET", "JESS", "TRENT", "HUMBERTO", "EMMANUEL", - "STEPHAN", "VICENTE", "LAMONT", "GARLAND", "MILES", "EFRAIN", "HEATH", "RODGER", "HARLEY", "ETHAN", - "ELDON", "ROCKY", "PIERRE", "JUNIOR", "FREDDY", "ELI", "BRYCE", "ANTOINE", "STERLING", "CHASE", - 
"GROVER", "ELTON", "CLEVELAND", "DYLAN", "CHUCK", "DAMIAN", "REUBEN", "STAN", "AUGUST", "LEONARDO", - "JASPER", "RUSSEL", "ERWIN", "BENITO", "HANS", "MONTE", "BLAINE", "ERNIE", "CURT", "QUENTIN", - "AGUSTIN", "MURRAY", "JAMAL", "ADOLFO", "HARRISON", "TYSON", "BURTON", "BRADY", "ELLIOTT", "WILFREDO", - "BART", "JARROD", "VANCE", "DENIS", "DAMIEN", "JOAQUIN", "HARLAN", "DESMOND", "ELLIOT", "DARWIN", - "GREGORIO", "BUDDY", "XAVIER", "KERMIT", "ROSCOE", "ESTEBAN", "ANTON", "SOLOMON", "SCOTTY", "NORBERT", - "ELVIN", "WILLIAMS", "NOLAN", "ROD", "QUINTON", "HAL", "BRAIN", "ROB", "ELWOOD", "KENDRICK", - "DARIUS", "MOISES", "FIDEL", "THADDEUS", "CLIFF", "MARCEL", "JACKSON", "RAPHAEL", "BRYON", "ARMAND", - "ALVARO", "JEFFRY", "DANE", "JOESPH", "THURMAN", "NED", "RUSTY", "MONTY", "FABIAN", "REGGIE", - "MASON", "GRAHAM", "ISAIAH", "VAUGHN", "GUS", "LOYD", "DIEGO", "ADOLPH", "NORRIS", "MILLARD", - "ROCCO", "GONZALO", "DERICK", "RODRIGO", "WILEY", "RIGOBERTO", "ALPHONSO", "TY", "NOE", "VERN", - "REED", "JEFFERSON", "ELVIS", "BERNARDO", "MAURICIO", "HIRAM", "DONOVAN", "BASIL", "RILEY", "NICKOLAS", - "MAYNARD", "SCOT", "VINCE", "QUINCY", "EDDY", "SEBASTIAN", "FEDERICO", "ULYSSES", "HERIBERTO", "DONNELL", - "COLE", "DAVIS", "GAVIN", "EMERY", "WARD", "ROMEO", "JAYSON", "DANTE", "CLEMENT", "COY", - "MAXWELL", "JARVIS", "BRUNO", "ISSAC", "DUDLEY", "BROCK", "SANFORD", "CARMELO", "BARNEY", "NESTOR", - "STEFAN", "DONNY", "ART", "LINWOOD", "BEAU", "WELDON", "GALEN", "ISIDRO", "TRUMAN", "DELMAR", - "JOHNATHON", "SILAS", "FREDERIC", "DICK", "IRWIN", "MERLIN", "CHARLEY", "MARCELINO", "HARRIS", "CARLO", - "TRENTON", "KURTIS", "HUNTER", "AURELIO", "WINFRED", "VITO", "COLLIN", "DENVER", "CARTER", "LEONEL", - "EMORY", "PASQUALE", "MOHAMMAD", "MARIANO", "DANIAL", "LANDON", "DIRK", "BRANDEN", "ADAN", "BUFORD", - "GERMAN", "WILMER", "EMERSON", "ZACHERY", "FLETCHER", "JACQUES", "ERROL", "DALTON", "MONROE", "JOSUE", - "EDWARDO", "BOOKER", "WILFORD", "SONNY", "SHELTON", "CARSON", "THERON", "RAYMUNDO", 
"DAREN", "HOUSTON", - "ROBBY", "LINCOLN", "GENARO", "BENNETT", "OCTAVIO", "CORNELL", "HUNG", "ARRON", "ANTONY", "HERSCHEL", - "GIOVANNI", "GARTH", "CYRUS", "CYRIL", "RONNY", "LON", "FREEMAN", "DUNCAN", "KENNITH", "CARMINE", - "ERICH", "CHADWICK", "WILBURN", "RUSS", "REID", "MYLES", "ANDERSON", "MORTON", "JONAS", "FOREST", - "MITCHEL", "MERVIN", "ZANE", "RICH", "JAMEL", "LAZARO", "ALPHONSE", "RANDELL", "MAJOR", "JARRETT", - "BROOKS", "ABDUL", "LUCIANO", "SEYMOUR", "EUGENIO", "MOHAMMED", "VALENTIN", "CHANCE", "ARNULFO", "LUCIEN", - "FERDINAND", "THAD", "EZRA", "ALDO", "RUBIN", "ROYAL", "MITCH", "EARLE", "ABE", "WYATT", - "MARQUIS", "LANNY", "KAREEM", "JAMAR", "BORIS", "ISIAH", "EMILE", "ELMO", "ARON", "LEOPOLDO", - "EVERETTE", "JOSEF", "ELOY", "RODRICK", "REINALDO", "LUCIO", "JERROD", "WESTON", "HERSHEL", "BARTON", - "PARKER", "LEMUEL", "BURT", "JULES", "GIL", "ELISEO", "AHMAD", "NIGEL", "EFREN", "ANTWAN", - "ALDEN", "MARGARITO", "COLEMAN", "DINO", "OSVALDO", "LES", "DEANDRE", "NORMAND", "KIETH", "TREY", - "NORBERTO", "NAPOLEON", "JEROLD", "FRITZ", "ROSENDO", "MILFORD", "CHRISTOPER", "ALFONZO", "LYMAN", "JOSIAH", - "BRANT", "WILTON", "RICO", "JAMAAL", "DEWITT", "BRENTON", "OLIN", "FOSTER", "FAUSTINO", "CLAUDIO", - "JUDSON", "GINO", "EDGARDO", "ALEC", "TANNER", "JARRED", "DONN", "TAD", "PRINCE", "PORFIRIO", - "ODIS", "LENARD", "CHAUNCEY", "TOD", "MEL", "MARCELO", "KORY", "AUGUSTUS", "KEVEN", "HILARIO", - "BUD", "SAL", "ORVAL", "MAURO", "ZACHARIAH", "OLEN", "ANIBAL", "MILO", "JED", "DILLON", - "AMADO", "NEWTON", "LENNY", "RICHIE", "HORACIO", "BRICE", "MOHAMED", "DELMER", "DARIO", "REYES", - "MAC", "JONAH", "JERROLD", "ROBT", "HANK", "RUPERT", "ROLLAND", "KENTON", "DAMION", "ANTONE", - "WALDO", "FREDRIC", "BRADLY", "KIP", "BURL", "WALKER", "TYREE", "JEFFEREY", "AHMED", "WILLY", - "STANFORD", "OREN", "NOBLE", "MOSHE", "MIKEL", "ENOCH", "BRENDON", "QUINTIN", "JAMISON", "FLORENCIO", - "DARRICK", "TOBIAS", "HASSAN", "GIUSEPPE", "DEMARCUS", "CLETUS", "TYRELL", "LYNDON", 
"KEENAN", "WERNER", - "GERALDO", "COLUMBUS", "CHET", "BERTRAM", "MARKUS", "HUEY", "HILTON", "DWAIN", "DONTE", "TYRON", - "OMER", "ISAIAS", "HIPOLITO", "FERMIN", "ADALBERTO", "BO", "BARRETT", "TEODORO", "MCKINLEY", "MAXIMO", - "GARFIELD", "RALEIGH", "LAWERENCE", "ABRAM", "RASHAD", "KING", "EMMITT", "DARON", "SAMUAL", "MIQUEL", - "EUSEBIO", "DOMENIC", "DARRON", "BUSTER", "WILBER", "RENATO", "JC", "HOYT", "HAYWOOD", "EZEKIEL", - "CHAS", "FLORENTINO", "ELROY", "CLEMENTE", "ARDEN", "NEVILLE", "EDISON", "DESHAWN", "NATHANIAL", "JORDON", - "DANILO", "CLAUD", "SHERWOOD", "RAYMON", "RAYFORD", "CRISTOBAL", "AMBROSE", "TITUS", "HYMAN", "FELTON", - "EZEQUIEL", "ERASMO", "STANTON", "LONNY", "LEN", "IKE", "MILAN", "LINO", "JAROD", "HERB", - "ANDREAS", "WALTON", "RHETT", "PALMER", "DOUGLASS", "CORDELL", "OSWALDO", "ELLSWORTH", "VIRGILIO", "TONEY", - "NATHANAEL", "DEL", "BENEDICT", "MOSE", "JOHNSON", "ISREAL", "GARRET", "FAUSTO", "ASA", "ARLEN", - "ZACK", "WARNER", "MODESTO", "FRANCESCO", "MANUAL", "GAYLORD", "GASTON", "FILIBERTO", "DEANGELO", "MICHALE", - "GRANVILLE", "WES", "MALIK", "ZACKARY", "TUAN", "ELDRIDGE", "CRISTOPHER", "CORTEZ", "ANTIONE", "MALCOM", - "LONG", "KOREY", "JOSPEH", "COLTON", "WAYLON", "VON", "HOSEA", "SHAD", "SANTO", "RUDOLF", - "ROLF", "REY", "RENALDO", "MARCELLUS", "LUCIUS", "KRISTOFER", "BOYCE", "BENTON", "HAYDEN", "HARLAND", - "ARNOLDO", "RUEBEN", "LEANDRO", "KRAIG", "JERRELL", "JEROMY", "HOBERT", "CEDRICK", "ARLIE", "WINFORD", - "WALLY", "LUIGI", "KENETH", "JACINTO", "GRAIG", "FRANKLYN", "EDMUNDO", "SID", "PORTER", "LEIF", - "JERAMY", "BUCK", "WILLIAN", "VINCENZO", "SHON", "LYNWOOD", "JERE", "HAI", "ELDEN", "DORSEY", - "DARELL", "BRODERICK", "ALONSO" - ] - total_sum = 0 - temp_sum = 0 - name.sort() - for i in range(len(name)): - for j in name[i]: - temp_sum += ord(j) - ord('A') + 1 - total_sum += (i + 1) * temp_sum - temp_sum = 0 - print(total_sum) + name = [ + "MARY", "PATRICIA", "LINDA", "BARBARA", "ELIZABETH", "JENNIFER", "MARIA", "SUSAN", 
"MARGARET", "DOROTHY", + "LISA", "NANCY", "KAREN", "BETTY", "HELEN", "SANDRA", "DONNA", "CAROL", "RUTH", "SHARON", + "MICHELLE", "LAURA", "SARAH", "KIMBERLY", "DEBORAH", "JESSICA", "SHIRLEY", "CYNTHIA", "ANGELA", "MELISSA", + "BRENDA", "AMY", "ANNA", "REBECCA", "VIRGINIA", "KATHLEEN", "PAMELA", "MARTHA", "DEBRA", "AMANDA", + "STEPHANIE", "CAROLYN", "CHRISTINE", "MARIE", "JANET", "CATHERINE", "FRANCES", "ANN", "JOYCE", "DIANE", + "ALICE", "JULIE", "HEATHER", "TERESA", "DORIS", "GLORIA", "EVELYN", "JEAN", "CHERYL", "MILDRED", + "KATHERINE", "JOAN", "ASHLEY", "JUDITH", "ROSE", "JANICE", "KELLY", "NICOLE", "JUDY", "CHRISTINA", + "KATHY", "THERESA", "BEVERLY", "DENISE", "TAMMY", "IRENE", "JANE", "LORI", "RACHEL", "MARILYN", + "ANDREA", "KATHRYN", "LOUISE", "SARA", "ANNE", "JACQUELINE", "WANDA", "BONNIE", "JULIA", "RUBY", + "LOIS", "TINA", "PHYLLIS", "NORMA", "PAULA", "DIANA", "ANNIE", "LILLIAN", "EMILY", "ROBIN", + "PEGGY", "CRYSTAL", "GLADYS", "RITA", "DAWN", "CONNIE", "FLORENCE", "TRACY", "EDNA", "TIFFANY", + "CARMEN", "ROSA", "CINDY", "GRACE", "WENDY", "VICTORIA", "EDITH", "KIM", "SHERRY", "SYLVIA", + "JOSEPHINE", "THELMA", "SHANNON", "SHEILA", "ETHEL", "ELLEN", "ELAINE", "MARJORIE", "CARRIE", "CHARLOTTE", + "MONICA", "ESTHER", "PAULINE", "EMMA", "JUANITA", "ANITA", "RHONDA", "HAZEL", "AMBER", "EVA", + "DEBBIE", "APRIL", "LESLIE", "CLARA", "LUCILLE", "JAMIE", "JOANNE", "ELEANOR", "VALERIE", "DANIELLE", + "MEGAN", "ALICIA", "SUZANNE", "MICHELE", "GAIL", "BERTHA", "DARLENE", "VERONICA", "JILL", "ERIN", + "GERALDINE", "LAUREN", "CATHY", "JOANN", "LORRAINE", "LYNN", "SALLY", "REGINA", "ERICA", "BEATRICE", + "DOLORES", "BERNICE", "AUDREY", "YVONNE", "ANNETTE", "JUNE", "SAMANTHA", "MARION", "DANA", "STACY", + "ANA", "RENEE", "IDA", "VIVIAN", "ROBERTA", "HOLLY", "BRITTANY", "MELANIE", "LORETTA", "YOLANDA", + "JEANETTE", "LAURIE", "KATIE", "KRISTEN", "VANESSA", "ALMA", "SUE", "ELSIE", "BETH", "JEANNE", + "VICKI", "CARLA", "TARA", "ROSEMARY", "EILEEN", "TERRI", "GERTRUDE", 
"LUCY", "TONYA", "ELLA", + "STACEY", "WILMA", "GINA", "KRISTIN", "JESSIE", "NATALIE", "AGNES", "VERA", "WILLIE", "CHARLENE", + "BESSIE", "DELORES", "MELINDA", "PEARL", "ARLENE", "MAUREEN", "COLLEEN", "ALLISON", "TAMARA", "JOY", + "GEORGIA", "CONSTANCE", "LILLIE", "CLAUDIA", "JACKIE", "MARCIA", "TANYA", "NELLIE", "MINNIE", "MARLENE", + "HEIDI", "GLENDA", "LYDIA", "VIOLA", "COURTNEY", "MARIAN", "STELLA", "CAROLINE", "DORA", "JO", + "VICKIE", "MATTIE", "TERRY", "MAXINE", "IRMA", "MABEL", "MARSHA", "MYRTLE", "LENA", "CHRISTY", + "DEANNA", "PATSY", "HILDA", "GWENDOLYN", "JENNIE", "NORA", "MARGIE", "NINA", "CASSANDRA", "LEAH", + "PENNY", "KAY", "PRISCILLA", "NAOMI", "CAROLE", "BRANDY", "OLGA", "BILLIE", "DIANNE", "TRACEY", + "LEONA", "JENNY", "FELICIA", "SONIA", "MIRIAM", "VELMA", "BECKY", "BOBBIE", "VIOLET", "KRISTINA", + "TONI", "MISTY", "MAE", "SHELLY", "DAISY", "RAMONA", "SHERRI", "ERIKA", "KATRINA", "CLAIRE", + "LINDSEY", "LINDSAY", "GENEVA", "GUADALUPE", "BELINDA", "MARGARITA", "SHERYL", "CORA", "FAYE", "ADA", + "NATASHA", "SABRINA", "ISABEL", "MARGUERITE", "HATTIE", "HARRIET", "MOLLY", "CECILIA", "KRISTI", "BRANDI", + "BLANCHE", "SANDY", "ROSIE", "JOANNA", "IRIS", "EUNICE", "ANGIE", "INEZ", "LYNDA", "MADELINE", + "AMELIA", "ALBERTA", "GENEVIEVE", "MONIQUE", "JODI", "JANIE", "MAGGIE", "KAYLA", "SONYA", "JAN", + "LEE", "KRISTINE", "CANDACE", "FANNIE", "MARYANN", "OPAL", "ALISON", "YVETTE", "MELODY", "LUZ", + "SUSIE", "OLIVIA", "FLORA", "SHELLEY", "KRISTY", "MAMIE", "LULA", "LOLA", "VERNA", "BEULAH", + "ANTOINETTE", "CANDICE", "JUANA", "JEANNETTE", "PAM", "KELLI", "HANNAH", "WHITNEY", "BRIDGET", "KARLA", + "CELIA", "LATOYA", "PATTY", "SHELIA", "GAYLE", "DELLA", "VICKY", "LYNNE", "SHERI", "MARIANNE", + "KARA", "JACQUELYN", "ERMA", "BLANCA", "MYRA", "LETICIA", "PAT", "KRISTA", "ROXANNE", "ANGELICA", + "JOHNNIE", "ROBYN", "FRANCIS", "ADRIENNE", "ROSALIE", "ALEXANDRA", "BROOKE", "BETHANY", "SADIE", "BERNADETTE", + "TRACI", "JODY", "KENDRA", "JASMINE", "NICHOLE", 
"RACHAEL", "CHELSEA", "MABLE", "ERNESTINE", "MURIEL", + "MARCELLA", "ELENA", "KRYSTAL", "ANGELINA", "NADINE", "KARI", "ESTELLE", "DIANNA", "PAULETTE", "LORA", + "MONA", "DOREEN", "ROSEMARIE", "ANGEL", "DESIREE", "ANTONIA", "HOPE", "GINGER", "JANIS", "BETSY", + "CHRISTIE", "FREDA", "MERCEDES", "MEREDITH", "LYNETTE", "TERI", "CRISTINA", "EULA", "LEIGH", "MEGHAN", + "SOPHIA", "ELOISE", "ROCHELLE", "GRETCHEN", "CECELIA", "RAQUEL", "HENRIETTA", "ALYSSA", "JANA", "KELLEY", + "GWEN", "KERRY", "JENNA", "TRICIA", "LAVERNE", "OLIVE", "ALEXIS", "TASHA", "SILVIA", "ELVIRA", + "CASEY", "DELIA", "SOPHIE", "KATE", "PATTI", "LORENA", "KELLIE", "SONJA", "LILA", "LANA", + "DARLA", "MAY", "MINDY", "ESSIE", "MANDY", "LORENE", "ELSA", "JOSEFINA", "JEANNIE", "MIRANDA", + "DIXIE", "LUCIA", "MARTA", "FAITH", "LELA", "JOHANNA", "SHARI", "CAMILLE", "TAMI", "SHAWNA", + "ELISA", "EBONY", "MELBA", "ORA", "NETTIE", "TABITHA", "OLLIE", "JAIME", "WINIFRED", "KRISTIE", + "MARINA", "ALISHA", "AIMEE", "RENA", "MYRNA", "MARLA", "TAMMIE", "LATASHA", "BONITA", "PATRICE", + "RONDA", "SHERRIE", "ADDIE", "FRANCINE", "DELORIS", "STACIE", "ADRIANA", "CHERI", "SHELBY", "ABIGAIL", + "CELESTE", "JEWEL", "CARA", "ADELE", "REBEKAH", "LUCINDA", "DORTHY", "CHRIS", "EFFIE", "TRINA", + "REBA", "SHAWN", "SALLIE", "AURORA", "LENORA", "ETTA", "LOTTIE", "KERRI", "TRISHA", "NIKKI", + "ESTELLA", "FRANCISCA", "JOSIE", "TRACIE", "MARISSA", "KARIN", "BRITTNEY", "JANELLE", "LOURDES", "LAUREL", + "HELENE", "FERN", "ELVA", "CORINNE", "KELSEY", "INA", "BETTIE", "ELISABETH", "AIDA", "CAITLIN", + "INGRID", "IVA", "EUGENIA", "CHRISTA", "GOLDIE", "CASSIE", "MAUDE", "JENIFER", "THERESE", "FRANKIE", + "DENA", "LORNA", "JANETTE", "LATONYA", "CANDY", "MORGAN", "CONSUELO", "TAMIKA", "ROSETTA", "DEBORA", + "CHERIE", "POLLY", "DINA", "JEWELL", "FAY", "JILLIAN", "DOROTHEA", "NELL", "TRUDY", "ESPERANZA", + "PATRICA", "KIMBERLEY", "SHANNA", "HELENA", "CAROLINA", "CLEO", "STEFANIE", "ROSARIO", "OLA", "JANINE", + "MOLLIE", "LUPE", "ALISA", 
"LOU", "MARIBEL", "SUSANNE", "BETTE", "SUSANA", "ELISE", "CECILE", + "ISABELLE", "LESLEY", "JOCELYN", "PAIGE", "JONI", "RACHELLE", "LEOLA", "DAPHNE", "ALTA", "ESTER", + "PETRA", "GRACIELA", "IMOGENE", "JOLENE", "KEISHA", "LACEY", "GLENNA", "GABRIELA", "KERI", "URSULA", + "LIZZIE", "KIRSTEN", "SHANA", "ADELINE", "MAYRA", "JAYNE", "JACLYN", "GRACIE", "SONDRA", "CARMELA", + "MARISA", "ROSALIND", "CHARITY", "TONIA", "BEATRIZ", "MARISOL", "CLARICE", "JEANINE", "SHEENA", "ANGELINE", + "FRIEDA", "LILY", "ROBBIE", "SHAUNA", "MILLIE", "CLAUDETTE", "CATHLEEN", "ANGELIA", "GABRIELLE", "AUTUMN", + "KATHARINE", "SUMMER", "JODIE", "STACI", "LEA", "CHRISTI", "JIMMIE", "JUSTINE", "ELMA", "LUELLA", + "MARGRET", "DOMINIQUE", "SOCORRO", "RENE", "MARTINA", "MARGO", "MAVIS", "CALLIE", "BOBBI", "MARITZA", + "LUCILE", "LEANNE", "JEANNINE", "DEANA", "AILEEN", "LORIE", "LADONNA", "WILLA", "MANUELA", "GALE", + "SELMA", "DOLLY", "SYBIL", "ABBY", "LARA", "DALE", "IVY", "DEE", "WINNIE", "MARCY", + "LUISA", "JERI", "MAGDALENA", "OFELIA", "MEAGAN", "AUDRA", "MATILDA", "LEILA", "CORNELIA", "BIANCA", + "SIMONE", "BETTYE", "RANDI", "VIRGIE", "LATISHA", "BARBRA", "GEORGINA", "ELIZA", "LEANN", "BRIDGETTE", + "RHODA", "HALEY", "ADELA", "NOLA", "BERNADINE", "FLOSSIE", "ILA", "GRETA", "RUTHIE", "NELDA", + "MINERVA", "LILLY", "TERRIE", "LETHA", "HILARY", "ESTELA", "VALARIE", "BRIANNA", "ROSALYN", "EARLINE", + "CATALINA", "AVA", "MIA", "CLARISSA", "LIDIA", "CORRINE", "ALEXANDRIA", "CONCEPCION", "TIA", "SHARRON", + "RAE", "DONA", "ERICKA", "JAMI", "ELNORA", "CHANDRA", "LENORE", "NEVA", "MARYLOU", "MELISA", + "TABATHA", "SERENA", "AVIS", "ALLIE", "SOFIA", "JEANIE", "ODESSA", "NANNIE", "HARRIETT", "LORAINE", + "PENELOPE", "MILAGROS", "EMILIA", "BENITA", "ALLYSON", "ASHLEE", "TANIA", "TOMMIE", "ESMERALDA", "KARINA", + "EVE", "PEARLIE", "ZELMA", "MALINDA", "NOREEN", "TAMEKA", "SAUNDRA", "HILLARY", "AMIE", "ALTHEA", + "ROSALINDA", "JORDAN", "LILIA", "ALANA", "GAY", "CLARE", "ALEJANDRA", "ELINOR", "MICHAEL", 
"LORRIE", + "JERRI", "DARCY", "EARNESTINE", "CARMELLA", "TAYLOR", "NOEMI", "MARCIE", "LIZA", "ANNABELLE", "LOUISA", + "EARLENE", "MALLORY", "CARLENE", "NITA", "SELENA", "TANISHA", "KATY", "JULIANNE", "JOHN", "LAKISHA", + "EDWINA", "MARICELA", "MARGERY", "KENYA", "DOLLIE", "ROXIE", "ROSLYN", "KATHRINE", "NANETTE", "CHARMAINE", + "LAVONNE", "ILENE", "KRIS", "TAMMI", "SUZETTE", "CORINE", "KAYE", "JERRY", "MERLE", "CHRYSTAL", + "LINA", "DEANNE", "LILIAN", "JULIANA", "ALINE", "LUANN", "KASEY", "MARYANNE", "EVANGELINE", "COLETTE", + "MELVA", "LAWANDA", "YESENIA", "NADIA", "MADGE", "KATHIE", "EDDIE", "OPHELIA", "VALERIA", "NONA", + "MITZI", "MARI", "GEORGETTE", "CLAUDINE", "FRAN", "ALISSA", "ROSEANN", "LAKEISHA", "SUSANNA", "REVA", + "DEIDRE", "CHASITY", "SHEREE", "CARLY", "JAMES", "ELVIA", "ALYCE", "DEIRDRE", "GENA", "BRIANA", + "ARACELI", "KATELYN", "ROSANNE", "WENDI", "TESSA", "BERTA", "MARVA", "IMELDA", "MARIETTA", "MARCI", + "LEONOR", "ARLINE", "SASHA", "MADELYN", "JANNA", "JULIETTE", "DEENA", "AURELIA", "JOSEFA", "AUGUSTA", + "LILIANA", "YOUNG", "CHRISTIAN", "LESSIE", "AMALIA", "SAVANNAH", "ANASTASIA", "VILMA", "NATALIA", "ROSELLA", + "LYNNETTE", "CORINA", "ALFREDA", "LEANNA", "CAREY", "AMPARO", "COLEEN", "TAMRA", "AISHA", "WILDA", + "KARYN", "CHERRY", "QUEEN", "MAURA", "MAI", "EVANGELINA", "ROSANNA", "HALLIE", "ERNA", "ENID", + "MARIANA", "LACY", "JULIET", "JACKLYN", "FREIDA", "MADELEINE", "MARA", "HESTER", "CATHRYN", "LELIA", + "CASANDRA", "BRIDGETT", "ANGELITA", "JANNIE", "DIONNE", "ANNMARIE", "KATINA", "BERYL", "PHOEBE", "MILLICENT", + "KATHERYN", "DIANN", "CARISSA", "MARYELLEN", "LIZ", "LAURI", "HELGA", "GILDA", "ADRIAN", "RHEA", + "MARQUITA", "HOLLIE", "TISHA", "TAMERA", "ANGELIQUE", "FRANCESCA", "BRITNEY", "KAITLIN", "LOLITA", "FLORINE", + "ROWENA", "REYNA", "TWILA", "FANNY", "JANELL", "INES", "CONCETTA", "BERTIE", "ALBA", "BRIGITTE", + "ALYSON", "VONDA", "PANSY", "ELBA", "NOELLE", "LETITIA", "KITTY", "DEANN", "BRANDIE", "LOUELLA", + "LETA", "FELECIA", 
"SHARLENE", "LESA", "BEVERLEY", "ROBERT", "ISABELLA", "HERMINIA", "TERRA", "CELINA", + "TORI", "OCTAVIA", "JADE", "DENICE", "GERMAINE", "SIERRA", "MICHELL", "CORTNEY", "NELLY", "DORETHA", + "SYDNEY", "DEIDRA", "MONIKA", "LASHONDA", "JUDI", "CHELSEY", "ANTIONETTE", "MARGOT", "BOBBY", "ADELAIDE", + "NAN", "LEEANN", "ELISHA", "DESSIE", "LIBBY", "KATHI", "GAYLA", "LATANYA", "MINA", "MELLISA", + "KIMBERLEE", "JASMIN", "RENAE", "ZELDA", "ELDA", "MA", "JUSTINA", "GUSSIE", "EMILIE", "CAMILLA", + "ABBIE", "ROCIO", "KAITLYN", "JESSE", "EDYTHE", "ASHLEIGH", "SELINA", "LAKESHA", "GERI", "ALLENE", + "PAMALA", "MICHAELA", "DAYNA", "CARYN", "ROSALIA", "SUN", "JACQULINE", "REBECA", "MARYBETH", "KRYSTLE", + "IOLA", "DOTTIE", "BENNIE", "BELLE", "AUBREY", "GRISELDA", "ERNESTINA", "ELIDA", "ADRIANNE", "DEMETRIA", + "DELMA", "CHONG", "JAQUELINE", "DESTINY", "ARLEEN", "VIRGINA", "RETHA", "FATIMA", "TILLIE", "ELEANORE", + "CARI", "TREVA", "BIRDIE", "WILHELMINA", "ROSALEE", "MAURINE", "LATRICE", "YONG", "JENA", "TARYN", + "ELIA", "DEBBY", "MAUDIE", "JEANNA", "DELILAH", "CATRINA", "SHONDA", "HORTENCIA", "THEODORA", "TERESITA", + "ROBBIN", "DANETTE", "MARYJANE", "FREDDIE", "DELPHINE", "BRIANNE", "NILDA", "DANNA", "CINDI", "BESS", + "IONA", "HANNA", "ARIEL", "WINONA", "VIDA", "ROSITA", "MARIANNA", "WILLIAM", "RACHEAL", "GUILLERMINA", + "ELOISA", "CELESTINE", "CAREN", "MALISSA", "LONA", "CHANTEL", "SHELLIE", "MARISELA", "LEORA", "AGATHA", + "SOLEDAD", "MIGDALIA", "IVETTE", "CHRISTEN", "ATHENA", "JANEL", "CHLOE", "VEDA", "PATTIE", "TESSIE", + "TERA", "MARILYNN", "LUCRETIA", "KARRIE", "DINAH", "DANIELA", "ALECIA", "ADELINA", "VERNICE", "SHIELA", + "PORTIA", "MERRY", "LASHAWN", "DEVON", "DARA", "TAWANA", "OMA", "VERDA", "CHRISTIN", "ALENE", + "ZELLA", "SANDI", "RAFAELA", "MAYA", "KIRA", "CANDIDA", "ALVINA", "SUZAN", "SHAYLA", "LYN", + "LETTIE", "ALVA", "SAMATHA", "ORALIA", "MATILDE", "MADONNA", "LARISSA", "VESTA", "RENITA", "INDIA", + "DELOIS", "SHANDA", "PHILLIS", "LORRI", "ERLINDA", "CRUZ", 
"CATHRINE", "BARB", "ZOE", "ISABELL", + "IONE", "GISELA", "CHARLIE", "VALENCIA", "ROXANNA", "MAYME", "KISHA", "ELLIE", "MELLISSA", "DORRIS", + "DALIA", "BELLA", "ANNETTA", "ZOILA", "RETA", "REINA", "LAURETTA", "KYLIE", "CHRISTAL", "PILAR", + "CHARLA", "ELISSA", "TIFFANI", "TANA", "PAULINA", "LEOTA", "BREANNA", "JAYME", "CARMEL", "VERNELL", + "TOMASA", "MANDI", "DOMINGA", "SANTA", "MELODIE", "LURA", "ALEXA", "TAMELA", "RYAN", "MIRNA", + "KERRIE", "VENUS", "NOEL", "FELICITA", "CRISTY", "CARMELITA", "BERNIECE", "ANNEMARIE", "TIARA", "ROSEANNE", + "MISSY", "CORI", "ROXANA", "PRICILLA", "KRISTAL", "JUNG", "ELYSE", "HAYDEE", "ALETHA", "BETTINA", + "MARGE", "GILLIAN", "FILOMENA", "CHARLES", "ZENAIDA", "HARRIETTE", "CARIDAD", "VADA", "UNA", "ARETHA", + "PEARLINE", "MARJORY", "MARCELA", "FLOR", "EVETTE", "ELOUISE", "ALINA", "TRINIDAD", "DAVID", "DAMARIS", + "CATHARINE", "CARROLL", "BELVA", "NAKIA", "MARLENA", "LUANNE", "LORINE", "KARON", "DORENE", "DANITA", + "BRENNA", "TATIANA", "SAMMIE", "LOUANN", "LOREN", "JULIANNA", "ANDRIA", "PHILOMENA", "LUCILA", "LEONORA", + "DOVIE", "ROMONA", "MIMI", "JACQUELIN", "GAYE", "TONJA", "MISTI", "JOE", "GENE", "CHASTITY", + "STACIA", "ROXANN", "MICAELA", "NIKITA", "MEI", "VELDA", "MARLYS", "JOHNNA", "AURA", "LAVERN", + "IVONNE", "HAYLEY", "NICKI", "MAJORIE", "HERLINDA", "GEORGE", "ALPHA", "YADIRA", "PERLA", "GREGORIA", + "DANIEL", "ANTONETTE", "SHELLI", "MOZELLE", "MARIAH", "JOELLE", "CORDELIA", "JOSETTE", "CHIQUITA", "TRISTA", + "LOUIS", "LAQUITA", "GEORGIANA", "CANDI", "SHANON", "LONNIE", "HILDEGARD", "CECIL", "VALENTINA", "STEPHANY", + "MAGDA", "KAROL", "GERRY", "GABRIELLA", "TIANA", "ROMA", "RICHELLE", "RAY", "PRINCESS", "OLETA", + "JACQUE", "IDELLA", "ALAINA", "SUZANNA", "JOVITA", "BLAIR", "TOSHA", "RAVEN", "NEREIDA", "MARLYN", + "KYLA", "JOSEPH", "DELFINA", "TENA", "STEPHENIE", "SABINA", "NATHALIE", "MARCELLE", "GERTIE", "DARLEEN", + "THEA", "SHARONDA", "SHANTEL", "BELEN", "VENESSA", "ROSALINA", "ONA", "GENOVEVA", "COREY", 
"CLEMENTINE", + "ROSALBA", "RENATE", "RENATA", "MI", "IVORY", "GEORGIANNA", "FLOY", "DORCAS", "ARIANA", "TYRA", + "THEDA", "MARIAM", "JULI", "JESICA", "DONNIE", "VIKKI", "VERLA", "ROSELYN", "MELVINA", "JANNETTE", + "GINNY", "DEBRAH", "CORRIE", "ASIA", "VIOLETA", "MYRTIS", "LATRICIA", "COLLETTE", "CHARLEEN", "ANISSA", + "VIVIANA", "TWYLA", "PRECIOUS", "NEDRA", "LATONIA", "LAN", "HELLEN", "FABIOLA", "ANNAMARIE", "ADELL", + "SHARYN", "CHANTAL", "NIKI", "MAUD", "LIZETTE", "LINDY", "KIA", "KESHA", "JEANA", "DANELLE", + "CHARLINE", "CHANEL", "CARROL", "VALORIE", "LIA", "DORTHA", "CRISTAL", "SUNNY", "LEONE", "LEILANI", + "GERRI", "DEBI", "ANDRA", "KESHIA", "IMA", "EULALIA", "EASTER", "DULCE", "NATIVIDAD", "LINNIE", + "KAMI", "GEORGIE", "CATINA", "BROOK", "ALDA", "WINNIFRED", "SHARLA", "RUTHANN", "MEAGHAN", "MAGDALENE", + "LISSETTE", "ADELAIDA", "VENITA", "TRENA", "SHIRLENE", "SHAMEKA", "ELIZEBETH", "DIAN", "SHANTA", "MICKEY", + "LATOSHA", "CARLOTTA", "WINDY", "SOON", "ROSINA", "MARIANN", "LEISA", "JONNIE", "DAWNA", "CATHIE", + "BILLY", "ASTRID", "SIDNEY", "LAUREEN", "JANEEN", "HOLLI", "FAWN", "VICKEY", "TERESSA", "SHANTE", + "RUBYE", "MARCELINA", "CHANDA", "CARY", "TERESE", "SCARLETT", "MARTY", "MARNIE", "LULU", "LISETTE", + "JENIFFER", "ELENOR", "DORINDA", "DONITA", "CARMAN", "BERNITA", "ALTAGRACIA", "ALETA", "ADRIANNA", "ZORAIDA", + "RONNIE", "NICOLA", "LYNDSEY", "KENDALL", "JANINA", "CHRISSY", "AMI", "STARLA", "PHYLIS", "PHUONG", + "KYRA", "CHARISSE", "BLANCH", "SANJUANITA", "RONA", "NANCI", "MARILEE", "MARANDA", "CORY", "BRIGETTE", + "SANJUANA", "MARITA", "KASSANDRA", "JOYCELYN", "IRA", "FELIPA", "CHELSIE", "BONNY", "MIREYA", "LORENZA", + "KYONG", "ILEANA", "CANDELARIA", "TONY", "TOBY", "SHERIE", "OK", "MARK", "LUCIE", "LEATRICE", + "LAKESHIA", "GERDA", "EDIE", "BAMBI", "MARYLIN", "LAVON", "HORTENSE", "GARNET", "EVIE", "TRESSA", + "SHAYNA", "LAVINA", "KYUNG", "JEANETTA", "SHERRILL", "SHARA", "PHYLISS", "MITTIE", "ANABEL", "ALESIA", + "THUY", "TAWANDA", "RICHARD", 
"JOANIE", "TIFFANIE", "LASHANDA", "KARISSA", "ENRIQUETA", "DARIA", "DANIELLA", + "CORINNA", "ALANNA", "ABBEY", "ROXANE", "ROSEANNA", "MAGNOLIA", "LIDA", "KYLE", "JOELLEN", "ERA", + "CORAL", "CARLEEN", "TRESA", "PEGGIE", "NOVELLA", "NILA", "MAYBELLE", "JENELLE", "CARINA", "NOVA", + "MELINA", "MARQUERITE", "MARGARETTE", "JOSEPHINA", "EVONNE", "DEVIN", "CINTHIA", "ALBINA", "TOYA", "TAWNYA", + "SHERITA", "SANTOS", "MYRIAM", "LIZABETH", "LISE", "KEELY", "JENNI", "GISELLE", "CHERYLE", "ARDITH", + "ARDIS", "ALESHA", "ADRIANE", "SHAINA", "LINNEA", "KAROLYN", "HONG", "FLORIDA", "FELISHA", "DORI", + "DARCI", "ARTIE", "ARMIDA", "ZOLA", "XIOMARA", "VERGIE", "SHAMIKA", "NENA", "NANNETTE", "MAXIE", + "LOVIE", "JEANE", "JAIMIE", "INGE", "FARRAH", "ELAINA", "CAITLYN", "STARR", "FELICITAS", "CHERLY", + "CARYL", "YOLONDA", "YASMIN", "TEENA", "PRUDENCE", "PENNIE", "NYDIA", "MACKENZIE", "ORPHA", "MARVEL", + "LIZBETH", "LAURETTE", "JERRIE", "HERMELINDA", "CAROLEE", "TIERRA", "MIRIAN", "META", "MELONY", "KORI", + "JENNETTE", "JAMILA", "ENA", "ANH", "YOSHIKO", "SUSANNAH", "SALINA", "RHIANNON", "JOLEEN", "CRISTINE", + "ASHTON", "ARACELY", "TOMEKA", "SHALONDA", "MARTI", "LACIE", "KALA", "JADA", "ILSE", "HAILEY", + "BRITTANI", "ZONA", "SYBLE", "SHERRYL", "RANDY", "NIDIA", "MARLO", "KANDICE", "KANDI", "DEB", + "DEAN", "AMERICA", "ALYCIA", "TOMMY", "RONNA", "NORENE", "MERCY", "JOSE", "INGEBORG", "GIOVANNA", + "GEMMA", "CHRISTEL", "AUDRY", "ZORA", "VITA", "VAN", "TRISH", "STEPHAINE", "SHIRLEE", "SHANIKA", + "MELONIE", "MAZIE", "JAZMIN", "INGA", "HOA", "HETTIE", "GERALYN", "FONDA", "ESTRELLA", "ADELLA", + "SU", "SARITA", "RINA", "MILISSA", "MARIBETH", "GOLDA", "EVON", "ETHELYN", "ENEDINA", "CHERISE", + "CHANA", "VELVA", "TAWANNA", "SADE", "MIRTA", "LI", "KARIE", "JACINTA", "ELNA", "DAVINA", + "CIERRA", "ASHLIE", "ALBERTHA", "TANESHA", "STEPHANI", "NELLE", "MINDI", "LU", "LORINDA", "LARUE", + "FLORENE", "DEMETRA", "DEDRA", "CIARA", "CHANTELLE", "ASHLY", "SUZY", "ROSALVA", "NOELIA", "LYDA", + 
"LEATHA", "KRYSTYNA", "KRISTAN", "KARRI", "DARLINE", "DARCIE", "CINDA", "CHEYENNE", "CHERRIE", "AWILDA", + "ALMEDA", "ROLANDA", "LANETTE", "JERILYN", "GISELE", "EVALYN", "CYNDI", "CLETA", "CARIN", "ZINA", + "ZENA", "VELIA", "TANIKA", "PAUL", "CHARISSA", "THOMAS", "TALIA", "MARGARETE", "LAVONDA", "KAYLEE", + "KATHLENE", "JONNA", "IRENA", "ILONA", "IDALIA", "CANDIS", "CANDANCE", "BRANDEE", "ANITRA", "ALIDA", + "SIGRID", "NICOLETTE", "MARYJO", "LINETTE", "HEDWIG", "CHRISTIANA", "CASSIDY", "ALEXIA", "TRESSIE", "MODESTA", + "LUPITA", "LITA", "GLADIS", "EVELIA", "DAVIDA", "CHERRI", "CECILY", "ASHELY", "ANNABEL", "AGUSTINA", + "WANITA", "SHIRLY", "ROSAURA", "HULDA", "EUN", "BAILEY", "YETTA", "VERONA", "THOMASINA", "SIBYL", + "SHANNAN", "MECHELLE", "LUE", "LEANDRA", "LANI", "KYLEE", "KANDY", "JOLYNN", "FERNE", "EBONI", + "CORENE", "ALYSIA", "ZULA", "NADA", "MOIRA", "LYNDSAY", "LORRETTA", "JUAN", "JAMMIE", "HORTENSIA", + "GAYNELL", "CAMERON", "ADRIA", "VINA", "VICENTA", "TANGELA", "STEPHINE", "NORINE", "NELLA", "LIANA", + "LESLEE", "KIMBERELY", "ILIANA", "GLORY", "FELICA", "EMOGENE", "ELFRIEDE", "EDEN", "EARTHA", "CARMA", + "BEA", "OCIE", "MARRY", "LENNIE", "KIARA", "JACALYN", "CARLOTA", "ARIELLE", "YU", "STAR", + "OTILIA", "KIRSTIN", "KACEY", "JOHNETTA", "JOEY", "JOETTA", "JERALDINE", "JAUNITA", "ELANA", "DORTHEA", + "CAMI", "AMADA", "ADELIA", "VERNITA", "TAMAR", "SIOBHAN", "RENEA", "RASHIDA", "OUIDA", "ODELL", + "NILSA", "MERYL", "KRISTYN", "JULIETA", "DANICA", "BREANNE", "AUREA", "ANGLEA", "SHERRON", "ODETTE", + "MALIA", "LORELEI", "LIN", "LEESA", "KENNA", "KATHLYN", "FIONA", "CHARLETTE", "SUZIE", "SHANTELL", + "SABRA", "RACQUEL", "MYONG", "MIRA", "MARTINE", "LUCIENNE", "LAVADA", "JULIANN", "JOHNIE", "ELVERA", + "DELPHIA", "CLAIR", "CHRISTIANE", "CHAROLETTE", "CARRI", "AUGUSTINE", "ASHA", "ANGELLA", "PAOLA", "NINFA", + "LEDA", "LAI", "EDA", "SUNSHINE", "STEFANI", "SHANELL", "PALMA", "MACHELLE", "LISSA", "KECIA", + "KATHRYNE", "KARLENE", "JULISSA", "JETTIE", "JENNIFFER", 
"HUI", "CORRINA", "CHRISTOPHER", "CAROLANN", "ALENA", + "TESS", "ROSARIA", "MYRTICE", "MARYLEE", "LIANE", "KENYATTA", "JUDIE", "JANEY", "IN", "ELMIRA", + "ELDORA", "DENNA", "CRISTI", "CATHI", "ZAIDA", "VONNIE", "VIVA", "VERNIE", "ROSALINE", "MARIELA", + "LUCIANA", "LESLI", "KARAN", "FELICE", "DENEEN", "ADINA", "WYNONA", "TARSHA", "SHERON", "SHASTA", + "SHANITA", "SHANI", "SHANDRA", "RANDA", "PINKIE", "PARIS", "NELIDA", "MARILOU", "LYLA", "LAURENE", + "LACI", "JOI", "JANENE", "DOROTHA", "DANIELE", "DANI", "CAROLYNN", "CARLYN", "BERENICE", "AYESHA", + "ANNELIESE", "ALETHEA", "THERSA", "TAMIKO", "RUFINA", "OLIVA", "MOZELL", "MARYLYN", "MADISON", "KRISTIAN", + "KATHYRN", "KASANDRA", "KANDACE", "JANAE", "GABRIEL", "DOMENICA", "DEBBRA", "DANNIELLE", "CHUN", "BUFFY", + "BARBIE", "ARCELIA", "AJA", "ZENOBIA", "SHAREN", "SHAREE", "PATRICK", "PAGE", "MY", "LAVINIA", + "KUM", "KACIE", "JACKELINE", "HUONG", "FELISA", "EMELIA", "ELEANORA", "CYTHIA", "CRISTIN", "CLYDE", + "CLARIBEL", "CARON", "ANASTACIA", "ZULMA", "ZANDRA", "YOKO", "TENISHA", "SUSANN", "SHERILYN", "SHAY", + "SHAWANDA", "SABINE", "ROMANA", "MATHILDA", "LINSEY", "KEIKO", "JOANA", "ISELA", "GRETTA", "GEORGETTA", + "EUGENIE", "DUSTY", "DESIRAE", "DELORA", "CORAZON", "ANTONINA", "ANIKA", "WILLENE", "TRACEE", "TAMATHA", + "REGAN", "NICHELLE", "MICKIE", "MAEGAN", "LUANA", "LANITA", "KELSIE", "EDELMIRA", "BREE", "AFTON", + "TEODORA", "TAMIE", "SHENA", "MEG", "LINH", "KELI", "KACI", "DANYELLE", "BRITT", "ARLETTE", + "ALBERTINE", "ADELLE", "TIFFINY", "STORMY", "SIMONA", "NUMBERS", "NICOLASA", "NICHOL", "NIA", "NAKISHA", + "MEE", "MAIRA", "LOREEN", "KIZZY", "JOHNNY", "JAY", "FALLON", "CHRISTENE", "BOBBYE", "ANTHONY", + "YING", "VINCENZA", "TANJA", "RUBIE", "RONI", "QUEENIE", "MARGARETT", "KIMBERLI", "IRMGARD", "IDELL", + "HILMA", "EVELINA", "ESTA", "EMILEE", "DENNISE", "DANIA", "CARL", "CARIE", "ANTONIO", "WAI", + "SANG", "RISA", "RIKKI", "PARTICIA", "MUI", "MASAKO", "MARIO", "LUVENIA", "LOREE", "LONI", + "LIEN", "KEVIN", 
"GIGI", "FLORENCIA", "DORIAN", "DENITA", "DALLAS", "CHI", "BILLYE", "ALEXANDER", + "TOMIKA", "SHARITA", "RANA", "NIKOLE", "NEOMA", "MARGARITE", "MADALYN", "LUCINA", "LAILA", "KALI", + "JENETTE", "GABRIELE", "EVELYNE", "ELENORA", "CLEMENTINA", "ALEJANDRINA", "ZULEMA", "VIOLETTE", "VANNESSA", "THRESA", + "RETTA", "PIA", "PATIENCE", "NOELLA", "NICKIE", "JONELL", "DELTA", "CHUNG", "CHAYA", "CAMELIA", + "BETHEL", "ANYA", "ANDREW", "THANH", "SUZANN", "SPRING", "SHU", "MILA", "LILLA", "LAVERNA", + "KEESHA", "KATTIE", "GIA", "GEORGENE", "EVELINE", "ESTELL", "ELIZBETH", "VIVIENNE", "VALLIE", "TRUDIE", + "STEPHANE", "MICHEL", "MAGALY", "MADIE", "KENYETTA", "KARREN", "JANETTA", "HERMINE", "HARMONY", "DRUCILLA", + "DEBBI", "CELESTINA", "CANDIE", "BRITNI", "BECKIE", "AMINA", "ZITA", "YUN", "YOLANDE", "VIVIEN", + "VERNETTA", "TRUDI", "SOMMER", "PEARLE", "PATRINA", "OSSIE", "NICOLLE", "LOYCE", "LETTY", "LARISA", + "KATHARINA", "JOSELYN", "JONELLE", "JENELL", "IESHA", "HEIDE", "FLORINDA", "FLORENTINA", "FLO", "ELODIA", + "DORINE", "BRUNILDA", "BRIGID", "ASHLI", "ARDELLA", "TWANA", "THU", "TARAH", "SUNG", "SHEA", + "SHAVON", "SHANE", "SERINA", "RAYNA", "RAMONITA", "NGA", "MARGURITE", "LUCRECIA", "KOURTNEY", "KATI", + "JESUS", "JESENIA", "DIAMOND", "CRISTA", "AYANA", "ALICA", "ALIA", "VINNIE", "SUELLEN", "ROMELIA", + "RACHELL", "PIPER", "OLYMPIA", "MICHIKO", "KATHALEEN", "JOLIE", "JESSI", "JANESSA", "HANA", "HA", + "ELEASE", "CARLETTA", "BRITANY", "SHONA", "SALOME", "ROSAMOND", "REGENA", "RAINA", "NGOC", "NELIA", + "LOUVENIA", "LESIA", "LATRINA", "LATICIA", "LARHONDA", "JINA", "JACKI", "HOLLIS", "HOLLEY", "EMMY", + "DEEANN", "CORETTA", "ARNETTA", "VELVET", "THALIA", "SHANICE", "NETA", "MIKKI", "MICKI", "LONNA", + "LEANA", "LASHUNDA", "KILEY", "JOYE", "JACQULYN", "IGNACIA", "HYUN", "HIROKO", "HENRY", "HENRIETTE", + "ELAYNE", "DELINDA", "DARNELL", "DAHLIA", "COREEN", "CONSUELA", "CONCHITA", "CELINE", "BABETTE", "AYANNA", + "ANETTE", "ALBERTINA", "SKYE", "SHAWNEE", "SHANEKA", "QUIANA", 
"PAMELIA", "MIN", "MERRI", "MERLENE", + "MARGIT", "KIESHA", "KIERA", "KAYLENE", "JODEE", "JENISE", "ERLENE", "EMMIE", "ELSE", "DARYL", + "DALILA", "DAISEY", "CODY", "CASIE", "BELIA", "BABARA", "VERSIE", "VANESA", "SHELBA", "SHAWNDA", + "SAM", "NORMAN", "NIKIA", "NAOMA", "MARNA", "MARGERET", "MADALINE", "LAWANA", "KINDRA", "JUTTA", + "JAZMINE", "JANETT", "HANNELORE", "GLENDORA", "GERTRUD", "GARNETT", "FREEDA", "FREDERICA", "FLORANCE", "FLAVIA", + "DENNIS", "CARLINE", "BEVERLEE", "ANJANETTE", "VALDA", "TRINITY", "TAMALA", "STEVIE", "SHONNA", "SHA", + "SARINA", "ONEIDA", "MICAH", "MERILYN", "MARLEEN", "LURLINE", "LENNA", "KATHERIN", "JIN", "JENI", + "HAE", "GRACIA", "GLADY", "FARAH", "ERIC", "ENOLA", "EMA", "DOMINQUE", "DEVONA", "DELANA", + "CECILA", "CAPRICE", "ALYSHA", "ALI", "ALETHIA", "VENA", "THERESIA", "TAWNY", "SONG", "SHAKIRA", + "SAMARA", "SACHIKO", "RACHELE", "PAMELLA", "NICKY", "MARNI", "MARIEL", "MAREN", "MALISA", "LIGIA", + "LERA", "LATORIA", "LARAE", "KIMBER", "KATHERN", "KAREY", "JENNEFER", "JANETH", "HALINA", "FREDIA", + "DELISA", "DEBROAH", "CIERA", "CHIN", "ANGELIKA", "ANDREE", "ALTHA", "YEN", "VIVAN", "TERRESA", + "TANNA", "SUK", "SUDIE", "SOO", "SIGNE", "SALENA", "RONNI", "REBBECCA", "MYRTIE", "MCKENZIE", + "MALIKA", "MAIDA", "LOAN", "LEONARDA", "KAYLEIGH", "FRANCE", "ETHYL", "ELLYN", "DAYLE", "CAMMIE", + "BRITTNI", "BIRGIT", "AVELINA", "ASUNCION", "ARIANNA", "AKIKO", "VENICE", "TYESHA", "TONIE", "TIESHA", + "TAKISHA", "STEFFANIE", "SINDY", "SANTANA", "MEGHANN", "MANDA", "MACIE", "LADY", "KELLYE", "KELLEE", + "JOSLYN", "JASON", "INGER", "INDIRA", "GLINDA", "GLENNIS", "FERNANDA", "FAUSTINA", "ENEIDA", "ELICIA", + "DOT", "DIGNA", "DELL", "ARLETTA", "ANDRE", "WILLIA", "TAMMARA", "TABETHA", "SHERRELL", "SARI", + "REFUGIO", "REBBECA", "PAULETTA", "NIEVES", "NATOSHA", "NAKITA", "MAMMIE", "KENISHA", "KAZUKO", "KASSIE", + "GARY", "EARLEAN", "DAPHINE", "CORLISS", "CLOTILDE", "CAROLYNE", "BERNETTA", "AUGUSTINA", "AUDREA", "ANNIS", + "ANNABELL", "YAN", 
"TENNILLE", "TAMICA", "SELENE", "SEAN", "ROSANA", "REGENIA", "QIANA", "MARKITA", + "MACY", "LEEANNE", "LAURINE", "KYM", "JESSENIA", "JANITA", "GEORGINE", "GENIE", "EMIKO", "ELVIE", + "DEANDRA", "DAGMAR", "CORIE", "COLLEN", "CHERISH", "ROMAINE", "PORSHA", "PEARLENE", "MICHELINE", "MERNA", + "MARGORIE", "MARGARETTA", "LORE", "KENNETH", "JENINE", "HERMINA", "FREDERICKA", "ELKE", "DRUSILLA", "DORATHY", + "DIONE", "DESIRE", "CELENA", "BRIGIDA", "ANGELES", "ALLEGRA", "THEO", "TAMEKIA", "SYNTHIA", "STEPHEN", + "SOOK", "SLYVIA", "ROSANN", "REATHA", "RAYE", "MARQUETTA", "MARGART", "LING", "LAYLA", "KYMBERLY", + "KIANA", "KAYLEEN", "KATLYN", "KARMEN", "JOELLA", "IRINA", "EMELDA", "ELENI", "DETRA", "CLEMMIE", + "CHERYLL", "CHANTELL", "CATHEY", "ARNITA", "ARLA", "ANGLE", "ANGELIC", "ALYSE", "ZOFIA", "THOMASINE", + "TENNIE", "SON", "SHERLY", "SHERLEY", "SHARYL", "REMEDIOS", "PETRINA", "NICKOLE", "MYUNG", "MYRLE", + "MOZELLA", "LOUANNE", "LISHA", "LATIA", "LANE", "KRYSTA", "JULIENNE", "JOEL", "JEANENE", "JACQUALINE", + "ISAURA", "GWENDA", "EARLEEN", "DONALD", "CLEOPATRA", "CARLIE", "AUDIE", "ANTONIETTA", "ALISE", "ALEX", + "VERDELL", "VAL", "TYLER", "TOMOKO", "THAO", "TALISHA", "STEVEN", "SO", "SHEMIKA", "SHAUN", + "SCARLET", "SAVANNA", "SANTINA", "ROSIA", "RAEANN", "ODILIA", "NANA", "MINNA", "MAGAN", "LYNELLE", + "LE", "KARMA", "JOEANN", "IVANA", "INELL", "ILANA", "HYE", "HONEY", "HEE", "GUDRUN", + "FRANK", "DREAMA", "CRISSY", "CHANTE", "CARMELINA", "ARVILLA", "ARTHUR", "ANNAMAE", "ALVERA", "ALEIDA", + "AARON", "YEE", "YANIRA", "VANDA", "TIANNA", "TAM", "STEFANIA", "SHIRA", "PERRY", "NICOL", + "NANCIE", "MONSERRATE", "MINH", "MELYNDA", "MELANY", "MATTHEW", "LOVELLA", "LAURE", "KIRBY", "KACY", + "JACQUELYNN", "HYON", "GERTHA", "FRANCISCO", "ELIANA", "CHRISTENA", "CHRISTEEN", "CHARISE", "CATERINA", "CARLEY", + "CANDYCE", "ARLENA", "AMMIE", "YANG", "WILLETTE", "VANITA", "TUYET", "TINY", "SYREETA", "SILVA", + "SCOTT", "RONALD", "PENNEY", "NYLA", "MICHAL", "MAURICE", "MARYAM", 
"MARYA", "MAGEN", "LUDIE", + "LOMA", "LIVIA", "LANELL", "KIMBERLIE", "JULEE", "DONETTA", "DIEDRA", "DENISHA", "DEANE", "DAWNE", + "CLARINE", "CHERRYL", "BRONWYN", "BRANDON", "ALLA", "VALERY", "TONDA", "SUEANN", "SORAYA", "SHOSHANA", + "SHELA", "SHARLEEN", "SHANELLE", "NERISSA", "MICHEAL", "MERIDITH", "MELLIE", "MAYE", "MAPLE", "MAGARET", + "LUIS", "LILI", "LEONILA", "LEONIE", "LEEANNA", "LAVONIA", "LAVERA", "KRISTEL", "KATHEY", "KATHE", + "JUSTIN", "JULIAN", "JIMMY", "JANN", "ILDA", "HILDRED", "HILDEGARDE", "GENIA", "FUMIKO", "EVELIN", + "ERMELINDA", "ELLY", "DUNG", "DOLORIS", "DIONNA", "DANAE", "BERNEICE", "ANNICE", "ALIX", "VERENA", + "VERDIE", "TRISTAN", "SHAWNNA", "SHAWANA", "SHAUNNA", "ROZELLA", "RANDEE", "RANAE", "MILAGRO", "LYNELL", + "LUISE", "LOUIE", "LOIDA", "LISBETH", "KARLEEN", "JUNITA", "JONA", "ISIS", "HYACINTH", "HEDY", + "GWENN", "ETHELENE", "ERLINE", "EDWARD", "DONYA", "DOMONIQUE", "DELICIA", "DANNETTE", "CICELY", "BRANDA", + "BLYTHE", "BETHANN", "ASHLYN", "ANNALEE", "ALLINE", "YUKO", "VELLA", "TRANG", "TOWANDA", "TESHA", + "SHERLYN", "NARCISA", "MIGUELINA", "MERI", "MAYBELL", "MARLANA", "MARGUERITA", "MADLYN", "LUNA", "LORY", + "LORIANN", "LIBERTY", "LEONORE", "LEIGHANN", "LAURICE", "LATESHA", "LARONDA", "KATRICE", "KASIE", "KARL", + "KALEY", "JADWIGA", "GLENNIE", "GEARLDINE", "FRANCINA", "EPIFANIA", "DYAN", "DORIE", "DIEDRE", "DENESE", + "DEMETRICE", "DELENA", "DARBY", "CRISTIE", "CLEORA", "CATARINA", "CARISA", "BERNIE", "BARBERA", "ALMETA", + "TRULA", "TEREASA", "SOLANGE", "SHEILAH", "SHAVONNE", "SANORA", "ROCHELL", "MATHILDE", "MARGARETA", "MAIA", + "LYNSEY", "LAWANNA", "LAUNA", "KENA", "KEENA", "KATIA", "JAMEY", "GLYNDA", "GAYLENE", "ELVINA", + "ELANOR", "DANUTA", "DANIKA", "CRISTEN", "CORDIE", "COLETTA", "CLARITA", "CARMON", "BRYNN", "AZUCENA", + "AUNDREA", "ANGELE", "YI", "WALTER", "VERLIE", "VERLENE", "TAMESHA", "SILVANA", "SEBRINA", "SAMIRA", + "REDA", "RAYLENE", "PENNI", "PANDORA", "NORAH", "NOMA", "MIREILLE", "MELISSIA", "MARYALICE", 
"LARAINE", + "KIMBERY", "KARYL", "KARINE", "KAM", "JOLANDA", "JOHANA", "JESUSA", "JALEESA", "JAE", "JACQUELYNE", + "IRISH", "ILUMINADA", "HILARIA", "HANH", "GENNIE", "FRANCIE", "FLORETTA", "EXIE", "EDDA", "DREMA", + "DELPHA", "BEV", "BARBAR", "ASSUNTA", "ARDELL", "ANNALISA", "ALISIA", "YUKIKO", "YOLANDO", "WONDA", + "WEI", "WALTRAUD", "VETA", "TEQUILA", "TEMEKA", "TAMEIKA", "SHIRLEEN", "SHENITA", "PIEDAD", "OZELLA", + "MIRTHA", "MARILU", "KIMIKO", "JULIANE", "JENICE", "JEN", "JANAY", "JACQUILINE", "HILDE", "FE", + "FAE", "EVAN", "EUGENE", "ELOIS", "ECHO", "DEVORAH", "CHAU", "BRINDA", "BETSEY", "ARMINDA", + "ARACELIS", "APRYL", "ANNETT", "ALISHIA", "VEOLA", "USHA", "TOSHIKO", "THEOLA", "TASHIA", "TALITHA", + "SHERY", "RUDY", "RENETTA", "REIKO", "RASHEEDA", "OMEGA", "OBDULIA", "MIKA", "MELAINE", "MEGGAN", + "MARTIN", "MARLEN", "MARGET", "MARCELINE", "MANA", "MAGDALEN", "LIBRADA", "LEZLIE", "LEXIE", "LATASHIA", + "LASANDRA", "KELLE", "ISIDRA", "ISA", "INOCENCIA", "GWYN", "FRANCOISE", "ERMINIA", "ERINN", "DIMPLE", + "DEVORA", "CRISELDA", "ARMANDA", "ARIE", "ARIANE", "ANGELO", "ANGELENA", "ALLEN", "ALIZA", "ADRIENE", + "ADALINE", "XOCHITL", "TWANNA", "TRAN", "TOMIKO", "TAMISHA", "TAISHA", "SUSY", "SIU", "RUTHA", + "ROXY", "RHONA", "RAYMOND", "OTHA", "NORIKO", "NATASHIA", "MERRIE", "MELVIN", "MARINDA", "MARIKO", + "MARGERT", "LORIS", "LIZZETTE", "LEISHA", "KAILA", "KA", "JOANNIE", "JERRICA", "JENE", "JANNET", + "JANEE", "JACINDA", "HERTA", "ELENORE", "DORETTA", "DELAINE", "DANIELL", "CLAUDIE", "CHINA", "BRITTA", + "APOLONIA", "AMBERLY", "ALEASE", "YURI", "YUK", "WEN", "WANETA", "UTE", "TOMI", "SHARRI", + "SANDIE", "ROSELLE", "REYNALDA", "RAGUEL", "PHYLICIA", "PATRIA", "OLIMPIA", "ODELIA", "MITZIE", "MITCHELL", + "MISS", "MINDA", "MIGNON", "MICA", "MENDY", "MARIVEL", "MAILE", "LYNETTA", "LAVETTE", "LAURYN", + "LATRISHA", "LAKIESHA", "KIERSTEN", "KARY", "JOSPHINE", "JOLYN", "JETTA", "JANISE", "JACQUIE", "IVELISSE", + "GLYNIS", "GIANNA", "GAYNELLE", "EMERALD", "DEMETRIUS", 
"DANYELL", "DANILLE", "DACIA", "CORALEE", "CHER", + "CEOLA", "BRETT", "BELL", "ARIANNE", "ALESHIA", "YUNG", "WILLIEMAE", "TROY", "TRINH", "THORA", + "TAI", "SVETLANA", "SHERIKA", "SHEMEKA", "SHAUNDA", "ROSELINE", "RICKI", "MELDA", "MALLIE", "LAVONNA", + "LATINA", "LARRY", "LAQUANDA", "LALA", "LACHELLE", "KLARA", "KANDIS", "JOHNA", "JEANMARIE", "JAYE", + "HANG", "GRAYCE", "GERTUDE", "EMERITA", "EBONIE", "CLORINDA", "CHING", "CHERY", "CAROLA", "BREANN", + "BLOSSOM", "BERNARDINE", "BECKI", "ARLETHA", "ARGELIA", "ARA", "ALITA", "YULANDA", "YON", "YESSENIA", + "TOBI", "TASIA", "SYLVIE", "SHIRL", "SHIRELY", "SHERIDAN", "SHELLA", "SHANTELLE", "SACHA", "ROYCE", + "REBECKA", "REAGAN", "PROVIDENCIA", "PAULENE", "MISHA", "MIKI", "MARLINE", "MARICA", "LORITA", "LATOYIA", + "LASONYA", "KERSTIN", "KENDA", "KEITHA", "KATHRIN", "JAYMIE", "JACK", "GRICELDA", "GINETTE", "ERYN", + "ELINA", "ELFRIEDA", "DANYEL", "CHEREE", "CHANELLE", "BARRIE", "AVERY", "AURORE", "ANNAMARIA", "ALLEEN", + "AILENE", "AIDE", "YASMINE", "VASHTI", "VALENTINE", "TREASA", "TORY", "TIFFANEY", "SHERYLL", "SHARIE", + "SHANAE", "SAU", "RAISA", "PA", "NEDA", "MITSUKO", "MIRELLA", "MILDA", "MARYANNA", "MARAGRET", + "MABELLE", "LUETTA", "LORINA", "LETISHA", "LATARSHA", "LANELLE", "LAJUANA", "KRISSY", "KARLY", "KARENA", + "JON", "JESSIKA", "JERICA", "JEANELLE", "JANUARY", "JALISA", "JACELYN", "IZOLA", "IVEY", "GREGORY", + "EUNA", "ETHA", "DREW", "DOMITILA", "DOMINICA", "DAINA", "CREOLA", "CARLI", "CAMIE", "BUNNY", + "BRITTNY", "ASHANTI", "ANISHA", "ALEEN", "ADAH", "YASUKO", "WINTER", "VIKI", "VALRIE", "TONA", + "TINISHA", "THI", "TERISA", "TATUM", "TANEKA", "SIMONNE", "SHALANDA", "SERITA", "RESSIE", "REFUGIA", + "PAZ", "OLENE", "NA", "MERRILL", "MARGHERITA", "MANDIE", "MAN", "MAIRE", "LYNDIA", "LUCI", + "LORRIANE", "LORETA", "LEONIA", "LAVONA", "LASHAWNDA", "LAKIA", "KYOKO", "KRYSTINA", "KRYSTEN", "KENIA", + "KELSI", "JUDE", "JEANICE", "ISOBEL", "GEORGIANN", "GENNY", "FELICIDAD", "EILENE", "DEON", "DELOISE", + 
"DEEDEE", "DANNIE", "CONCEPTION", "CLORA", "CHERILYN", "CHANG", "CALANDRA", "BERRY", "ARMANDINA", "ANISA", + "ULA", "TIMOTHY", "TIERA", "THERESSA", "STEPHANIA", "SIMA", "SHYLA", "SHONTA", "SHERA", "SHAQUITA", + "SHALA", "SAMMY", "ROSSANA", "NOHEMI", "NERY", "MORIAH", "MELITA", "MELIDA", "MELANI", "MARYLYNN", + "MARISHA", "MARIETTE", "MALORIE", "MADELENE", "LUDIVINA", "LORIA", "LORETTE", "LORALEE", "LIANNE", "LEON", + "LAVENIA", "LAURINDA", "LASHON", "KIT", "KIMI", "KEILA", "KATELYNN", "KAI", "JONE", "JOANE", + "JI", "JAYNA", "JANELLA", "JA", "HUE", "HERTHA", "FRANCENE", "ELINORE", "DESPINA", "DELSIE", + "DEEDRA", "CLEMENCIA", "CARRY", "CAROLIN", "CARLOS", "BULAH", "BRITTANIE", "BOK", "BLONDELL", "BIBI", + "BEAULAH", "BEATA", "ANNITA", "AGRIPINA", "VIRGEN", "VALENE", "UN", "TWANDA", "TOMMYE", "TOI", + "TARRA", "TARI", "TAMMERA", "SHAKIA", "SADYE", "RUTHANNE", "ROCHEL", "RIVKA", "PURA", "NENITA", + "NATISHA", "MING", "MERRILEE", "MELODEE", "MARVIS", "LUCILLA", "LEENA", "LAVETA", "LARITA", "LANIE", + "KEREN", "ILEEN", "GEORGEANN", "GENNA", "GENESIS", "FRIDA", "EWA", "EUFEMIA", "EMELY", "ELA", + "EDYTH", "DEONNA", "DEADRA", "DARLENA", "CHANELL", "CHAN", "CATHERN", "CASSONDRA", "CASSAUNDRA", "BERNARDA", + "BERNA", "ARLINDA", "ANAMARIA", "ALBERT", "WESLEY", "VERTIE", "VALERI", "TORRI", "TATYANA", "STASIA", + "SHERISE", "SHERILL", "SEASON", "SCOTTIE", "SANDA", "RUTHE", "ROSY", "ROBERTO", "ROBBI", "RANEE", + "QUYEN", "PEARLY", "PALMIRA", "ONITA", "NISHA", "NIESHA", "NIDA", "NEVADA", "NAM", "MERLYN", + "MAYOLA", "MARYLOUISE", "MARYLAND", "MARX", "MARTH", "MARGENE", "MADELAINE", "LONDA", "LEONTINE", "LEOMA", + "LEIA", "LAWRENCE", "LAURALEE", "LANORA", "LAKITA", "KIYOKO", "KETURAH", "KATELIN", "KAREEN", "JONIE", + "JOHNETTE", "JENEE", "JEANETT", "IZETTA", "HIEDI", "HEIKE", "HASSIE", "HAROLD", "GIUSEPPINA", "GEORGANN", + "FIDELA", "FERNANDE", "ELWANDA", "ELLAMAE", "ELIZ", "DUSTI", "DOTTY", "CYNDY", "CORALIE", "CELESTA", + "ARGENTINA", "ALVERTA", "XENIA", "WAVA", "VANETTA", 
"TORRIE", "TASHINA", "TANDY", "TAMBRA", "TAMA", + "STEPANIE", "SHILA", "SHAUNTA", "SHARAN", "SHANIQUA", "SHAE", "SETSUKO", "SERAFINA", "SANDEE", "ROSAMARIA", + "PRISCILA", "OLINDA", "NADENE", "MUOI", "MICHELINA", "MERCEDEZ", "MARYROSE", "MARIN", "MARCENE", "MAO", + "MAGALI", "MAFALDA", "LOGAN", "LINN", "LANNIE", "KAYCE", "KAROLINE", "KAMILAH", "KAMALA", "JUSTA", + "JOLINE", "JENNINE", "JACQUETTA", "IRAIDA", "GERALD", "GEORGEANNA", "FRANCHESCA", "FAIRY", "EMELINE", "ELANE", + "EHTEL", "EARLIE", "DULCIE", "DALENE", "CRIS", "CLASSIE", "CHERE", "CHARIS", "CAROYLN", "CARMINA", + "CARITA", "BRIAN", "BETHANIE", "AYAKO", "ARICA", "AN", "ALYSA", "ALESSANDRA", "AKILAH", "ADRIEN", + "ZETTA", "YOULANDA", "YELENA", "YAHAIRA", "XUAN", "WENDOLYN", "VICTOR", "TIJUANA", "TERRELL", "TERINA", + "TERESIA", "SUZI", "SUNDAY", "SHERELL", "SHAVONDA", "SHAUNTE", "SHARDA", "SHAKITA", "SENA", "RYANN", + "RUBI", "RIVA", "REGINIA", "REA", "RACHAL", "PARTHENIA", "PAMULA", "MONNIE", "MONET", "MICHAELE", + "MELIA", "MARINE", "MALKA", "MAISHA", "LISANDRA", "LEO", "LEKISHA", "LEAN", "LAURENCE", "LAKENDRA", + "KRYSTIN", "KORTNEY", "KIZZIE", "KITTIE", "KERA", "KENDAL", "KEMBERLY", "KANISHA", "JULENE", "JULE", + "JOSHUA", "JOHANNE", "JEFFREY", "JAMEE", "HAN", "HALLEY", "GIDGET", "GALINA", "FREDRICKA", "FLETA", + "FATIMAH", "EUSEBIA", "ELZA", "ELEONORE", "DORTHEY", "DORIA", "DONELLA", "DINORAH", "DELORSE", "CLARETHA", + "CHRISTINIA", "CHARLYN", "BONG", "BELKIS", "AZZIE", "ANDERA", "AIKO", "ADENA", "YER", "YAJAIRA", + "WAN", "VANIA", "ULRIKE", "TOSHIA", "TIFANY", "STEFANY", "SHIZUE", "SHENIKA", "SHAWANNA", "SHAROLYN", + "SHARILYN", "SHAQUANA", "SHANTAY", "SEE", "ROZANNE", "ROSELEE", "RICKIE", "REMONA", "REANNA", "RAELENE", + "QUINN", "PHUNG", "PETRONILA", "NATACHA", "NANCEY", "MYRL", "MIYOKO", "MIESHA", "MERIDETH", "MARVELLA", + "MARQUITTA", "MARHTA", "MARCHELLE", "LIZETH", "LIBBIE", "LAHOMA", "LADAWN", "KINA", "KATHELEEN", "KATHARYN", + "KARISA", "KALEIGH", "JUNIE", "JULIEANN", "JOHNSIE", "JANEAN", 
"JAIMEE", "JACKQUELINE", "HISAKO", "HERMA", + "HELAINE", "GWYNETH", "GLENN", "GITA", "EUSTOLIA", "EMELINA", "ELIN", "EDRIS", "DONNETTE", "DONNETTA", + "DIERDRE", "DENAE", "DARCEL", "CLAUDE", "CLARISA", "CINDERELLA", "CHIA", "CHARLESETTA", "CHARITA", "CELSA", + "CASSY", "CASSI", "CARLEE", "BRUNA", "BRITTANEY", "BRANDE", "BILLI", "BAO", "ANTONETTA", "ANGLA", + "ANGELYN", "ANALISA", "ALANE", "WENONA", "WENDIE", "VERONIQUE", "VANNESA", "TOBIE", "TEMPIE", "SUMIKO", + "SULEMA", "SPARKLE", "SOMER", "SHEBA", "SHAYNE", "SHARICE", "SHANEL", "SHALON", "SAGE", "ROY", + "ROSIO", "ROSELIA", "RENAY", "REMA", "REENA", "PORSCHE", "PING", "PEG", "OZIE", "ORETHA", + "ORALEE", "ODA", "NU", "NGAN", "NAKESHA", "MILLY", "MARYBELLE", "MARLIN", "MARIS", "MARGRETT", + "MARAGARET", "MANIE", "LURLENE", "LILLIA", "LIESELOTTE", "LAVELLE", "LASHAUNDA", "LAKEESHA", "KEITH", "KAYCEE", + "KALYN", "JOYA", "JOETTE", "JENAE", "JANIECE", "ILLA", "GRISEL", "GLAYDS", "GENEVIE", "GALA", + "FREDDA", "FRED", "ELMER", "ELEONOR", "DEBERA", "DEANDREA", "DAN", "CORRINNE", "CORDIA", "CONTESSA", + "COLENE", "CLEOTILDE", "CHARLOTT", "CHANTAY", "CECILLE", "BEATRIS", "AZALEE", "ARLEAN", "ARDATH", "ANJELICA", + "ANJA", "ALFREDIA", "ALEISHA", "ADAM", "ZADA", "YUONNE", "XIAO", "WILLODEAN", "WHITLEY", "VENNIE", + "VANNA", "TYISHA", "TOVA", "TORIE", "TONISHA", "TILDA", "TIEN", "TEMPLE", "SIRENA", "SHERRIL", + "SHANTI", "SHAN", "SENAIDA", "SAMELLA", "ROBBYN", "RENDA", "REITA", "PHEBE", "PAULITA", "NOBUKO", + "NGUYET", "NEOMI", "MOON", "MIKAELA", "MELANIA", "MAXIMINA", "MARG", "MAISIE", "LYNNA", "LILLI", + "LAYNE", "LASHAUN", "LAKENYA", "LAEL", "KIRSTIE", "KATHLINE", "KASHA", "KARLYN", "KARIMA", "JOVAN", + "JOSEFINE", "JENNELL", "JACQUI", "JACKELYN", "HYO", "HIEN", "GRAZYNA", "FLORRIE", "FLORIA", "ELEONORA", + "DWANA", "DORLA", "DONG", "DELMY", "DEJA", "DEDE", "DANN", "CRYSTA", "CLELIA", "CLARIS", + "CLARENCE", "CHIEKO", "CHERLYN", "CHERELLE", "CHARMAIN", "CHARA", "CAMMY", "BEE", "ARNETTE", "ARDELLE", + "ANNIKA", "AMIEE", 
"AMEE", "ALLENA", "YVONE", "YUKI", "YOSHIE", "YEVETTE", "YAEL", "WILLETTA", + "VONCILE", "VENETTA", "TULA", "TONETTE", "TIMIKA", "TEMIKA", "TELMA", "TEISHA", "TAREN", "TA", + "STACEE", "SHIN", "SHAWNTA", "SATURNINA", "RICARDA", "POK", "PASTY", "ONIE", "NUBIA", "MORA", + "MIKE", "MARIELLE", "MARIELLA", "MARIANELA", "MARDELL", "MANY", "LUANNA", "LOISE", "LISABETH", "LINDSY", + "LILLIANA", "LILLIAM", "LELAH", "LEIGHA", "LEANORA", "LANG", "KRISTEEN", "KHALILAH", "KEELEY", "KANDRA", + "JUNKO", "JOAQUINA", "JERLENE", "JANI", "JAMIKA", "JAME", "HSIU", "HERMILA", "GOLDEN", "GENEVIVE", + "EVIA", "EUGENA", "EMMALINE", "ELFREDA", "ELENE", "DONETTE", "DELCIE", "DEEANNA", "DARCEY", "CUC", + "CLARINDA", "CIRA", "CHAE", "CELINDA", "CATHERYN", "CATHERIN", "CASIMIRA", "CARMELIA", "CAMELLIA", "BREANA", + "BOBETTE", "BERNARDINA", "BEBE", "BASILIA", "ARLYNE", "AMAL", "ALAYNA", "ZONIA", "ZENIA", "YURIKO", + "YAEKO", "WYNELL", "WILLOW", "WILLENA", "VERNIA", "TU", "TRAVIS", "TORA", "TERRILYN", "TERICA", + "TENESHA", "TAWNA", "TAJUANA", "TAINA", "STEPHNIE", "SONA", "SOL", "SINA", "SHONDRA", "SHIZUKO", + "SHERLENE", "SHERICE", "SHARIKA", "ROSSIE", "ROSENA", "RORY", "RIMA", "RIA", "RHEBA", "RENNA", + "PETER", "NATALYA", "NANCEE", "MELODI", "MEDA", "MAXIMA", "MATHA", "MARKETTA", "MARICRUZ", "MARCELENE", + "MALVINA", "LUBA", "LOUETTA", "LEIDA", "LECIA", "LAURAN", "LASHAWNA", "LAINE", "KHADIJAH", "KATERINE", + "KASI", "KALLIE", "JULIETTA", "JESUSITA", "JESTINE", "JESSIA", "JEREMY", "JEFFIE", "JANYCE", "ISADORA", + "GEORGIANNE", "FIDELIA", "EVITA", "EURA", "EULAH", "ESTEFANA", "ELSY", "ELIZABET", "ELADIA", "DODIE", + "DION", "DIA", "DENISSE", "DELORAS", "DELILA", "DAYSI", "DAKOTA", "CURTIS", "CRYSTLE", "CONCHA", + "COLBY", "CLARETTA", "CHU", "CHRISTIA", "CHARLSIE", "CHARLENA", "CARYLON", "BETTYANN", "ASLEY", "ASHLEA", + "AMIRA", "AI", "AGUEDA", "AGNUS", "YUETTE", "VINITA", "VICTORINA", "TYNISHA", "TREENA", "TOCCARA", + "TISH", "THOMASENA", "TEGAN", "SOILA", "SHILOH", "SHENNA", "SHARMAINE", 
"SHANTAE", "SHANDI", "SEPTEMBER", + "SARAN", "SARAI", "SANA", "SAMUEL", "SALLEY", "ROSETTE", "ROLANDE", "REGINE", "OTELIA", "OSCAR", + "OLEVIA", "NICHOLLE", "NECOLE", "NAIDA", "MYRTA", "MYESHA", "MITSUE", "MINTA", "MERTIE", "MARGY", + "MAHALIA", "MADALENE", "LOVE", "LOURA", "LOREAN", "LEWIS", "LESHA", "LEONIDA", "LENITA", "LAVONE", + "LASHELL", "LASHANDRA", "LAMONICA", "KIMBRA", "KATHERINA", "KARRY", "KANESHA", "JULIO", "JONG", "JENEVA", + "JAQUELYN", "HWA", "GILMA", "GHISLAINE", "GERTRUDIS", "FRANSISCA", "FERMINA", "ETTIE", "ETSUKO", "ELLIS", + "ELLAN", "ELIDIA", "EDRA", "DORETHEA", "DOREATHA", "DENYSE", "DENNY", "DEETTA", "DAINE", "CYRSTAL", + "CORRIN", "CAYLA", "CARLITA", "CAMILA", "BURMA", "BULA", "BUENA", "BLAKE", "BARABARA", "AVRIL", + "AUSTIN", "ALAINE", "ZANA", "WILHEMINA", "WANETTA", "VIRGIL", "VI", "VERONIKA", "VERNON", "VERLINE", + "VASILIKI", "TONITA", "TISA", "TEOFILA", "TAYNA", "TAUNYA", "TANDRA", "TAKAKO", "SUNNI", "SUANNE", + "SIXTA", "SHARELL", "SEEMA", "RUSSELL", "ROSENDA", "ROBENA", "RAYMONDE", "PEI", "PAMILA", "OZELL", + "NEIDA", "NEELY", "MISTIE", "MICHA", "MERISSA", "MAURITA", "MARYLN", "MARYETTA", "MARSHALL", "MARCELL", + "MALENA", "MAKEDA", "MADDIE", "LOVETTA", "LOURIE", "LORRINE", "LORILEE", "LESTER", "LAURENA", "LASHAY", + "LARRAINE", "LAREE", "LACRESHA", "KRISTLE", "KRISHNA", "KEVA", "KEIRA", "KAROLE", "JOIE", "JINNY", + "JEANNETTA", "JAMA", "HEIDY", "GILBERTE", "GEMA", "FAVIOLA", "EVELYNN", "ENDA", "ELLI", "ELLENA", + "DIVINA", "DAGNY", "COLLENE", "CODI", "CINDIE", "CHASSIDY", "CHASIDY", "CATRICE", "CATHERINA", "CASSEY", + "CAROLL", "CARLENA", "CANDRA", "CALISTA", "BRYANNA", "BRITTENY", "BEULA", "BARI", "AUDRIE", "AUDRIA", + "ARDELIA", "ANNELLE", "ANGILA", "ALONA", "ALLYN", "DOUGLAS", "ROGER", "JONATHAN", "RALPH", "NICHOLAS", + "BENJAMIN", "BRUCE", "HARRY", "WAYNE", "STEVE", "HOWARD", "ERNEST", "PHILLIP", "TODD", "CRAIG", + "ALAN", "PHILIP", "EARL", "DANNY", "BRYAN", "STANLEY", "LEONARD", "NATHAN", "MANUEL", "RODNEY", + "MARVIN", 
"VINCENT", "JEFFERY", "JEFF", "CHAD", "JACOB", "ALFRED", "BRADLEY", "HERBERT", "FREDERICK", + "EDWIN", "DON", "RICKY", "RANDALL", "BARRY", "BERNARD", "LEROY", "MARCUS", "THEODORE", "CLIFFORD", + "MIGUEL", "JIM", "TOM", "CALVIN", "BILL", "LLOYD", "DEREK", "WARREN", "DARRELL", "JEROME", + "FLOYD", "ALVIN", "TIM", "GORDON", "GREG", "JORGE", "DUSTIN", "PEDRO", "DERRICK", "ZACHARY", + "HERMAN", "GLEN", "HECTOR", "RICARDO", "RICK", "BRENT", "RAMON", "GILBERT", "MARC", "REGINALD", + "RUBEN", "NATHANIEL", "RAFAEL", "EDGAR", "MILTON", "RAUL", "BEN", "CHESTER", "DUANE", "FRANKLIN", + "BRAD", "RON", "ROLAND", "ARNOLD", "HARVEY", "JARED", "ERIK", "DARRYL", "NEIL", "JAVIER", + "FERNANDO", "CLINTON", "TED", "MATHEW", "TYRONE", "DARREN", "LANCE", "KURT", "ALLAN", "NELSON", + "GUY", "CLAYTON", "HUGH", "MAX", "DWAYNE", "DWIGHT", "ARMANDO", "FELIX", "EVERETT", "IAN", + "WALLACE", "KEN", "BOB", "ALFREDO", "ALBERTO", "DAVE", "IVAN", "BYRON", "ISAAC", "MORRIS", + "CLIFTON", "WILLARD", "ROSS", "ANDY", "SALVADOR", "KIRK", "SERGIO", "SETH", "KENT", "TERRANCE", + "EDUARDO", "TERRENCE", "ENRIQUE", "WADE", "STUART", "FREDRICK", "ARTURO", "ALEJANDRO", "NICK", "LUTHER", + "WENDELL", "JEREMIAH", "JULIUS", "OTIS", "TREVOR", "OLIVER", "LUKE", "HOMER", "GERARD", "DOUG", + "KENNY", "HUBERT", "LYLE", "MATT", "ALFONSO", "ORLANDO", "REX", "CARLTON", "ERNESTO", "NEAL", + "PABLO", "LORENZO", "OMAR", "WILBUR", "GRANT", "HORACE", "RODERICK", "ABRAHAM", "WILLIS", "RICKEY", + "ANDRES", "CESAR", "JOHNATHAN", "MALCOLM", "RUDOLPH", "DAMON", "KELVIN", "PRESTON", "ALTON", "ARCHIE", + "MARCO", "WM", "PETE", "RANDOLPH", "GARRY", "GEOFFREY", "JONATHON", "FELIPE", "GERARDO", "ED", + "DOMINIC", "DELBERT", "COLIN", "GUILLERMO", "EARNEST", "LUCAS", "BENNY", "SPENCER", "RODOLFO", "MYRON", + "EDMUND", "GARRETT", "SALVATORE", "CEDRIC", "LOWELL", "GREGG", "SHERMAN", "WILSON", "SYLVESTER", "ROOSEVELT", + "ISRAEL", "JERMAINE", "FORREST", "WILBERT", "LELAND", "SIMON", "CLARK", "IRVING", "BRYANT", "OWEN", + "RUFUS", "WOODROW", 
"KRISTOPHER", "MACK", "LEVI", "MARCOS", "GUSTAVO", "JAKE", "LIONEL", "GILBERTO", + "CLINT", "NICOLAS", "ISMAEL", "ORVILLE", "ERVIN", "DEWEY", "AL", "WILFRED", "JOSH", "HUGO", + "IGNACIO", "CALEB", "TOMAS", "SHELDON", "ERICK", "STEWART", "DOYLE", "DARREL", "ROGELIO", "TERENCE", + "SANTIAGO", "ALONZO", "ELIAS", "BERT", "ELBERT", "RAMIRO", "CONRAD", "NOAH", "GRADY", "PHIL", + "CORNELIUS", "LAMAR", "ROLANDO", "CLAY", "PERCY", "DEXTER", "BRADFORD", "DARIN", "AMOS", "MOSES", + "IRVIN", "SAUL", "ROMAN", "RANDAL", "TIMMY", "DARRIN", "WINSTON", "BRENDAN", "ABEL", "DOMINICK", + "BOYD", "EMILIO", "ELIJAH", "DOMINGO", "EMMETT", "MARLON", "EMANUEL", "JERALD", "EDMOND", "EMIL", + "DEWAYNE", "WILL", "OTTO", "TEDDY", "REYNALDO", "BRET", "JESS", "TRENT", "HUMBERTO", "EMMANUEL", + "STEPHAN", "VICENTE", "LAMONT", "GARLAND", "MILES", "EFRAIN", "HEATH", "RODGER", "HARLEY", "ETHAN", + "ELDON", "ROCKY", "PIERRE", "JUNIOR", "FREDDY", "ELI", "BRYCE", "ANTOINE", "STERLING", "CHASE", + "GROVER", "ELTON", "CLEVELAND", "DYLAN", "CHUCK", "DAMIAN", "REUBEN", "STAN", "AUGUST", "LEONARDO", + "JASPER", "RUSSEL", "ERWIN", "BENITO", "HANS", "MONTE", "BLAINE", "ERNIE", "CURT", "QUENTIN", + "AGUSTIN", "MURRAY", "JAMAL", "ADOLFO", "HARRISON", "TYSON", "BURTON", "BRADY", "ELLIOTT", "WILFREDO", + "BART", "JARROD", "VANCE", "DENIS", "DAMIEN", "JOAQUIN", "HARLAN", "DESMOND", "ELLIOT", "DARWIN", + "GREGORIO", "BUDDY", "XAVIER", "KERMIT", "ROSCOE", "ESTEBAN", "ANTON", "SOLOMON", "SCOTTY", "NORBERT", + "ELVIN", "WILLIAMS", "NOLAN", "ROD", "QUINTON", "HAL", "BRAIN", "ROB", "ELWOOD", "KENDRICK", + "DARIUS", "MOISES", "FIDEL", "THADDEUS", "CLIFF", "MARCEL", "JACKSON", "RAPHAEL", "BRYON", "ARMAND", + "ALVARO", "JEFFRY", "DANE", "JOESPH", "THURMAN", "NED", "RUSTY", "MONTY", "FABIAN", "REGGIE", + "MASON", "GRAHAM", "ISAIAH", "VAUGHN", "GUS", "LOYD", "DIEGO", "ADOLPH", "NORRIS", "MILLARD", + "ROCCO", "GONZALO", "DERICK", "RODRIGO", "WILEY", "RIGOBERTO", "ALPHONSO", "TY", "NOE", "VERN", + "REED", "JEFFERSON", "ELVIS", 
"BERNARDO", "MAURICIO", "HIRAM", "DONOVAN", "BASIL", "RILEY", "NICKOLAS", + "MAYNARD", "SCOT", "VINCE", "QUINCY", "EDDY", "SEBASTIAN", "FEDERICO", "ULYSSES", "HERIBERTO", "DONNELL", + "COLE", "DAVIS", "GAVIN", "EMERY", "WARD", "ROMEO", "JAYSON", "DANTE", "CLEMENT", "COY", + "MAXWELL", "JARVIS", "BRUNO", "ISSAC", "DUDLEY", "BROCK", "SANFORD", "CARMELO", "BARNEY", "NESTOR", + "STEFAN", "DONNY", "ART", "LINWOOD", "BEAU", "WELDON", "GALEN", "ISIDRO", "TRUMAN", "DELMAR", + "JOHNATHON", "SILAS", "FREDERIC", "DICK", "IRWIN", "MERLIN", "CHARLEY", "MARCELINO", "HARRIS", "CARLO", + "TRENTON", "KURTIS", "HUNTER", "AURELIO", "WINFRED", "VITO", "COLLIN", "DENVER", "CARTER", "LEONEL", + "EMORY", "PASQUALE", "MOHAMMAD", "MARIANO", "DANIAL", "LANDON", "DIRK", "BRANDEN", "ADAN", "BUFORD", + "GERMAN", "WILMER", "EMERSON", "ZACHERY", "FLETCHER", "JACQUES", "ERROL", "DALTON", "MONROE", "JOSUE", + "EDWARDO", "BOOKER", "WILFORD", "SONNY", "SHELTON", "CARSON", "THERON", "RAYMUNDO", "DAREN", "HOUSTON", + "ROBBY", "LINCOLN", "GENARO", "BENNETT", "OCTAVIO", "CORNELL", "HUNG", "ARRON", "ANTONY", "HERSCHEL", + "GIOVANNI", "GARTH", "CYRUS", "CYRIL", "RONNY", "LON", "FREEMAN", "DUNCAN", "KENNITH", "CARMINE", + "ERICH", "CHADWICK", "WILBURN", "RUSS", "REID", "MYLES", "ANDERSON", "MORTON", "JONAS", "FOREST", + "MITCHEL", "MERVIN", "ZANE", "RICH", "JAMEL", "LAZARO", "ALPHONSE", "RANDELL", "MAJOR", "JARRETT", + "BROOKS", "ABDUL", "LUCIANO", "SEYMOUR", "EUGENIO", "MOHAMMED", "VALENTIN", "CHANCE", "ARNULFO", "LUCIEN", + "FERDINAND", "THAD", "EZRA", "ALDO", "RUBIN", "ROYAL", "MITCH", "EARLE", "ABE", "WYATT", + "MARQUIS", "LANNY", "KAREEM", "JAMAR", "BORIS", "ISIAH", "EMILE", "ELMO", "ARON", "LEOPOLDO", + "EVERETTE", "JOSEF", "ELOY", "RODRICK", "REINALDO", "LUCIO", "JERROD", "WESTON", "HERSHEL", "BARTON", + "PARKER", "LEMUEL", "BURT", "JULES", "GIL", "ELISEO", "AHMAD", "NIGEL", "EFREN", "ANTWAN", + "ALDEN", "MARGARITO", "COLEMAN", "DINO", "OSVALDO", "LES", "DEANDRE", "NORMAND", "KIETH", "TREY", + 
"NORBERTO", "NAPOLEON", "JEROLD", "FRITZ", "ROSENDO", "MILFORD", "CHRISTOPER", "ALFONZO", "LYMAN", "JOSIAH", + "BRANT", "WILTON", "RICO", "JAMAAL", "DEWITT", "BRENTON", "OLIN", "FOSTER", "FAUSTINO", "CLAUDIO", + "JUDSON", "GINO", "EDGARDO", "ALEC", "TANNER", "JARRED", "DONN", "TAD", "PRINCE", "PORFIRIO", + "ODIS", "LENARD", "CHAUNCEY", "TOD", "MEL", "MARCELO", "KORY", "AUGUSTUS", "KEVEN", "HILARIO", + "BUD", "SAL", "ORVAL", "MAURO", "ZACHARIAH", "OLEN", "ANIBAL", "MILO", "JED", "DILLON", + "AMADO", "NEWTON", "LENNY", "RICHIE", "HORACIO", "BRICE", "MOHAMED", "DELMER", "DARIO", "REYES", + "MAC", "JONAH", "JERROLD", "ROBT", "HANK", "RUPERT", "ROLLAND", "KENTON", "DAMION", "ANTONE", + "WALDO", "FREDRIC", "BRADLY", "KIP", "BURL", "WALKER", "TYREE", "JEFFEREY", "AHMED", "WILLY", + "STANFORD", "OREN", "NOBLE", "MOSHE", "MIKEL", "ENOCH", "BRENDON", "QUINTIN", "JAMISON", "FLORENCIO", + "DARRICK", "TOBIAS", "HASSAN", "GIUSEPPE", "DEMARCUS", "CLETUS", "TYRELL", "LYNDON", "KEENAN", "WERNER", + "GERALDO", "COLUMBUS", "CHET", "BERTRAM", "MARKUS", "HUEY", "HILTON", "DWAIN", "DONTE", "TYRON", + "OMER", "ISAIAS", "HIPOLITO", "FERMIN", "ADALBERTO", "BO", "BARRETT", "TEODORO", "MCKINLEY", "MAXIMO", + "GARFIELD", "RALEIGH", "LAWERENCE", "ABRAM", "RASHAD", "KING", "EMMITT", "DARON", "SAMUAL", "MIQUEL", + "EUSEBIO", "DOMENIC", "DARRON", "BUSTER", "WILBER", "RENATO", "JC", "HOYT", "HAYWOOD", "EZEKIEL", + "CHAS", "FLORENTINO", "ELROY", "CLEMENTE", "ARDEN", "NEVILLE", "EDISON", "DESHAWN", "NATHANIAL", "JORDON", + "DANILO", "CLAUD", "SHERWOOD", "RAYMON", "RAYFORD", "CRISTOBAL", "AMBROSE", "TITUS", "HYMAN", "FELTON", + "EZEQUIEL", "ERASMO", "STANTON", "LONNY", "LEN", "IKE", "MILAN", "LINO", "JAROD", "HERB", + "ANDREAS", "WALTON", "RHETT", "PALMER", "DOUGLASS", "CORDELL", "OSWALDO", "ELLSWORTH", "VIRGILIO", "TONEY", + "NATHANAEL", "DEL", "BENEDICT", "MOSE", "JOHNSON", "ISREAL", "GARRET", "FAUSTO", "ASA", "ARLEN", + "ZACK", "WARNER", "MODESTO", "FRANCESCO", "MANUAL", "GAYLORD", "GASTON", 
"FILIBERTO", "DEANGELO", "MICHALE", + "GRANVILLE", "WES", "MALIK", "ZACKARY", "TUAN", "ELDRIDGE", "CRISTOPHER", "CORTEZ", "ANTIONE", "MALCOM", + "LONG", "KOREY", "JOSPEH", "COLTON", "WAYLON", "VON", "HOSEA", "SHAD", "SANTO", "RUDOLF", + "ROLF", "REY", "RENALDO", "MARCELLUS", "LUCIUS", "KRISTOFER", "BOYCE", "BENTON", "HAYDEN", "HARLAND", + "ARNOLDO", "RUEBEN", "LEANDRO", "KRAIG", "JERRELL", "JEROMY", "HOBERT", "CEDRICK", "ARLIE", "WINFORD", + "WALLY", "LUIGI", "KENETH", "JACINTO", "GRAIG", "FRANKLYN", "EDMUNDO", "SID", "PORTER", "LEIF", + "JERAMY", "BUCK", "WILLIAN", "VINCENZO", "SHON", "LYNWOOD", "JERE", "HAI", "ELDEN", "DORSEY", + "DARELL", "BRODERICK", "ALONSO" + ] + total_sum = 0 + temp_sum = 0 + name.sort() + for i in range(len(name)): + for j in name[i]: + temp_sum += ord(j) - ord('A') + 1 + total_sum += (i + 1) * temp_sum + temp_sum = 0 + print(total_sum) if __name__ == '__main__': - main() + main() diff --git a/project_euler/problem_24/sol1.py b/project_euler/problem_24/sol1.py index b20493cb03af..347f778b2cba 100644 --- a/project_euler/problem_24/sol1.py +++ b/project_euler/problem_24/sol1.py @@ -1,7 +1,10 @@ from itertools import permutations + + def main(): - result=list(map("".join, permutations('0123456789'))) - print(result[999999]) + result = list(map("".join, permutations('0123456789'))) + print(result[999999]) + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/project_euler/problem_25/sol1.py b/project_euler/problem_25/sol1.py index f8cea3093dcf..54cd8e083e5f 100644 --- a/project_euler/problem_25/sol1.py +++ b/project_euler/problem_25/sol1.py @@ -1,31 +1,34 @@ from __future__ import print_function try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def fibonacci(n): - if n == 1 or type(n) is not int: - return 0 - elif n == 2: - return 1 - else: - sequence = [0, 1] - for i in xrange(2, n+1): - sequence.append(sequence[i-1] + sequence[i-2]) + if n 
== 1 or type(n) is not int: + return 0 + elif n == 2: + return 1 + else: + sequence = [0, 1] + for i in xrange(2, n + 1): + sequence.append(sequence[i - 1] + sequence[i - 2]) + + return sequence[n] - return sequence[n] def fibonacci_digits_index(n): - digits = 0 - index = 2 + digits = 0 + index = 2 + + while digits < n: + index += 1 + digits = len(str(fibonacci(index))) - while digits < n: - index += 1 - digits = len(str(fibonacci(index))) + return index - return index if __name__ == '__main__': - print(fibonacci_digits_index(1000)) \ No newline at end of file + print(fibonacci_digits_index(1000)) diff --git a/project_euler/problem_25/sol2.py b/project_euler/problem_25/sol2.py index 35147a9bfb14..6778bb08f0ce 100644 --- a/project_euler/problem_25/sol2.py +++ b/project_euler/problem_25/sol2.py @@ -1,10 +1,12 @@ def fibonacci_genrator(): - a, b = 0,1 - while True: - a,b = b,a+b - yield b + a, b = 0, 1 + while True: + a, b = b, a + b + yield b + + answer = 1 gen = fibonacci_genrator() while len(str(next(gen))) < 1000: - answer += 1 -assert answer+1 == 4782 + answer += 1 +assert answer + 1 == 4782 diff --git a/project_euler/problem_28/sol1.py b/project_euler/problem_28/sol1.py index 4942115ce537..b834a1881e9b 100644 --- a/project_euler/problem_28/sol1.py +++ b/project_euler/problem_28/sol1.py @@ -1,29 +1,32 @@ from __future__ import print_function + from math import ceil try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def diagonal_sum(n): - total = 1 + total = 1 - for i in xrange(1, int(ceil(n/2.0))): - odd = 2*i+1 - even = 2*i - total = total + 4*odd**2 - 6*even + for i in xrange(1, int(ceil(n / 2.0))): + odd = 2 * i + 1 + even = 2 * i + total = total + 4 * odd ** 2 - 6 * even + + return total - return total if __name__ == '__main__': - import sys - - if len(sys.argv) == 1: - print(diagonal_sum(1001)) - else: - try: - n = int(sys.argv[1]) - diagonal_sum(n) - except ValueError: - print('Invalid 
entry - please enter a number') \ No newline at end of file + import sys + + if len(sys.argv) == 1: + print(diagonal_sum(1001)) + else: + try: + n = int(sys.argv[1]) + diagonal_sum(n) + except ValueError: + print('Invalid entry - please enter a number') diff --git a/project_euler/problem_29/solution.py b/project_euler/problem_29/solution.py index 64d35c84d9ca..b336059b78d4 100644 --- a/project_euler/problem_29/solution.py +++ b/project_euler/problem_29/solution.py @@ -19,12 +19,12 @@ def main(): currentPow = 0 - N = 101 # maximum limit + N = 101 # maximum limit for a in range(2, N): for b in range(2, N): - currentPow = a**b # calculates the current power - collectPowers.add(currentPow) # adds the result to the set + currentPow = a ** b # calculates the current power + collectPowers.add(currentPow) # adds the result to the set print("Number of terms ", len(collectPowers)) diff --git a/project_euler/problem_31/sol1.py b/project_euler/problem_31/sol1.py index 33653722f890..dc1a1f62b7e6 100644 --- a/project_euler/problem_31/sol1.py +++ b/project_euler/problem_31/sol1.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- from __future__ import print_function + try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 ''' diff --git a/project_euler/problem_36/sol1.py b/project_euler/problem_36/sol1.py index d78e7e59f210..51ce68326319 100644 --- a/project_euler/problem_36/sol1.py +++ b/project_euler/problem_36/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Double-base palindromes Problem 36 @@ -9,22 +10,24 @@ (Please note that the palindromic number, in either base, may not include leading zeros.) 
''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def is_palindrome(n): - n = str(n) + n = str(n) + + if n == n[::-1]: + return True + else: + return False - if n == n[::-1]: - return True - else: - return False total = 0 for i in xrange(1, 1000000): - if is_palindrome(i) and is_palindrome(bin(i).split('b')[1]): - total += i + if is_palindrome(i) and is_palindrome(bin(i).split('b')[1]): + total += i -print(total) \ No newline at end of file +print(total) diff --git a/project_euler/problem_40/sol1.py b/project_euler/problem_40/sol1.py index ab4017512a1a..cbf90443f538 100644 --- a/project_euler/problem_40/sol1.py +++ b/project_euler/problem_40/sol1.py @@ -1,5 +1,6 @@ -#-.- coding: latin-1 -.- +# -.- coding: latin-1 -.- from __future__ import print_function + ''' Champernowne's constant Problem 40 @@ -18,9 +19,9 @@ i = 1 while len(constant) < 1e6: - constant.append(str(i)) - i += 1 + constant.append(str(i)) + i += 1 constant = ''.join(constant) -print(int(constant[0])*int(constant[9])*int(constant[99])*int(constant[999])*int(constant[9999])*int(constant[99999])*int(constant[999999])) \ No newline at end of file +print(int(constant[0]) * int(constant[9]) * int(constant[99]) * int(constant[999]) * int(constant[9999]) * int(constant[99999]) * int(constant[999999])) diff --git a/project_euler/problem_48/sol1.py b/project_euler/problem_48/sol1.py index 5c4bdb0f6384..96c6c884377d 100644 --- a/project_euler/problem_48/sol1.py +++ b/project_euler/problem_48/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Self Powers Problem 48 @@ -9,13 +10,12 @@ ''' try: - xrange + xrange except NameError: - xrange = range + xrange = range total = 0 for i in xrange(1, 1001): - total += i**i - + total += i ** i -print(str(total)[-10:]) \ No newline at end of file +print(str(total)[-10:]) diff --git a/project_euler/problem_52/sol1.py b/project_euler/problem_52/sol1.py index 376b4cfa1d63..b01e1dca8230 
100644 --- a/project_euler/problem_52/sol1.py +++ b/project_euler/problem_52/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Permuted multiples Problem 52 @@ -10,14 +11,14 @@ i = 1 while True: - if sorted(list(str(i))) == \ - sorted(list(str(2*i))) == \ - sorted(list(str(3*i))) == \ - sorted(list(str(4*i))) == \ - sorted(list(str(5*i))) == \ - sorted(list(str(6*i))): - break + if sorted(list(str(i))) == \ + sorted(list(str(2 * i))) == \ + sorted(list(str(3 * i))) == \ + sorted(list(str(4 * i))) == \ + sorted(list(str(5 * i))) == \ + sorted(list(str(6 * i))): + break - i += 1 + i += 1 -print(i) \ No newline at end of file +print(i) diff --git a/project_euler/problem_53/sol1.py b/project_euler/problem_53/sol1.py index ed6d5329eb4e..74107eb92ff0 100644 --- a/project_euler/problem_53/sol1.py +++ b/project_euler/problem_53/sol1.py @@ -1,6 +1,8 @@ -#-.- coding: latin-1 -.- +# -.- coding: latin-1 -.- from __future__ import print_function + from math import factorial + ''' Combinatoric selections Problem 53 @@ -19,18 +21,20 @@ How many, not necessarily distinct, values of nCr, for 1 ≤ n ≤ 100, are greater than one-million? 
''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def combinations(n, r): - return factorial(n)/(factorial(r)*factorial(n-r)) + return factorial(n) / (factorial(r) * factorial(n - r)) + total = 0 for i in xrange(1, 101): - for j in xrange(1, i+1): - if combinations(i, j) > 1e6: - total += 1 + for j in xrange(1, i + 1): + if combinations(i, j) > 1e6: + total += 1 -print(total) \ No newline at end of file +print(total) diff --git a/project_euler/problem_76/sol1.py b/project_euler/problem_76/sol1.py index 2832f6d7afb6..15528eeeea0f 100644 --- a/project_euler/problem_76/sol1.py +++ b/project_euler/problem_76/sol1.py @@ -1,4 +1,5 @@ from __future__ import print_function + ''' Counting Summations Problem 76 @@ -15,21 +16,23 @@ How many different ways can one hundred be written as a sum of at least two positive integers? ''' try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 + def partition(m): - memo = [[0 for _ in xrange(m)] for _ in xrange(m+1)] - for i in xrange(m+1): - memo[i][0] = 1 + memo = [[0 for _ in xrange(m)] for _ in xrange(m + 1)] + for i in xrange(m + 1): + memo[i][0] = 1 + + for n in xrange(m + 1): + for k in xrange(1, m): + memo[n][k] += memo[n][k - 1] + if n > k: + memo[n][k] += memo[n - k - 1][k] - for n in xrange(m+1): - for k in xrange(1, m): - memo[n][k] += memo[n][k-1] - if n > k: - memo[n][k] += memo[n-k-1][k] + return (memo[m][m - 1] - 1) - return (memo[m][m-1] - 1) -print(partition(100)) \ No newline at end of file +print(partition(100)) diff --git a/searches/binary_search.py b/searches/binary_search.py index 1d5da96586cd..4de3741a15bd 100644 --- a/searches/binary_search.py +++ b/searches/binary_search.py @@ -10,10 +10,11 @@ python binary_search.py """ from __future__ import print_function + import bisect try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 @@ -85,8 
+86,8 @@ def binary_search_std_lib(sorted_collection, item): return index return None -def binary_search_by_recursion(sorted_collection, item, left, right): +def binary_search_by_recursion(sorted_collection, item, left, right): """Pure implementation of binary search algorithm in Python by recursion Be careful collection must be ascending sorted, otherwise result will be @@ -112,16 +113,17 @@ def binary_search_by_recursion(sorted_collection, item, left, right): """ if (right < left): return None - + midpoint = left + (right - left) // 2 if sorted_collection[midpoint] == item: return midpoint elif sorted_collection[midpoint] > item: - return binary_search_by_recursion(sorted_collection, item, left, midpoint-1) + return binary_search_by_recursion(sorted_collection, item, left, midpoint - 1) else: - return binary_search_by_recursion(sorted_collection, item, midpoint+1, right) - + return binary_search_by_recursion(sorted_collection, item, midpoint + 1, right) + + def __assert_sorted(collection): """Check if collection is ascending sorted, if not - raises :py:class:`ValueError` @@ -145,6 +147,7 @@ def __assert_sorted(collection): if __name__ == '__main__': import sys + user_input = raw_input('Enter numbers separated by comma:\n').strip() collection = [int(item) for item in user_input.split(',')] try: diff --git a/searches/interpolation_search.py b/searches/interpolation_search.py index 329596d340a5..db2693ac87a2 100644 --- a/searches/interpolation_search.py +++ b/searches/interpolation_search.py @@ -4,7 +4,7 @@ from __future__ import print_function try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 @@ -21,38 +21,38 @@ def interpolation_search(sorted_collection, item): right = len(sorted_collection) - 1 while left <= right: - #avoid devided by 0 during interpolation - if sorted_collection[left]==sorted_collection[right]: - if sorted_collection[left]==item: + # avoid devided by 0 during interpolation + if 
sorted_collection[left] == sorted_collection[right]: + if sorted_collection[left] == item: return left else: return None point = left + ((item - sorted_collection[left]) * (right - left)) // (sorted_collection[right] - sorted_collection[left]) - - #out of range check - if point<0 or point>=len(sorted_collection): + + # out of range check + if point < 0 or point >= len(sorted_collection): return None current_item = sorted_collection[point] if current_item == item: return point else: - if pointright: - left = right + elif point > right: + left = right right = point - else: + else: if item < current_item: right = point - 1 else: left = point + 1 return None -def interpolation_search_by_recursion(sorted_collection, item, left, right): +def interpolation_search_by_recursion(sorted_collection, item, left, right): """Pure implementation of interpolation search algorithm in Python by recursion Be careful collection must be ascending sorted, otherwise result will be unpredictable @@ -62,31 +62,32 @@ def interpolation_search_by_recursion(sorted_collection, item, left, right): :return: index of found item or None if item is not found """ - #avoid devided by 0 during interpolation - if sorted_collection[left]==sorted_collection[right]: - if sorted_collection[left]==item: + # avoid devided by 0 during interpolation + if sorted_collection[left] == sorted_collection[right]: + if sorted_collection[left] == item: return left else: return None point = left + ((item - sorted_collection[left]) * (right - left)) // (sorted_collection[right] - sorted_collection[left]) - - #out of range check - if point<0 or point>=len(sorted_collection): + + # out of range check + if point < 0 or point >= len(sorted_collection): return None if sorted_collection[point] == item: return point - elif pointright: + elif point > right: return interpolation_search_by_recursion(sorted_collection, item, right, left) else: if sorted_collection[point] > item: - return 
interpolation_search_by_recursion(sorted_collection, item, left, point-1) + return interpolation_search_by_recursion(sorted_collection, item, left, point - 1) else: - return interpolation_search_by_recursion(sorted_collection, item, point+1, right) - + return interpolation_search_by_recursion(sorted_collection, item, point + 1, right) + + def __assert_sorted(collection): """Check if collection is ascending sorted, if not - raises :py:class:`ValueError` :param collection: collection @@ -107,7 +108,7 @@ def __assert_sorted(collection): if __name__ == '__main__': import sys - + """ user_input = raw_input('Enter numbers separated by comma:\n').strip() collection = [int(item) for item in user_input.split(',')] @@ -122,13 +123,13 @@ def __assert_sorted(collection): debug = 0 if debug == 1: - collection = [10,30,40,45,50,66,77,93] + collection = [10, 30, 40, 45, 50, 66, 77, 93] try: __assert_sorted(collection) except ValueError: sys.exit('Sequence must be ascending sorted to apply interpolation search') target = 67 - + result = interpolation_search(collection, target) if result is not None: print('{} found at positions: {}'.format(target, result)) diff --git a/searches/jump_search.py b/searches/jump_search.py index 10cb933f2f35..c01437fa3cce 100644 --- a/searches/jump_search.py +++ b/searches/jump_search.py @@ -1,10 +1,13 @@ from __future__ import print_function + import math + + def jump_search(arr, x): n = len(arr) step = int(math.floor(math.sqrt(n))) prev = 0 - while arr[min(step, n)-1] < x: + while arr[min(step, n) - 1] < x: prev = step step += int(math.floor(math.sqrt(n))) if prev >= n: @@ -19,8 +22,7 @@ def jump_search(arr, x): return -1 - -arr = [ 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610] +arr = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610] x = 55 index = jump_search(arr, x) -print("\nNumber " + str(x) +" is at index " + str(index)); +print("\nNumber " + str(x) + " is at index " + str(index)); diff --git 
a/searches/linear_search.py b/searches/linear_search.py index 058322f21d09..6a9abb887fc7 100644 --- a/searches/linear_search.py +++ b/searches/linear_search.py @@ -12,10 +12,11 @@ from __future__ import print_function try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 + def linear_search(sequence, target): """Pure implementation of linear search algorithm in Python diff --git a/searches/quick_select.py b/searches/quick_select.py index 76d09cb97f97..6b70562bd78f 100644 --- a/searches/quick_select.py +++ b/searches/quick_select.py @@ -4,6 +4,8 @@ A python implementation of the quick select algorithm, which is efficient for calculating the value that would appear in the index of a list if it would be sorted, even if it is not already sorted https://en.wikipedia.org/wiki/Quickselect """ + + def _partition(data, pivot): """ Three way partition the data into smaller, equal and greater lists, @@ -21,29 +23,30 @@ def _partition(data, pivot): else: equal.append(element) return less, equal, greater - + + def quickSelect(list, k): - #k = len(list) // 2 when trying to find the median (index that value would be when list is sorted) - - #invalid input - if k>=len(list) or k<0: + # k = len(list) // 2 when trying to find the median (index that value would be when list is sorted) + + # invalid input + if k >= len(list) or k < 0: return None - + smaller = [] larger = [] pivot = random.randint(0, len(list) - 1) pivot = list[pivot] count = 0 - smaller, equal, larger =_partition(list, pivot) + smaller, equal, larger = _partition(list, pivot) count = len(equal) m = len(smaller) - #k is the pivot + # k is the pivot if m <= k < m + count: return pivot # must be in smaller elif m > k: return quickSelect(smaller, k) - #must be in larger + # must be in larger else: - return quickSelect(larger, k - (m + count)) \ No newline at end of file + return quickSelect(larger, k - (m + count)) diff --git a/searches/sentinel_linear_search.py 
b/searches/sentinel_linear_search.py index 336cc5ab3b74..c5e5ebe490fb 100644 --- a/searches/sentinel_linear_search.py +++ b/searches/sentinel_linear_search.py @@ -10,6 +10,7 @@ python sentinel_linear_search.py """ + def sentinel_linear_search(sequence, target): """Pure implementation of sentinel linear search algorithm in Python @@ -46,7 +47,7 @@ def sentinel_linear_search(sequence, target): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 @@ -59,4 +60,4 @@ def sentinel_linear_search(sequence, target): if result is not None: print('{} found at positions: {}'.format(target, result)) else: - print('Not found') \ No newline at end of file + print('Not found') diff --git a/searches/tabu_search.py b/searches/tabu_search.py index e21ddd53cc78..16052f6f6b02 100644 --- a/searches/tabu_search.py +++ b/searches/tabu_search.py @@ -23,8 +23,8 @@ e.g. python tabu_search.py -f tabudata2.txt -i 4 -s 3 """ -import copy import argparse +import copy import sys diff --git a/searches/ternary_search.py b/searches/ternary_search.py index c610f9b3c6da..8089d82dd5a5 100644 --- a/searches/ternary_search.py +++ b/searches/ternary_search.py @@ -11,7 +11,7 @@ import sys try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 @@ -19,66 +19,70 @@ # It is recommended for users to keep this number greater than or equal to 10. precision = 10 + # This is the linear search that will occur after the search space has become smaller. def lin_search(left, right, A, target): - for i in range(left, right+1): - if(A[i] == target): + for i in range(left, right + 1): + if (A[i] == target): return i + # This is the iterative method of the ternary search algorithm. 
def ite_ternary_search(A, target): left = 0 right = len(A) - 1; - while(True): - if(left collection[j+1]: + for j in range(length - 1 - i): + if collection[j] > collection[j + 1]: swapped = True - collection[j], collection[j+1] = collection[j+1], collection[j] + collection[j], collection[j + 1] = collection[j + 1], collection[j] if not swapped: break # Stop iteration if the collection is sorted. return collection if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:').strip() diff --git a/sorts/bucket_sort.py b/sorts/bucket_sort.py index c4d61874fc47..8e7fb98f782a 100644 --- a/sorts/bucket_sort.py +++ b/sorts/bucket_sort.py @@ -13,7 +13,8 @@ # Time Complexity of Solution: # Best Case O(n); Average Case O(n); Worst Case O(n) -DEFAULT_BUCKET_SIZE=5 +DEFAULT_BUCKET_SIZE = 5 + def bucket_sort(my_list, bucket_size=DEFAULT_BUCKET_SIZE): if len(my_list) == 0: @@ -27,9 +28,10 @@ def bucket_sort(my_list, bucket_size=DEFAULT_BUCKET_SIZE): buckets[int((my_list[i] - min_value) // bucket_size)].append(my_list[i]) return sorted([buckets[i][j] for i in range(len(buckets)) - for j in range(len(buckets[i]))]) + for j in range(len(buckets[i]))]) + if __name__ == "__main__": user_input = input('Enter numbers separated by a comma:').strip() unsorted = [float(n) for n in user_input.split(',') if len(user_input) > 0] - print(bucket_sort(unsorted)) \ No newline at end of file + print(bucket_sort(unsorted)) diff --git a/sorts/cocktail_shaker_sort.py b/sorts/cocktail_shaker_sort.py index 8ad3383bbe9f..370ba2e443d7 100644 --- a/sorts/cocktail_shaker_sort.py +++ b/sorts/cocktail_shaker_sort.py @@ -1,31 +1,33 @@ from __future__ import print_function + def cocktail_shaker_sort(unsorted): """ Pure implementation of the cocktail shaker sort algorithm in Python. 
""" - for i in range(len(unsorted)-1, 0, -1): + for i in range(len(unsorted) - 1, 0, -1): swapped = False - + for j in range(i, 0, -1): - if unsorted[j] < unsorted[j-1]: - unsorted[j], unsorted[j-1] = unsorted[j-1], unsorted[j] + if unsorted[j] < unsorted[j - 1]: + unsorted[j], unsorted[j - 1] = unsorted[j - 1], unsorted[j] swapped = True for j in range(i): - if unsorted[j] > unsorted[j+1]: - unsorted[j], unsorted[j+1] = unsorted[j+1], unsorted[j] + if unsorted[j] > unsorted[j + 1]: + unsorted[j], unsorted[j + 1] = unsorted[j + 1], unsorted[j] swapped = True - + if not swapped: return unsorted - + + if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 - + user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] cocktail_shaker_sort(unsorted) diff --git a/sorts/comb_sort.py b/sorts/comb_sort.py index 22b6f66f04cc..deed2a0f4c27 100644 --- a/sorts/comb_sort.py +++ b/sorts/comb_sort.py @@ -12,6 +12,7 @@ python comb_sort.py """ + def comb_sort(data): """Pure implementation of comb sort algorithm in Python :param collection: some mutable ordered collection with heterogeneous @@ -38,9 +39,9 @@ def comb_sort(data): i = 0 while gap + i < len(data): - if data[i] > data[i+gap]: + if data[i] > data[i + gap]: # Swap values - data[i], data[i+gap] = data[i+gap], data[i] + data[i], data[i + gap] = data[i + gap], data[i] swapped = True i += 1 @@ -49,7 +50,7 @@ def comb_sort(data): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/sorts/counting_sort.py b/sorts/counting_sort.py index ad98f1a0da4c..8acd1a395208 100644 --- a/sorts/counting_sort.py +++ b/sorts/counting_sort.py @@ -44,7 +44,7 @@ def counting_sort(collection): # sum each position with it's predecessors. 
now, counting_arr[i] tells # us how many elements <= i has in the collection for i in range(1, counting_arr_length): - counting_arr[i] = counting_arr[i] + counting_arr[i-1] + counting_arr[i] = counting_arr[i] + counting_arr[i - 1] # create the output collection ordered = [0] * coll_len @@ -52,11 +52,12 @@ def counting_sort(collection): # place the elements in the output, respecting the original order (stable # sort) from end to begin, updating counting_arr for i in reversed(range(0, coll_len)): - ordered[counting_arr[collection[i] - coll_min]-1] = collection[i] + ordered[counting_arr[collection[i] - coll_min] - 1] = collection[i] counting_arr[collection[i] - coll_min] -= 1 return ordered + def counting_sort_string(string): return ''.join([chr(i) for i in counting_sort([ord(c) for c in string])]) @@ -66,7 +67,7 @@ def counting_sort_string(string): assert "eghhiiinrsssttt" == counting_sort_string("thisisthestring") try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/sorts/cycle_sort.py b/sorts/cycle_sort.py index 492022164427..036523b0a34c 100644 --- a/sorts/cycle_sort.py +++ b/sorts/cycle_sort.py @@ -46,10 +46,10 @@ def cycle_sort(array): # Main Code starts here if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 - + user_input = raw_input('Enter numbers separated by a comma:\n') unsorted = [int(item) for item in user_input.split(',')] n = len(unsorted) diff --git a/sorts/external_sort.py b/sorts/external_sort.py index 1638e9efafee..430e10e040a6 100644 --- a/sorts/external_sort.py +++ b/sorts/external_sort.py @@ -1,10 +1,11 @@ #!/usr/bin/env python +import argparse # # Sort large text files in a minimum amount of memory # import os -import argparse + class FileSplitter(object): BLOCK_FILENAME_FORMAT = 'block_{0}.dat' @@ -106,7 +107,6 @@ def get_file_handles(self, filenames, buffer_size): return files - class ExternalSort(object): def 
__init__(self, block_size): self.block_size = block_size @@ -137,7 +137,6 @@ def parse_memory(string): return int(string) - def main(): parser = argparse.ArgumentParser() parser.add_argument('-m', diff --git a/sorts/gnome_sort.py b/sorts/gnome_sort.py index 2927b097f11d..a8061b4ac261 100644 --- a/sorts/gnome_sort.py +++ b/sorts/gnome_sort.py @@ -1,29 +1,31 @@ from __future__ import print_function + def gnome_sort(unsorted): """ Pure implementation of the gnome sort algorithm in Python. """ if len(unsorted) <= 1: return unsorted - + i = 1 - + while i < len(unsorted): - if unsorted[i-1] <= unsorted[i]: + if unsorted[i - 1] <= unsorted[i]: i += 1 else: - unsorted[i-1], unsorted[i] = unsorted[i], unsorted[i-1] + unsorted[i - 1], unsorted[i] = unsorted[i], unsorted[i - 1] i -= 1 if (i == 0): i = 1 - + + if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 - + user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] gnome_sort(unsorted) diff --git a/sorts/heap_sort.py b/sorts/heap_sort.py index 3c72abca8059..8846f2ded122 100644 --- a/sorts/heap_sort.py +++ b/sorts/heap_sort.py @@ -53,9 +53,10 @@ def heap_sort(unsorted): heapify(unsorted, 0, i) return unsorted + if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/sorts/insertion_sort.py b/sorts/insertion_sort.py index e088705947d4..4278096ef907 100644 --- a/sorts/insertion_sort.py +++ b/sorts/insertion_sort.py @@ -41,7 +41,7 @@ def insertion_sort(collection): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/sorts/merge_sort.py b/sorts/merge_sort.py index ecbad7075119..fe38884e5004 100644 --- a/sorts/merge_sort.py +++ b/sorts/merge_sort.py @@ -29,6 +29,7 @@ def merge_sort(collection): >>> merge_sort([-2, -5, 
-45]) [-45, -5, -2] """ + def merge(left, right): '''merge left and right :param left: left collection @@ -39,6 +40,7 @@ def merge(left, right): while left and right: result.append(left.pop(0) if left[0] <= right[0] else right.pop(0)) return result + left + right + if len(collection) <= 1: return collection mid = len(collection) // 2 @@ -47,10 +49,10 @@ def merge(left, right): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] - print(*merge_sort(unsorted), sep=',') \ No newline at end of file + print(*merge_sort(unsorted), sep=',') diff --git a/sorts/merge_sort_fastest.py b/sorts/merge_sort_fastest.py index bd356c935ca0..878a0fb3788c 100644 --- a/sorts/merge_sort_fastest.py +++ b/sorts/merge_sort_fastest.py @@ -37,7 +37,7 @@ def merge_sort(collection): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/sorts/pancake_sort.py b/sorts/pancake_sort.py index 478a9a967d27..1bf1e1ba0023 100644 --- a/sorts/pancake_sort.py +++ b/sorts/pancake_sort.py @@ -7,11 +7,12 @@ def pancake_sort(arr): # Find the maximum number in arr mi = arr.index(max(arr[0:cur])) # Reverse from 0 to mi - arr = arr[mi::-1] + arr[mi+1:len(arr)] + arr = arr[mi::-1] + arr[mi + 1:len(arr)] # Reverse whole list - arr = arr[cur-1::-1] + arr[cur:len(arr)] + arr = arr[cur - 1::-1] + arr[cur:len(arr)] cur -= 1 return arr + if __name__ == '__main__': - print(pancake_sort([0,10,15,3,2,9,14,13])) + print(pancake_sort([0, 10, 15, 3, 2, 9, 14, 13])) diff --git a/sorts/pigeon_sort.py b/sorts/pigeon_sort.py index 65eb8896ea9c..a55304a0d832 100644 --- a/sorts/pigeon_sort.py +++ b/sorts/pigeon_sort.py @@ -4,32 +4,36 @@ from __future__ import print_function + def pigeon_sort(array): # Manually finds the minimum and maximum of the 
array. min = array[0] max = array[0] for i in range(len(array)): - if(array[i] < min): min = array[i] - elif(array[i] > max): max = array[i] + if (array[i] < min): + min = array[i] + elif (array[i] > max): + max = array[i] # Compute the variables - holes_range = max-min + 1 + holes_range = max - min + 1 holes = [0 for _ in range(holes_range)] holes_repeat = [0 for _ in range(holes_range)] # Make the sorting. for i in range(len(array)): index = array[i] - min - if(holes[index] != array[i]): + if (holes[index] != array[i]): holes[index] = array[i] holes_repeat[index] += 1 - else: holes_repeat[index] += 1 + else: + holes_repeat[index] += 1 # Makes the array back by replacing the numbers. index = 0 for i in range(holes_range): - while(holes_repeat[i] > 0): + while (holes_repeat[i] > 0): array[index] = holes[i] index += 1 holes_repeat[i] -= 1 @@ -37,12 +41,13 @@ def pigeon_sort(array): # Returns the sorted array. return array + if __name__ == '__main__': try: - raw_input # Python2 + raw_input # Python2 except NameError: - raw_input = input # Python 3 - + raw_input = input # Python 3 + user_input = raw_input('Enter numbers separated by comma:\n') unsorted = [int(x) for x in user_input.split(',')] sorted = pigeon_sort(unsorted) diff --git a/sorts/quick_sort.py b/sorts/quick_sort.py index 223c26fde1fe..c77aa76b28f4 100644 --- a/sorts/quick_sort.py +++ b/sorts/quick_sort.py @@ -49,10 +49,10 @@ def quick_sort(collection): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() - unsorted = [ int(item) for item in user_input.split(',') ] - print( quick_sort(unsorted) ) + unsorted = [int(item) for item in user_input.split(',')] + print(quick_sort(unsorted)) diff --git a/sorts/quick_sort_3_partition.py b/sorts/quick_sort_3_partition.py index def646cdbc50..6207da1e7cd8 100644 --- a/sorts/quick_sort_3_partition.py +++ 
b/sorts/quick_sort_3_partition.py @@ -1,5 +1,6 @@ from __future__ import print_function + def quick_sort_3partition(sorting, left, right): if right <= left: return @@ -19,13 +20,14 @@ def quick_sort_3partition(sorting, left, right): quick_sort_3partition(sorting, left, a - 1) quick_sort_3partition(sorting, b + 1, right) + if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() - unsorted = [ int(item) for item in user_input.split(',') ] - quick_sort_3partition(unsorted,0,len(unsorted)-1) + unsorted = [int(item) for item in user_input.split(',')] + quick_sort_3partition(unsorted, 0, len(unsorted) - 1) print(unsorted) diff --git a/sorts/radix_sort.py b/sorts/radix_sort.py index 8dfc66b17b23..2990247a0ac0 100644 --- a/sorts/radix_sort.py +++ b/sorts/radix_sort.py @@ -6,21 +6,21 @@ def radix_sort(lst): max_digit = max(lst) while placement < max_digit: - # declare and initialize buckets - buckets = [list() for _ in range( RADIX )] + # declare and initialize buckets + buckets = [list() for _ in range(RADIX)] - # split lst between lists - for i in lst: - tmp = int((i / placement) % RADIX) - buckets[tmp].append(i) + # split lst between lists + for i in lst: + tmp = int((i / placement) % RADIX) + buckets[tmp].append(i) - # empty lists into lst array - a = 0 - for b in range( RADIX ): - buck = buckets[b] - for i in buck: - lst[a] = i - a += 1 + # empty lists into lst array + a = 0 + for b in range(RADIX): + buck = buckets[b] + for i in buck: + lst[a] = i + a += 1 - # move to next - placement *= RADIX + # move to next + placement *= RADIX diff --git a/sorts/random_normal_distribution_quicksort.py b/sorts/random_normal_distribution_quicksort.py index dfa37da61e26..432eed6b8d84 100644 --- a/sorts/random_normal_distribution_quicksort.py +++ b/sorts/random_normal_distribution_quicksort.py @@ -1,66 +1,60 @@ from __future__ import 
print_function + from random import randint from tempfile import TemporaryFile -import numpy as np +import numpy as np -def _inPlaceQuickSort(A,start,end): +def _inPlaceQuickSort(A, start, end): count = 0 - if start item: @@ -23,7 +25,7 @@ def insertion_sort(lst): for index in range(1, length): value = lst[index] pos = binary_search(lst, value, 0, index - 1) - lst = lst[:pos] + [value] + lst[pos:index] + lst[index+1:] + lst = lst[:pos] + [value] + lst[pos:index] + lst[index + 1:] return lst @@ -73,10 +75,10 @@ def tim_sort(lst): def main(): - - lst = [5,9,10,3,-4,5,178,92,46,-18,0,7] + lst = [5, 9, 10, 3, -4, 5, 178, 92, 46, -18, 0, 7] sorted_lst = tim_sort(lst) print(sorted_lst) + if __name__ == '__main__': main() diff --git a/sorts/topological_sort.py b/sorts/topological_sort.py index db4dd250a119..b2ec3dc28a8d 100644 --- a/sorts/topological_sort.py +++ b/sorts/topological_sort.py @@ -1,4 +1,5 @@ from __future__ import print_function + # a # / \ # b c @@ -28,6 +29,7 @@ def topological_sort(start, visited, sort): # return sort return sort + if __name__ == '__main__': sort = topological_sort('a', [], []) print(sort) diff --git a/sorts/tree_sort.py b/sorts/tree_sort.py index d06b0de28e56..07f93e50251a 100644 --- a/sorts/tree_sort.py +++ b/sorts/tree_sort.py @@ -5,10 +5,10 @@ class node(): # BST data structure def __init__(self, val): self.val = val - self.left = None - self.right = None - - def insert(self,val): + self.left = None + self.right = None + + def insert(self, val): if self.val: if val < self.val: if self.left is None: @@ -23,24 +23,27 @@ def insert(self,val): else: self.val = val + def inorder(root, res): # Recursive travesal if root: - inorder(root.left,res) + inorder(root.left, res) res.append(root.val) - inorder(root.right,res) + inorder(root.right, res) + def tree_sort(arr): # Build BST if len(arr) == 0: return arr root = node(arr[0]) - for i in range(1,len(arr)): + for i in range(1, len(arr)): root.insert(arr[i]) # Traverse BST in order. 
res = [] - inorder(root,res) + inorder(root, res) return res + if __name__ == '__main__': - print(tree_sort([10,1,3,2,9,14,13])) + print(tree_sort([10, 1, 3, 2, 9, 14, 13])) diff --git a/sorts/wiggle_sort.py b/sorts/wiggle_sort.py index 0d4f20e3f96b..d8349382601e 100644 --- a/sorts/wiggle_sort.py +++ b/sorts/wiggle_sort.py @@ -4,14 +4,17 @@ if input numbers = [3, 5, 2, 1, 6, 4] one possible Wiggle Sorted answer is [3, 5, 1, 6, 2, 4]. """ + + def wiggle_sort(nums): for i in range(len(nums)): - if (i % 2 == 1) == (nums[i-1] > nums[i]): - nums[i-1], nums[i] = nums[i], nums[i-1] + if (i % 2 == 1) == (nums[i - 1] > nums[i]): + nums[i - 1], nums[i] = nums[i], nums[i - 1] + if __name__ == '__main__': print("Enter the array elements:\n") - array=list(map(int,input().split())) + array = list(map(int, input().split())) print("The unsorted array is:\n") print(array) wiggle_sort(array) diff --git a/strings/knuth_morris_pratt.py b/strings/knuth_morris_pratt.py index 4553944284be..742479f89886 100644 --- a/strings/knuth_morris_pratt.py +++ b/strings/knuth_morris_pratt.py @@ -46,7 +46,7 @@ def get_failure_array(pattern): if pattern[i] == pattern[j]: i += 1 elif i > 0: - i = failure[i-1] + i = failure[i - 1] continue j += 1 failure.append(i) diff --git a/strings/levenshtein_distance.py b/strings/levenshtein_distance.py index 274dfd7ccf9b..326f1d701acb 100644 --- a/strings/levenshtein_distance.py +++ b/strings/levenshtein_distance.py @@ -48,7 +48,6 @@ def levenshtein_distance(first_word, second_word): current_row = [i + 1] for j, c2 in enumerate(second_word): - # Calculate insertions, deletions and substitutions insertions = previous_row[j + 1] + 1 deletions = current_row[j] + 1 @@ -66,7 +65,7 @@ def levenshtein_distance(first_word, second_word): if __name__ == '__main__': try: - raw_input # Python 2 + raw_input # Python 2 except NameError: raw_input = input # Python 3 diff --git a/strings/manacher.py b/strings/manacher.py index e73e173b43e0..63d0892a3ec3 100644 --- 
a/strings/manacher.py +++ b/strings/manacher.py @@ -1,52 +1,52 @@ -# calculate palindromic length from center with incrementing difference -def palindromic_length( center, diff, string): - if center-diff == -1 or center+diff == len(string) or string[center-diff] != string[center+diff] : - return 0 - return 1 + palindromic_length(center, diff+1, string) - -def palindromic_string( input_string ): - """ - Manacher’s algorithm which finds Longest Palindromic Substring in linear time. - - 1. first this conver input_string("xyx") into new_string("x|y|x") where odd positions are actual input - characters. - 2. for each character in new_string it find corresponding length and store, - a. max_length - b. max_length's center - 3. return output_string from center - max_length to center + max_length and remove all "|" - """ - max_length = 0 - - # if input_string is "aba" than new_input_string become "a|b|a" - new_input_string = "" - output_string = "" - - # append each character + "|" in new_string for range(0, length-1) - for i in input_string[:len(input_string)-1] : - new_input_string += i + "|" - #append last character - new_input_string += input_string[-1] - - - # for each character in new_string find corresponding palindromic string - for i in range(len(new_input_string)) : - - # get palindromic length from ith position - length = palindromic_length(i, 1, new_input_string) - - # update max_length and start position - if max_length < length : - max_length = length - start = i - - #create that string - for i in new_input_string[start-max_length:start+max_length+1] : - if i != "|": - output_string += i - - return output_string - - -if __name__ == '__main__': - n = input() - print(palindromic_string(n)) +# calculate palindromic length from center with incrementing difference +def palindromic_length(center, diff, string): + if center - diff == -1 or center + diff == len(string) or string[center - diff] != string[center + diff]: + return 0 + return 1 + 
palindromic_length(center, diff + 1, string) + + +def palindromic_string(input_string): + """ + Manacher’s algorithm which finds Longest Palindromic Substring in linear time. + + 1. first this conver input_string("xyx") into new_string("x|y|x") where odd positions are actual input + characters. + 2. for each character in new_string it find corresponding length and store, + a. max_length + b. max_length's center + 3. return output_string from center - max_length to center + max_length and remove all "|" + """ + max_length = 0 + + # if input_string is "aba" than new_input_string become "a|b|a" + new_input_string = "" + output_string = "" + + # append each character + "|" in new_string for range(0, length-1) + for i in input_string[:len(input_string) - 1]: + new_input_string += i + "|" + # append last character + new_input_string += input_string[-1] + + # for each character in new_string find corresponding palindromic string + for i in range(len(new_input_string)): + + # get palindromic length from ith position + length = palindromic_length(i, 1, new_input_string) + + # update max_length and start position + if max_length < length: + max_length = length + start = i + + # create that string + for i in new_input_string[start - max_length:start + max_length + 1]: + if i != "|": + output_string += i + + return output_string + + +if __name__ == '__main__': + n = input() + print(palindromic_string(n)) diff --git a/strings/min_cost_string_conversion.py b/strings/min_cost_string_conversion.py index de7f9f727283..e994e8fa6841 100644 --- a/strings/min_cost_string_conversion.py +++ b/strings/min_cost_string_conversion.py @@ -1,9 +1,9 @@ from __future__ import print_function try: - xrange #Python 2 + xrange # Python 2 except NameError: - xrange = range #Python 3 + xrange = range # Python 3 ''' Algorithm for calculating the most cost-efficient sequence for converting one string into another. 
@@ -13,109 +13,113 @@ ---Delete character with cost cD ---Insert character with cost cI ''' + + def compute_transform_tables(X, Y, cC, cR, cD, cI): - X = list(X) - Y = list(Y) - m = len(X) - n = len(Y) + X = list(X) + Y = list(Y) + m = len(X) + n = len(Y) + + costs = [[0 for _ in xrange(n + 1)] for _ in xrange(m + 1)] + ops = [[0 for _ in xrange(n + 1)] for _ in xrange(m + 1)] - costs = [[0 for _ in xrange(n+1)] for _ in xrange(m+1)] - ops = [[0 for _ in xrange(n+1)] for _ in xrange(m+1)] + for i in xrange(1, m + 1): + costs[i][0] = i * cD + ops[i][0] = 'D%c' % X[i - 1] - for i in xrange(1, m+1): - costs[i][0] = i*cD - ops[i][0] = 'D%c' % X[i-1] + for i in xrange(1, n + 1): + costs[0][i] = i * cI + ops[0][i] = 'I%c' % Y[i - 1] - for i in xrange(1, n+1): - costs[0][i] = i*cI - ops[0][i] = 'I%c' % Y[i-1] + for i in xrange(1, m + 1): + for j in xrange(1, n + 1): + if X[i - 1] == Y[j - 1]: + costs[i][j] = costs[i - 1][j - 1] + cC + ops[i][j] = 'C%c' % X[i - 1] + else: + costs[i][j] = costs[i - 1][j - 1] + cR + ops[i][j] = 'R%c' % X[i - 1] + str(Y[j - 1]) - for i in xrange(1, m+1): - for j in xrange(1, n+1): - if X[i-1] == Y[j-1]: - costs[i][j] = costs[i-1][j-1] + cC - ops[i][j] = 'C%c' % X[i-1] - else: - costs[i][j] = costs[i-1][j-1] + cR - ops[i][j] = 'R%c' % X[i-1] + str(Y[j-1]) + if costs[i - 1][j] + cD < costs[i][j]: + costs[i][j] = costs[i - 1][j] + cD + ops[i][j] = 'D%c' % X[i - 1] - if costs[i-1][j] + cD < costs[i][j]: - costs[i][j] = costs[i-1][j] + cD - ops[i][j] = 'D%c' % X[i-1] + if costs[i][j - 1] + cI < costs[i][j]: + costs[i][j] = costs[i][j - 1] + cI + ops[i][j] = 'I%c' % Y[j - 1] - if costs[i][j-1] + cI < costs[i][j]: - costs[i][j] = costs[i][j-1] + cI - ops[i][j] = 'I%c' % Y[j-1] + return costs, ops - return costs, ops def assemble_transformation(ops, i, j): - if i == 0 and j == 0: - seq = [] - return seq - else: - if ops[i][j][0] == 'C' or ops[i][j][0] == 'R': - seq = assemble_transformation(ops, i-1, j-1) - seq.append(ops[i][j]) - return seq - elif 
ops[i][j][0] == 'D': - seq = assemble_transformation(ops, i-1, j) - seq.append(ops[i][j]) - return seq - else: - seq = assemble_transformation(ops, i, j-1) - seq.append(ops[i][j]) - return seq + if i == 0 and j == 0: + seq = [] + return seq + else: + if ops[i][j][0] == 'C' or ops[i][j][0] == 'R': + seq = assemble_transformation(ops, i - 1, j - 1) + seq.append(ops[i][j]) + return seq + elif ops[i][j][0] == 'D': + seq = assemble_transformation(ops, i - 1, j) + seq.append(ops[i][j]) + return seq + else: + seq = assemble_transformation(ops, i, j - 1) + seq.append(ops[i][j]) + return seq + if __name__ == '__main__': - _, operations = compute_transform_tables('Python', 'Algorithms', -1, 1, 2, 2) - - m = len(operations) - n = len(operations[0]) - sequence = assemble_transformation(operations, m-1, n-1) - - string = list('Python') - i = 0 - cost = 0 - - with open('min_cost.txt', 'w') as file: - for op in sequence: - print(''.join(string)) - - if op[0] == 'C': - file.write('%-16s' % 'Copy %c' % op[1]) - file.write('\t\t\t' + ''.join(string)) - file.write('\r\n') - - cost -= 1 - elif op[0] == 'R': - string[i] = op[2] - - file.write('%-16s' % ('Replace %c' % op[1] + ' with ' + str(op[2]))) - file.write('\t\t' + ''.join(string)) - file.write('\r\n') - - cost += 1 - elif op[0] == 'D': - string.pop(i) - - file.write('%-16s' % 'Delete %c' % op[1]) - file.write('\t\t\t' + ''.join(string)) - file.write('\r\n') - - cost += 2 - else: - string.insert(i, op[1]) - - file.write('%-16s' % 'Insert %c' % op[1]) - file.write('\t\t\t' + ''.join(string)) - file.write('\r\n') - - cost += 2 - - i += 1 - - print(''.join(string)) - print('Cost: ', cost) - - file.write('\r\nMinimum cost: ' + str(cost)) + _, operations = compute_transform_tables('Python', 'Algorithms', -1, 1, 2, 2) + + m = len(operations) + n = len(operations[0]) + sequence = assemble_transformation(operations, m - 1, n - 1) + + string = list('Python') + i = 0 + cost = 0 + + with open('min_cost.txt', 'w') as file: + for op in 
sequence: + print(''.join(string)) + + if op[0] == 'C': + file.write('%-16s' % 'Copy %c' % op[1]) + file.write('\t\t\t' + ''.join(string)) + file.write('\r\n') + + cost -= 1 + elif op[0] == 'R': + string[i] = op[2] + + file.write('%-16s' % ('Replace %c' % op[1] + ' with ' + str(op[2]))) + file.write('\t\t' + ''.join(string)) + file.write('\r\n') + + cost += 1 + elif op[0] == 'D': + string.pop(i) + + file.write('%-16s' % 'Delete %c' % op[1]) + file.write('\t\t\t' + ''.join(string)) + file.write('\r\n') + + cost += 2 + else: + string.insert(i, op[1]) + + file.write('%-16s' % 'Insert %c' % op[1]) + file.write('\t\t\t' + ''.join(string)) + file.write('\r\n') + + cost += 2 + + i += 1 + + print(''.join(string)) + print('Cost: ', cost) + + file.write('\r\nMinimum cost: ' + str(cost)) diff --git a/strings/naive_String_Search.py b/strings/naive_String_Search.py index 04c0d8157b24..a8c2ea584399 100644 --- a/strings/naive_String_Search.py +++ b/strings/naive_String_Search.py @@ -7,23 +7,26 @@ n=length of main string m=length of pattern string """ -def naivePatternSearch(mainString,pattern): - patLen=len(pattern) - strLen=len(mainString) - position=[] - for i in range(strLen-patLen+1): - match_found=True + + +def naivePatternSearch(mainString, pattern): + patLen = len(pattern) + strLen = len(mainString) + position = [] + for i in range(strLen - patLen + 1): + match_found = True for j in range(patLen): - if mainString[i+j]!=pattern[j]: - match_found=False + if mainString[i + j] != pattern[j]: + match_found = False break if match_found: position.append(i) return position -mainString="ABAAABCDBBABCDDEBCABC" -pattern="ABC" -position=naivePatternSearch(mainString,pattern) + +mainString = "ABAAABCDBBABCDDEBCABC" +pattern = "ABC" +position = naivePatternSearch(mainString, pattern) print("Pattern found in position ") for x in position: - print(x) \ No newline at end of file + print(x) diff --git a/work-temp/ReadExcel.py b/work-temp/ReadExcel.py new file mode 100644 index 
000000000000..41a0d14c1808 --- /dev/null +++ b/work-temp/ReadExcel.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +import os + +import pandas as pd +import numpy as np + + +def load_data(): + """ + 读取数据文件,推荐CSV格式 + :return: + """ + work_main_dir = os.path.dirname(__file__) + os.path.sep + file_path = work_main_dir + "激活学生列表.xlsx" + return pd.read_excel(file_path) + + +def main(): + data = load_data() + account_id_list = np.array(data['account_id']).tolist() + print(', \n'.join([str(i) for i in account_id_list])) + + +if __name__ == '__main__': + main() + + diff --git "a/work-temp/\346\277\200\346\264\273\345\255\246\347\224\237\345\210\227\350\241\250.xlsx" "b/work-temp/\346\277\200\346\264\273\345\255\246\347\224\237\345\210\227\350\241\250.xlsx" new file mode 100644 index 000000000000..1dae61b6db16 Binary files /dev/null and "b/work-temp/\346\277\200\346\264\273\345\255\246\347\224\237\345\210\227\350\241\250.xlsx" differ