diff --git a/BotZone2.8v1 Android/Lrfarm.py b/BotZone2.8v1 Android/Lrfarm.py
new file mode 100644
index 0000000..fde546d
--- /dev/null
+++ b/BotZone2.8v1 Android/Lrfarm.py
@@ -0,0 +1,368 @@
+import commands
+from colorama import init, Fore, Back, Style
+# Colorama autoreset
+init(autoreset=True)
+
+
+def t():
+
+ print(" Lr Trunks")
+ stage = input('What stage would you like to complete(Proud Bloodline 320022) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Raging Counterstrike 406003) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Dignity of a Clan 408002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(The Strongest Space Pirate 420002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Warrior of Hope 414002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(The Time Patrol Warrior 422002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def mv():
+ print("Majin Vegeta")
+ stage = input('What stage would you like to complete(The Dark Prince Returns 319022) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(15): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(The Strongest Shadow Dragon 517002) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Dark Nightmare 518002) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Fusion in Blue 519001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Rose-Tinted Plot 520001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(A New Hope 522001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def gv():
+ print(" Lr Super Saiyan Goku & Super Saiyan Vegeta")
+ stage = input('What stage would you like to complete(The Ultimate Pair of the Present World 537001) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Ultimate Splendor 512003) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(7): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def b():
+ print("Lr Full Power Boujack (Galactic Warrior)")
+ stage = input('What stage would you like to complete( Extreme Peril 306008) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(25): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def c():
+ print("Lr Cell (Perfect Form) & Cell Jr")
+ stage = input('What stage would you like to complete(Waking Nightmare 502003) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(7): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def tm():
+ print("Lr Trunks (Teen) (Future) & Mai (Future)")
+ stage = input('What stage would you like to complete( The Zero Mortals Plan 328006) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(20): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(Dark Nightmare 518002) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(1): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( Rose-Tinted Plot 520001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(1): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(Fusion in Blue 519001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(1): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( The Epic Battle Begins 524001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(1): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(A New Hope 522001) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( Last Judgment...Or Last Hope 523002) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(Searing Rose-Colored Fury 520002) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( Sublime Blue! 519002) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def bw():
+ print("Lr Beerus & Whis")
+ stage = input('What stage would you like to complete(God of Destruction Wrath 511002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(7): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Destruction God Awakens 314001) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(4): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(In Search of the Super Saiyan God 314002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(7): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Vegeta Pride 314007) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(4): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def gg():
+ print("Lr Super Saiyan Gohan (Teen) & Super Saiyan Goten (Kid)")
+ stage = input('What stage would you like to complete(Go! Warriors of the New Generation 552001) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Blast! Family Kamehameha! 326006) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(20): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def vg():
+ print("Lr Super Saiyan Goku (Angel) & Super Saiyan Vegeta (Angel)")
+ stage = input('What stage would you like to complete(The Ultimate Pair of the Otherworld 536001) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Fusion Reborn! 326006) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(7): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def tg():
+ print("Lr Trunks (Kid) & Goten (Kid)")
+ stage = input('What stage would you like to complete(An Unexpectedly Powerful Man! 411002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Super Gotenks! 513002) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(One Powerful Super Fusion! 513003) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def gh():
+ print(" Lr Super Saiyan 2 Gohan")
+ stage = input('What stage would you like to complete(Waking Nightmare 502003) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def ggg():
+ print(" Lr Super Saiyan 3 Goku")
+ stage = input('What stage would you like to complete(Super Saiyan Goku 403002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Phantom Majin Resurrected! 535002) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(Mighty Warrior: 24-Hour Revival 528001) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(Ultimate Finishing Move 504002) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(4): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def bd():
+ print("Lr Bardock")
+ stage = input('What stage would you like to complete(Saiyans from Planet Vegeta 347001) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( 534001 The Unknown Battle) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(347007 A Lone Warriors Last Battle) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( 602002 True Fear) : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(602003 Summit of the Universe) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def gb():
+ print("Lr Goku Black (Super Saiyan Rosé) & Zamasu")
+ stage = input('What stage would you like to complete( 518002 Dark Nightmare) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(519001 Fusion in Blue) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( 520001 Rose-Tinted Plot) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(522001 A New Hope) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete( 523002 Last Judgment...Or Last Hope) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(524001 The Epic Battle Begins) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(3): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def gf():
+ print("Lr Goku & Frieza (Final Form) (Angel)")
+ stage = input('What stage would you like to complete( 544001 Ever-Evolving Power) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(Ever-Evolving Evil) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ stage = input('What stage would you like to complete(538001 Kaboom! Ultra Instinct) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+ "BREAK"
+ stage = input('What stage would you like to complete(533002 The True Golden Frieza) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def by():
+ print("Lr Legendary Super Saiyan Broly")
+ stage = input('What stage would you like to complete(501003 The Demon Returns) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
\ No newline at end of file
diff --git a/BotZone2.8v1 Android/README.md b/BotZone2.8v1 Android/README.md
new file mode 100644
index 0000000..9ba6844
--- /dev/null
+++ b/BotZone2.8v1 Android/README.md
@@ -0,0 +1,38 @@
+# pysqlsimplecipher
+Encrypt or decrypt a formatted SQLite db.
+
+This project is a tool for sqlite database encryption or decryption like
+[sqlcipher](http://sqlcipher.net/)
+without installing sqlcipher.
+
+When encrypting or decrypting a database, the AES-256-CBC algorithm is used.
+Each page shares the same key derived from the password,
+but owns a random initialization vector stored at the end of the page.
+
+## Decrypt
+```bash
+python decrypt.py encrypted.db password output.db
+```
+
+## Encrypt
+```bash
+python encrypt.py plain.db password output.db
+```
+Needs reserved space at the end of each page of the database file.
+
+Otherwise, use sqlcipher to encrypt.
+
+#### Encrypt with sqlcipher
+- Open plain db
+```bash
+./sqlcipher plain.db
+```
+- Encrypt to enc.db
+```sql
+ATTACH DATABASE 'enc.db' as encrypted key 'testkey';
+SELECT sqlcipher_export('encrypted');
+DETACH DATABASE encrypted;
+```
+
+## License
+GNU Lesser General Public License Version 3
diff --git a/BotZone2.8v1 Android/TapOnMe.py b/BotZone2.8v1 Android/TapOnMe.py
new file mode 100644
index 0000000..dd0fcf5
--- /dev/null
+++ b/BotZone2.8v1 Android/TapOnMe.py
@@ -0,0 +1,211 @@
+from colorama import init, Fore, Back, Style
+import commands
+import config
+import sys
+import os
+import webbrowser
+
+# Colorama autoreset
+init(autoreset=True)
+if not os.path.isdir("Saves"):
+ try:
+ os.mkdir('Saves')
+ os.mkdir('Saves/ios')
+ os.mkdir('Saves/android')
+ os.mkdir('Saves/Jp')
+ os.mkdir('Saves/Jp/ios')
+ os.mkdir('Saves/Jp/android')
+ os.mkdir('Saves/fresh')
+ os.mkdir('Saves/fresh/ios')
+ os.mkdir('Saves/fresh/android')
+ except:
+ print(Fore.RED + Style.BRIGHT + 'Unable to create saves file')
+
+while True:
+
+ # Decide which client to use.
+ print(' ')
+ print(Fore.CYAN + Style.BRIGHT + 'Choose a version')
+ print('---------------------------------')
+ print(' ')
+ while True:
+ print('Which version? (' + Fore.YELLOW + Style.BRIGHT + 'Jp: 1 ' + Style.RESET_ALL + 'or ' + Fore.YELLOW + Style.BRIGHT + 'Global: 2' + Style.RESET_ALL + ')',end='')
+ client = input(" ")
+ if client.lower() == '1':
+ config.client = 'japan'
+ while True:
+ print(Fore.CYAN + Style.BRIGHT + 'Enter The BotZone ')
+ print('---------------------------------')
+ print("You're currently on JP")
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'New Account :' + Fore.YELLOW + Style.BRIGHT + ' 0')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Transfer Account :' + Fore.YELLOW + Style.BRIGHT + ' 1')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Load From Save :' + Fore.YELLOW + Style.BRIGHT + ' 2')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Daily Login :' + Fore.YELLOW + Style.BRIGHT + ' 3')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Update database:' + Fore.YELLOW + Style.BRIGHT + ' 4')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'BotZone Discord Link:' + Fore.YELLOW + Style.BRIGHT + ' 5')
+ print('---------------------------------')
+ print(' ')
+ command = input('Enter your choice: ')
+ if command == '0':
+ print(' ')
+ config.identifier = commands.signup()
+ commands.Jp_save_account()
+ config.access_token, config.secret = commands.signin(config.identifier)
+ commands.tutorial()
+ commands.daily_login()
+ break
+ elif command == '1':
+ print(' ')
+ commands.Jp_transfer_account()
+ commands.daily_login()
+ break
+ elif command == '2':
+ print(' ')
+ commands.Jp_load_account()
+ commands.daily_login()
+ commands.accept_gifts()
+ commands.accept_missions()
+ break
+ elif command == '3':
+ print('')
+ commands.Jp_bulk_daily_logins()
+ break
+ elif command == '4':
+ print('')
+ commands.db_download()
+ elif command == '5':
+ webbrowser.open(commands.discordurl, new=0, autoraise=True)
+ elif command == 'exit':
+ exit()
+ else:
+ print(Fore.RED + Style.BRIGHT + "Command not understood")
+
+ # User commands.
+ while True:
+ print('---------------------------------')
+ print(
+ Fore.CYAN + Style.BRIGHT + "Type" + Fore.YELLOW + Style.BRIGHT + " 'help'" + Fore.CYAN + Style.BRIGHT + " to view all commands.")
+
+ # Set up comma separated chain commands. Handled via stdin
+ try:
+ command = input()
+ except:
+ sys.stdin = sys.__stdin__
+ command = input()
+
+ if command == 'exit':
+ break
+ # Pass command to command executor and handle keyboard interrupts.
+ try:
+ commands.user_command_executor(command)
+ except KeyboardInterrupt:
+ print(Fore.CYAN + Style.BRIGHT + 'User interrupted process.')
+ except Exception as e:
+ print(Fore.RED + Style.BRIGHT + repr(e))
+ break
+ elif client.lower() == '2':
+ config.client = 'global'
+ print(' ')
+ while True:
+ print(Fore.CYAN + Style.BRIGHT + 'Enter The BotZone ')
+ print('---------------------------------')
+ print("You're currently on GLB")
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'New Account :' + Fore.YELLOW + Style.BRIGHT + ' 0')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Transfer Account :' + Fore.YELLOW + Style.BRIGHT + ' 1')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Load From Save :' + Fore.YELLOW + Style.BRIGHT + ' 2')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'New Fresh Account :' + Fore.YELLOW + Style.BRIGHT + ' 3')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Load Fresh Account :' + Fore.YELLOW + Style.BRIGHT + ' 4')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Daily Login :' + Fore.YELLOW + Style.BRIGHT + ' 5')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'Update database:' + Fore.YELLOW + Style.BRIGHT + ' 6')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'BotZone Discord Link:' + Fore.YELLOW + Style.BRIGHT + ' 7')
+ print('---------------------------------')
+ command = input('Enter your choice: ')
+ if command == '0':
+ print(' ')
+ config.identifier = commands.signup()
+ commands.save_account()
+ config.access_token, config.secret = commands.signin(config.identifier)
+ commands.tutorial()
+ commands.daily_login()
+ break
+ elif command == '1':
+ print(' ')
+ commands.transfer_account()
+ commands.daily_login()
+ break
+ elif command == '2':
+ print(' ')
+ commands.load_account()
+ commands.daily_login()
+ commands.accept_gifts()
+ commands.accept_missions()
+ break
+ elif command == '3':
+ print(' ')
+ config.identifier = commands.signup()
+ commands.fresh_save_account()
+ config.access_token, config.secret = commands.signin(config.identifier)
+ commands.tutorial()
+ commands.daily_login()
+ break
+ elif command == '4':
+ print(' ')
+ commands.fresh_load_account()
+ commands.daily_login()
+ commands.accept_gifts()
+ commands.accept_missions()
+ break
+ elif command == '5':
+ print('')
+ commands.bulk_daily_logins()
+ commands.fresh_bulk_daily_logins()
+ elif command == '6':
+ print('')
+ commands.db_download()
+ elif command == '7':
+ webbrowser.open(commands.discordurl, new=0, autoraise=True)
+ elif command == 'exit':
+ exit()
+ else:
+ print(Fore.RED + Style.BRIGHT + "Command not understood")
+
+ # User commands.
+ while True:
+ print('---------------------------------')
+ print(
+ Fore.CYAN + Style.BRIGHT + "Type" + Fore.YELLOW + Style.BRIGHT + " 'help'" + Fore.CYAN + Style.BRIGHT + " to view all commands.")
+
+ # Set up comma separated chain commands. Handled via stdin
+ try:
+ command = input()
+ except:
+ sys.stdin = sys.__stdin__
+ command = input()
+
+ if command == 'exit':
+ break
+ # Pass command to command executor and handle keyboard interrupts.
+ try:
+ commands.user_command_executor(command)
+ except KeyboardInterrupt:
+ print(Fore.CYAN + Style.BRIGHT + 'User interrupted process.')
+ except Exception as e:
+ print(Fore.RED + Style.BRIGHT + repr(e))
+
+ break
+ else:
+ print(Fore.RED + Style.BRIGHT + "Command not understood")
diff --git a/BotZone2.8v1 Android/aa.py b/BotZone2.8v1 Android/aa.py
new file mode 100644
index 0000000..3472657
--- /dev/null
+++ b/BotZone2.8v1 Android/aa.py
@@ -0,0 +1,400 @@
+import commands
+from colorama import init, Fore, Back, Style
+# Colorama autoreset
+init(autoreset=True)
+
+
+def ss():
+ print("TEQ Super Saiyan God SS Vegito")
+ stage = input('What stage would you like to complete(519002 Sublime Blue!) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def sss():
+ print("PHY Super Saiyan Broly")
+ stage = input('What stage would you like to complete(548001 The Greatest Saiyan Adversary) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def ssss():
+ print("STR Super Gogeta")
+ stage = input('What stage would you like to complete(505003 Fusion Reborn!) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def s():
+ print("AGL Super Saiyan Gogeta")
+ stage = input('What stage would you like to complete(549001 The Omnipotent Saiyan Warrior) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def a():
+ print("INT SSJ3 Bardock")
+ stage = input('What stage would you like to complete(534001 The Unknown Battle : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def aa():
+ print("STR SSJ4 Goku")
+ stage = input('What stage would you like to complete(525001 The Scarlet Hero! Super Saiyan 4! : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def aaa():
+ print("INT UI Goku")
+ stage = input('What stage would you like to complete(538001 Kaboom! Ultra Instinct : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def aaaa():
+ print("AGL SSJ4 Vegeta")
+ stage = input('What stage would you like to complete(526001 The Crimson Flash! Super Saiyan 4 : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def b():
+ print("PHY FP Frieza")
+ stage = input('What stage would you like to complete(507002 Full-Power Final Battle : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def bb():
+ print("TEQ Golden Frieza")
+ stage = input('What stage would you like to complete(516001 Emperors Obsession Area : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def bbb():
+ print("AGL SSJ3 Goku")
+ stage = input('What stage would you like to complete(504002 Ultimate Finishing Move Area : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def bbbb():
+ print("TEQ SSJ4 Gogeta")
+ stage = input('What stage would you like to complete(532001 The Ultimate Super Gogeta : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def c():
+ print("INT Super Gogeta")
+ stage = input('What stage would you like to complete(505003 Fusion Reborn!) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def cc():
+ print("SSJ3 Gotenks")
+ stage = input('What stage would you like to complete(513002 Super Gotenks) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+def ccc():
+ print("TEQ FP SSJ4 Goku")
+ stage = input('What stage would you like to complete(542001 Transcend Super Saiyan 4) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def cccc():
+ print("STR Jiren")
+ stage = input('What stage would you like to complete(540002 Confronting the Strongest of All Universes) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def d():
+ print("INT Golden Frieza")
+ stage = input('What stage would you like to complete(533002 The True Golden Frieza) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def dd():
+ print("PHY Android 17")
+ stage = input('What stage would you like to complete(543001 Superb Ranger) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ddd():
+ print("TEQ Hit")
+ stage = input('What stage would you like to complete(547001 The Deadliest Assassin) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def dddd():
+ print("AGL SSBE Vegeta")
+ stage = input('What stage would you like to complete(524002 Battle for Honor and Pride) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def e():
+ print("PHY Kid Buu")
+ stage = input('What stage would you like to complete(524003 Regression to Evil) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ee():
+ print("INT Kid Buu")
+ stage = input('What stage would you like to complete(524003 Regression to Evil) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def eee():
+ print("TEQ SSJ3 Goku (Angel)")
+ stage = input('What stage would you like to complete(528001 Mighty Warrior: 24-Hour Revival) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def eeee():
+ print("PHY Goku Black")
+ stage = input('What stage would you like to complete(518002 Dark Nightmare) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def f():
+ print("INT Goku Black")
+ stage = input('What stage would you like to complete(518003 Black Harbinger of Destruction) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ff():
+ print("TEQ SSG Goku")
+ stage = input('What stage would you like to complete(549001 The Omnipotent Saiyan Warrior) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ffx():
+ print("STR SSG Vegeta")
+ stage = input('What stage would you like to complete(549001 The Omnipotent Saiyan Warrior) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def fff():
+ print("AGL SSGSS Goku")
+ stage = input('What stage would you like to complete(514001 Ceaseless Combat) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ffff():
+ print("STR Toppo")
+ stage = input('What stage would you like to complete(524002 Battle for Honor and Pride) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def g():
+ print("STR Rose Goku Black")
+ stage = input('What stage would you like to complete(520002 Searing Rose-Colored Fury) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def gg():
+ print("PHY SSGSS Vegito")
+ stage = input('What stage would you like to complete(519001 Fusion in Blue) : ')
+ difficulty = input('Enter the difficulty|(3:Super): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def ggg():
+ print("STR SSJ3 Goku")
+ stage = input('What stage would you like to complete(504002 Ultimate Finishing Move Area : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def gggx():
+ print("TEQ SSJ3 Broly")
+ stage = input('What stage would you like to complete(531001 All-Time Nastiest Evolution : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def gggg():
+ print("AGL Transgoku")
+ stage = input('What stage would you like to complete(544001 Ever-Evolving Power : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def h():
+ print("STR SSJ3 Vegeta")
+ stage = input('What stage would you like to complete(510002 The Most Powerful Blow : ')
+ difficulty = input('Enter the difficulty|(2:Z-Hard): ')
+ loop = input('Enter how many times to execute(10): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def hh():
+ print("PHY SSJ3 Gotenks")
+ stage = input('What stage would you like to complete(513003 One Powerful Super Fusion! : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def hhh():
+ print("AGL Turles")
+ stage = input('What stage would you like to complete(539001 Arrival of the Universe-Crusher! : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def hhhh():
+ print("STR Janemba")
+ stage = input('What stage would you like to complete(506003 Overwhelming Force of Evil! : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def j():
+ print("INT Janemba")
+ stage = input('What stage would you like to complete(506003 Overwhelming Force of Evil! : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(2): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def jj():
+ print("TEQ TransFrieza")
+ stage = input('What stage would you like to complete(545001 Ever-Evolving Evil : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(11): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
+
+#####################################################################################################################
+def jjj():
+ print("AGL Broly")
+ stage = input('What stage would you like to complete(548001 The Greatest Saiyan Adversary) : ')
+ difficulty = input('Enter the difficulty|(4:Super2): ')
+ loop = input('Enter how many times to execute(5): ')
+ for i in range(int(loop)):
+ commands.complete_stage(stage, difficulty)
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/Makefile b/BotZone2.8v1 Android/android-database-sqlcipher/Makefile
new file mode 100644
index 0000000..de0c9a7
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/Makefile
@@ -0,0 +1,84 @@
+.POSIX:
+# NOTE(review): the .PHONY list previously named a non-existent "build" target
+# and misspelled "publish-remote-release" as "public-remote-release"; both fixed.
+.PHONY: init clean distclean build-openssl build-debug build-release \
+	publish-local-snapshot publish-local-release publish-remote-snapshot \
+	publish-remote-release check
+GRADLE = ./gradlew
+
+# Fetch/initialise git submodules (run once after cloning).
+init:
+	git submodule update --init
+
+clean:
+	$(GRADLE) clean
+
+distclean:
+	$(GRADLE) distclean
+
+build-openssl:
+	$(GRADLE) buildOpenSSL
+
+check:
+	$(GRADLE) check
+
+build-debug: check
+	$(GRADLE) android-database-sqlcipher:bundleDebugAar \
+	-PdebugBuild=true
+
+build-release: check
+	$(GRADLE) android-database-sqlcipher:bundleReleaseAar \
+	-PdebugBuild=false
+
+# The publish targets prompt for GPG signing (and Nexus) credentials via the
+# collect-* macros defined at the bottom, then pass them on as -P properties.
+publish-local-snapshot:
+	@ $(collect-signing-info) \
+	$(GRADLE) \
+	-PpublishSnapshot=true \
+	-PpublishLocal=true \
+	-PsigningKeyId="$$gpgKeyId" \
+	-PsigningKeyRingFile="$$gpgKeyRingFile" \
+	-PsigningKeyPassword="$$gpgPassword" \
+	uploadArchives
+
+publish-local-release:
+	@ $(collect-signing-info) \
+	$(GRADLE) \
+	-PpublishSnapshot=false \
+	-PpublishLocal=true \
+	-PsigningKeyId="$$gpgKeyId" \
+	-PsigningKeyRingFile="$$gpgKeyRingFile" \
+	-PsigningKeyPassword="$$gpgPassword" \
+	uploadArchives
+
+publish-remote-snapshot:
+	@ $(collect-signing-info) \
+	$(collect-nexus-info) \
+	$(GRADLE) \
+	-PpublishSnapshot=true \
+	-PpublishLocal=false \
+	-PsigningKeyId="$$gpgKeyId" \
+	-PsigningKeyRingFile="$$gpgKeyRingFile" \
+	-PsigningKeyPassword="$$gpgPassword" \
+	-PnexusUsername="$$nexusUsername" \
+	-PnexusPassword="$$nexusPassword" \
+	uploadArchives
+
+publish-remote-release:
+	@ $(collect-signing-info) \
+	$(collect-nexus-info) \
+	$(GRADLE) \
+	-PpublishSnapshot=false \
+	-PpublishLocal=false \
+	-PdebugBuild=false \
+	-PsigningKeyId="$$gpgKeyId" \
+	-PsigningKeyRingFile="$$gpgKeyRingFile" \
+	-PsigningKeyPassword="$$gpgPassword" \
+	-PnexusUsername="$$nexusUsername" \
+	-PnexusPassword="$$nexusPassword" \
+	uploadArchives
+
+# Interactive credential prompts; passwords are read with echo disabled.
+collect-nexus-info := \
+	read -p "Enter Nexus username:" nexusUsername; \
+	stty -echo; read -p "Enter Nexus password:" nexusPassword; stty echo;
+
+collect-signing-info := \
+	read -p "Enter GPG signing key id:" gpgKeyId; \
+	read -p "Enter full path to GPG keyring file \
+	(possibly ${HOME}/.gnupg/secring.gpg)" gpgKeyRingFile; \
+	stty -echo; read -p "Enter GPG password:" gpgPassword; stty echo;
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/README.md b/BotZone2.8v1 Android/android-database-sqlcipher/README.md
new file mode 100644
index 0000000..fa02bbd
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/README.md
@@ -0,0 +1,131 @@
+### Download Source and Binaries
+
+The latest AAR binary package information can be [here](https://www.zetetic.net/sqlcipher/open-source), the source can be found [here](https://github.com/sqlcipher/android-database-sqlcipher).
+
+
+### Compatibility
+
+SQLCipher for Android runs on Android 4–Android 9, for `armeabi`, `armeabi-v7a`, `x86`, `x86_64`, and `arm64-v8a` architectures.
+
+### Contributions
+
+We welcome contributions. To contribute to SQLCipher for Android, a [contributor agreement](https://www.zetetic.net/contributions/) needs to be submitted. All submissions should be based on the `master` branch.
+
+### An Illustrative Terminal Listing
+
+A typical SQLite database is unencrypted, and visually parseable even as encoded text. The following example shows the difference between hexdumps of a standard SQLite database and one implementing SQLCipher.
+
+```
+~ sjlombardo$ hexdump -C sqlite.db
+00000000 53 51 4c 69 74 65 20 66 6f 72 6d 61 74 20 33 00 |SQLite format 3.|
+…
+000003c0 65 74 32 74 32 03 43 52 45 41 54 45 20 54 41 42 |et2t2.CREATE TAB|
+000003d0 4c 45 20 74 32 28 61 2c 62 29 24 01 06 17 11 11 |LE t2(a,b)$…..|
+…
+000007e0 20 74 68 65 20 73 68 6f 77 15 01 03 01 2f 01 6f | the show…./.o|
+000007f0 6e 65 20 66 6f 72 20 74 68 65 20 6d 6f 6e 65 79 |ne for the money|
+
+~ $ sqlite3 sqlcipher.db
+sqlite> PRAGMA KEY='test123';
+sqlite> CREATE TABLE t1(a,b);
+sqlite> INSERT INTO t1(a,b) VALUES ('one for the money', 'two for the show');
+sqlite> .quit
+
+~ $ hexdump -C sqlcipher.db
+00000000 84 d1 36 18 eb b5 82 90 c4 70 0d ee 43 cb 61 87 |.?6.?..?p.?C?a.|
+00000010 91 42 3c cd 55 24 ab c6 c4 1d c6 67 b4 e3 96 bb |.B?..?|
+00000bf0 8e 99 ee 28 23 43 ab a4 97 cd 63 42 8a 8e 7c c6 |..?(#C??.?cB..|?|
+
+~ $ sqlite3 sqlcipher.db
+sqlite> SELECT * FROM t1;
+Error: file is encrypted or is not a database
+```
+(example courtesy of SQLCipher)
+
+### Application Integration
+
+You have two main options for using SQLCipher for Android in your app:
+
+- Using it with Room or other consumers of the `androidx.sqlite` API
+
+- Using the native SQLCipher for Android classes
+
+In both cases, you will need to add a dependency on `net.zetetic:android-database-sqlcipher`,
+such as having the following line in your module's `build.gradle` `dependencies`
+closure:
+
+```gradle
+implementation 'net.zetetic:android-database-sqlcipher:4.2.0'
+```
+
+(replacing `4.2.0` with the version you want)
+
+
+
+#### Using SQLCipher for Android With Room
+
+SQLCipher for Android has a `SupportFactory` class in the `net.sqlcipher.database` package
+that can be used to configure Room to use SQLCipher for Android.
+
+There are two `SupportFactory` constructors:
+
+- Both take a `byte[]` to use as the passphrase (if you have a `char[]`, use
+`SQLiteDatabase.getBytes()` to get a suitable `byte[]` to use)
+
+- One constructor has a second parameter: a `SQLiteDatabaseHook` that you can use
+for executing SQL statements before or after the passphrase is used to decrypt
+the database
+
+Then, pass your `SupportFactory` to `openHelperFactory()` on your `RoomDatabase.Builder`:
+
+```java
+final byte[] passphrase = SQLiteDatabase.getBytes(userEnteredPassphrase);
+final SupportFactory factory = new SupportFactory(passphrase);
+final SomeDatabase room = Room.databaseBuilder(activity, SomeDatabase.class, DB_NAME)
+ .openHelperFactory(factory)
+ .build();
+```
+
+Now, Room will make all of its database requests using SQLCipher for Android instead
+of the framework copy of SQLCipher.
+
+Note that `SupportFactory` should work with other consumers of the `androidx.sqlite` API;
+Room is merely a prominent example.
+
+#### Using SQLCipher for Android's Native API
+
+If you have existing SQLite code using classes like `SQLiteDatabase` and `SQLiteOpenHelper`,
+converting your code to use SQLCipher for Android mostly is a three-step process:
+
+1. Replace all `android.database.sqlite.*` `import` statements with ones that
+use `net.sqlcipher.database.*` (e.g., convert `android.database.sqlite.SQLiteDatabase`
+to `net.sqlcipher.database.SQLiteDatabase`)
+
+2. Before attempting to open a database, call `SQLiteDatabase.loadLibs()`, passing
+in a `Context` (e.g., add this to `onCreate()` of your `Application` subclass, using
+the `Application` itself as the `Context`)
+
+3. When opening a database (e.g., `SQLiteDatabase.openOrCreateDatabase()`), pass
+in the passphrase as a `char[]` or `byte[]`
+
+The rest of your code may not need any changes.
+
+An article covering both integration of SQLCipher into an Android application as well as building the source can be found [here](https://www.zetetic.net/sqlcipher/sqlcipher-for-android/).
+
+### Building
+
+In order to build `android-database-sqlcipher` from source you will need the Android SDK, Gradle, and the Android NDK. We currently recommend using Android NDK version `r15c`, however we plan to update to a newer NDK release when possible. To complete the `make` command, the `ANDROID_NDK_ROOT` environment variable must be defined which should point to your NDK root. Once you have cloned the repo, change directory into the root of the repository and run the following commands:
+
+```
+# this only needs to be done once
+make init
+
+# to build the source for debug:
+make build-debug
+# or for a release build:
+make build-release
+```
+
+### License
+
+The Android support libraries are licensed under Apache 2.0, in line with the Android OS code on which they are based. The SQLCipher code itself is licensed under a BSD-style license from Zetetic LLC. Finally, the original SQLite code itself is in the public domain.
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/SQLCIPHER_LICENSE b/BotZone2.8v1 Android/android-database-sqlcipher/SQLCIPHER_LICENSE
new file mode 100644
index 0000000..21566c5
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/SQLCIPHER_LICENSE
@@ -0,0 +1,26 @@
+http://sqlcipher.net
+
+ Copyright (c) 2010 Zetetic LLC
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the ZETETIC LLC nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY ZETETIC LLC ''AS IS'' AND ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL ZETETIC LLC BE LIABLE FOR ANY
+ DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build-openssl-libraries.sh b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build-openssl-libraries.sh
new file mode 100644
index 0000000..3ccc659
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build-openssl-libraries.sh
@@ -0,0 +1,151 @@
+#! /usr/bin/env bash
+# Cross-compile a static OpenSSL libcrypto.a for each Android ABI that
+# SQLCipher for Android ships, using NDK standalone toolchains.
+#
+# Arguments:
+#   $1  minimum Android SDK (API) level for 32-bit ABIs
+#   $2  minimum Android SDK (API) level for 64-bit ABIs
+#   $3  OpenSSL version suffix (expects src/main/external/openssl-$3.tar.gz)
+# Environment:
+#   ANDROID_NDK_ROOT  must point at an NDK that provides
+#                     build/tools/make_standalone_toolchain.py
+
+MINIMUM_ANDROID_SDK_VERSION=$1
+MINIMUM_ANDROID_64_BIT_SDK_VERSION=$2
+OPENSSL=openssl-$3
+
+# Unpack the OpenSSL source tarball next to the other external sources.
+(cd src/main/external/;
+    gunzip -c ${OPENSSL}.tar.gz | tar xf -
+)
+
+(cd src/main/external/${OPENSSL};
+
+    # Validate required arguments/environment before touching anything.
+    if [[ ! ${MINIMUM_ANDROID_SDK_VERSION} ]]; then
+        echo "MINIMUM_ANDROID_SDK_VERSION was not provided, include and rerun"
+        exit 1
+    fi
+
+    if [[ ! ${MINIMUM_ANDROID_64_BIT_SDK_VERSION} ]]; then
+        echo "MINIMUM_ANDROID_64_BIT_SDK_VERSION was not provided, include and rerun"
+        exit 1
+    fi
+
+    if [[ ! ${ANDROID_NDK_ROOT} ]]; then
+        echo "ANDROID_NDK_ROOT environment variable not set, set and rerun"
+        exit 1
+    fi
+
+    NDK_TOOLCHAIN_VERSION=4.9
+    ANDROID_LIB_ROOT=../android-libs
+    ANDROID_TOOLCHAIN_DIR=/tmp/sqlcipher-android-toolchain
+    # Feature-stripped OpenSSL configuration.
+    # NOTE(review): "no-ecdh" appears twice in this list — looks like an
+    # accidental duplicate; harmless, but confirm before removing.
+    OPENSSL_CONFIGURE_OPTIONS="-fPIC no-idea no-camellia \
+    no-seed no-bf no-cast no-rc2 no-rc4 no-rc5 no-md2 \
+    no-md4 no-ecdh no-sock no-ssl3 \
+    no-dsa no-dh no-ec no-ecdsa no-tls1 \
+    no-rfc3779 no-whirlpool no-srp \
+    no-mdc2 no-ecdh no-engine \
+    no-srtp"
+
+    # Select the prebuilt NDK toolchain flavour matching the build host.
+    HOST_INFO=`uname -a`
+    case ${HOST_INFO} in
+        Darwin*)
+            TOOLCHAIN_SYSTEM=darwin-x86_64
+            ;;
+        Linux*)
+            if [[ "${HOST_INFO}" == *i686* ]]
+            then
+                TOOLCHAIN_SYSTEM=linux-x86
+            else
+                TOOLCHAIN_SYSTEM=linux-x86_64
+            fi
+            ;;
+        *)
+            echo "Toolchain unknown for host system"
+            exit 1
+            ;;
+    esac
+
+    rm -rf ${ANDROID_LIB_ROOT}
+
+    for SQLCIPHER_TARGET_PLATFORM in armeabi armeabi-v7a x86 x86_64 arm64-v8a
+    do
+        echo "Building libcrypto.a for ${SQLCIPHER_TARGET_PLATFORM}"
+        # Map the Android ABI name onto toolchain/Configure parameters.
+        case "${SQLCIPHER_TARGET_PLATFORM}" in
+            armeabi)
+                TOOLCHAIN_ARCH=arm
+                TOOLCHAIN_PREFIX=arm-linux-androideabi
+                TOOLCHAIN_FOLDER=arm-linux-androideabi
+                CONFIGURE_ARCH=android-arm
+                ANDROID_API_VERSION=${MINIMUM_ANDROID_SDK_VERSION}
+                OFFSET_BITS=32
+                TOOLCHAIN_DIR=${ANDROID_TOOLCHAIN_DIR}-armeabi
+                ;;
+            armeabi-v7a)
+                TOOLCHAIN_ARCH=arm
+                TOOLCHAIN_PREFIX=arm-linux-androideabi
+                TOOLCHAIN_FOLDER=arm-linux-androideabi
+                CONFIGURE_ARCH="android-arm -march=armv7-a"
+                ANDROID_API_VERSION=${MINIMUM_ANDROID_SDK_VERSION}
+                OFFSET_BITS=32
+                TOOLCHAIN_DIR=${ANDROID_TOOLCHAIN_DIR}-armeabi-v7a
+                ;;
+            x86)
+                TOOLCHAIN_ARCH=x86
+                TOOLCHAIN_PREFIX=i686-linux-android
+                TOOLCHAIN_FOLDER=x86
+                CONFIGURE_ARCH=android-x86
+                ANDROID_API_VERSION=${MINIMUM_ANDROID_SDK_VERSION}
+                OFFSET_BITS=32
+                TOOLCHAIN_DIR=${ANDROID_TOOLCHAIN_DIR}-x86
+                ;;
+            x86_64)
+                TOOLCHAIN_ARCH=x86_64
+                TOOLCHAIN_PREFIX=x86_64-linux-android
+                TOOLCHAIN_FOLDER=x86_64
+                CONFIGURE_ARCH=android64-x86_64
+                ANDROID_API_VERSION=${MINIMUM_ANDROID_64_BIT_SDK_VERSION}
+                OFFSET_BITS=64
+                TOOLCHAIN_DIR=${ANDROID_TOOLCHAIN_DIR}-x86_64
+                ;;
+            arm64-v8a)
+                TOOLCHAIN_ARCH=arm64
+                TOOLCHAIN_PREFIX=aarch64-linux-android
+                TOOLCHAIN_FOLDER=aarch64-linux-android
+                CONFIGURE_ARCH=android-arm64
+                ANDROID_API_VERSION=${MINIMUM_ANDROID_64_BIT_SDK_VERSION}
+                OFFSET_BITS=64
+                TOOLCHAIN_DIR=${ANDROID_TOOLCHAIN_DIR}-arm64-v8a
+                ;;
+            *)
+                echo "Unsupported build platform:${SQLCIPHER_TARGET_PLATFORM}"
+                exit 1
+        esac
+        SOURCE_TOOLCHAIN_DIR=${ANDROID_NDK_ROOT}/toolchains/${TOOLCHAIN_FOLDER}-${NDK_TOOLCHAIN_VERSION}/prebuilt/${TOOLCHAIN_SYSTEM}
+        rm -rf ${TOOLCHAIN_DIR}
+        mkdir -p "${ANDROID_LIB_ROOT}/${SQLCIPHER_TARGET_PLATFORM}"
+        # Create a standalone per-ABI toolchain pinned to the target API level.
+        python ${ANDROID_NDK_ROOT}/build/tools/make_standalone_toolchain.py \
+            --arch ${TOOLCHAIN_ARCH} \
+            --api ${ANDROID_API_VERSION} \
+            --install-dir ${TOOLCHAIN_DIR} \
+            --unified-headers
+
+        if [[ $? -ne 0 ]]; then
+            echo "Error executing make_standalone_toolchain.py for ${TOOLCHAIN_ARCH}"
+            exit 1
+        fi
+
+        export PATH=${TOOLCHAIN_DIR}/bin:${PATH}
+
+        # Configure OpenSSL against the standalone toolchain's sysroot.
+        ANDROID_NDK=${ANDROID_NDK_ROOT} \
+        PATH=${SOURCE_TOOLCHAIN_DIR}/bin:${PATH} \
+        ./Configure ${CONFIGURE_ARCH} \
+            -D__ANDROID_API__=${ANDROID_API_VERSION} \
+            -D_FILE_OFFSET_BITS=${OFFSET_BITS} \
+            ${OPENSSL_CONFIGURE_OPTIONS} \
+            --sysroot=${TOOLCHAIN_DIR}/sysroot
+
+        if [[ $? -ne 0 ]]; then
+            echo "Error executing:./Configure ${CONFIGURE_ARCH} ${OPENSSL_CONFIGURE_OPTIONS}"
+            exit 1
+        fi
+
+        make clean
+        # build_libs builds only the static libraries, not apps/tests.
+        make build_libs
+
+        if [[ $? -ne 0 ]]; then
+            echo "Error executing make for platform:${SQLCIPHER_TARGET_PLATFORM}"
+            exit 1
+        fi
+        mv libcrypto.a ${ANDROID_LIB_ROOT}/${SQLCIPHER_TARGET_PLATFORM}
+    done
+)
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build.gradle b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build.gradle
new file mode 100644
index 0000000..9a35180
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/build.gradle
@@ -0,0 +1,49 @@
+apply plugin: "com.android.library"
+apply plugin: "org.ec4j.editorconfig"
+apply from: "native.gradle"
+apply from: "maven.gradle"
+
+android {
+
+ compileSdkVersion "${compileAndroidSdkVersion}" as Integer
+
+ defaultConfig {
+ versionName "${clientVersionNumber}"
+ minSdkVersion "${minimumAndroidSdkVersion}"
+ targetSdkVersion "${targetAndroidSdkVersion}"
+ versionCode 1
+ versionName "${clientVersionNumber}"
+ archivesBaseName = "${archivesBaseName}-${versionName}"
+ }
+
+ editorconfig {
+ includes = ["src/**", "*.gradle"]
+ excludes = ["src/main/external/sqlcipher/**", "src/main/external/openssl-*/**"]
+ }
+
+ buildTypes {
+ debug {
+ debuggable true
+ }
+ release {
+ debuggable false
+ minifyEnabled false
+ }
+ }
+
+ sourceSets {
+ main {
+ jniLibs.srcDirs "${rootProject.ext.nativeRootOutputDir}/libs"
+ }
+ }
+
+ dependencies {
+ implementation "androidx.sqlite:sqlite:2.0.1"
+ }
+
+ clean.dependsOn cleanNative
+ check.dependsOn editorconfigCheck
+ buildNative.mustRunAfter buildAmalgamation
+ buildAmalgamation.mustRunAfter buildOpenSSL
+ preBuild.dependsOn([buildOpenSSL, buildAmalgamation, buildNative])
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/maven.gradle b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/maven.gradle
new file mode 100644
index 0000000..3d72a2c
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/maven.gradle
@@ -0,0 +1,100 @@
+apply plugin: "maven"
+apply plugin: "signing"
+import org.gradle.plugins.signing.Sign
+
+def isReleaseBuild() {
+ return mavenVersionName.contains("SNAPSHOT") == false
+}
+
+def getReleaseRepositoryUrl() {
+ return hasProperty('mavenReleaseRepositoryUrl') ? mavenReleaseRepositoryUrl
+ : "https://oss.sonatype.org/service/local/staging/deploy/maven2/"
+}
+
+def getSnapshotRepositoryUrl() {
+ if(hasProperty('mavenLocalRepositoryPrefix')) {
+ return "${mavenLocalRepositoryPrefix}${buildDir}/${mavenSnapshotRepositoryUrl}"
+ } else {
+ return hasProperty('mavenSnapshotRepositoryUrl') ? mavenSnapshotRepositoryUrl
+ : "https://oss.sonatype.org/content/repositories/snapshots/"
+ }
+}
+
+def getRepositoryUsername() {
+ return hasProperty('nexusUsername') ? nexusUsername : ""
+}
+
+def getRepositoryPassword() {
+ return hasProperty('nexusPassword') ? nexusPassword : ""
+}
+
+gradle.taskGraph.whenReady { taskGraph ->
+ if (taskGraph.allTasks.any { it instanceof Sign }) {
+ allprojects { ext."signing.keyId" = "${signingKeyId}" }
+ allprojects { ext."signing.secretKeyRingFile" = "${signingKeyRingFile}" }
+ allprojects { ext."signing.password" = "${signingKeyPassword}" }
+ }
+}
+
+afterEvaluate { project ->
+ uploadArchives {
+ repositories {
+ mavenDeployer {
+ beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
+
+ pom.groupId = mavenGroup
+ pom.artifactId = mavenArtifactId
+ pom.version = mavenVersionName
+
+ repository(url: getReleaseRepositoryUrl()) {
+ authentication(userName: getRepositoryUsername(), password: getRepositoryPassword())
+ }
+ snapshotRepository(url: getSnapshotRepositoryUrl()) {
+ authentication(userName: getRepositoryUsername(), password: getRepositoryPassword())
+ }
+
+ pom.project {
+ name mavenArtifactId
+ packaging mavenPackaging
+ description mavenPomDescription
+ url mavenPomUrl
+
+ scm {
+ url mavenScmUrl
+ connection mavenScmConnection
+ developerConnection mavenScmDeveloperConnection
+ }
+
+ licenses {
+ license {
+ url mavenLicenseUrl
+ }
+ }
+
+ developers {
+ developer {
+ name mavenDeveloperName
+ email mavenDeveloperEmail
+ organization mavenDeveloperOrganization
+ organizationUrl mavenDeveloperUrl
+ }
+ }
+ }
+ }
+ }
+ }
+
+ signing {
+ required { isReleaseBuild() && gradle.taskGraph.hasTask("uploadArchives") }
+ sign configurations.archives
+ }
+
+ task androidSourcesJar(type: Jar) {
+ classifier = "sources"
+ from android.sourceSets.main.java.sourceFiles
+ }
+
+ artifacts {
+ archives androidSourcesJar
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/native.gradle b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/native.gradle
new file mode 100644
index 0000000..b6b33db
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/native.gradle
@@ -0,0 +1,152 @@
+import org.gradle.internal.logging.text.StyledTextOutputFactory
+import static org.gradle.internal.logging.text.StyledTextOutput.Style
+
+task buildOpenSSL() {
+ onlyIf {
+ def armNativeFile = new File("${androidNativeRootDir}/armeabi/libcrypto.a")
+ if (armNativeFile.exists()) {
+ def out = services.get(StyledTextOutputFactory).create("")
+ out.style(Style.Normal).text("${androidNativeRootDir}/armeabi/libcrypto.a exists").style(Style.Info).println(' SKIPPED')
+ }
+ return !armNativeFile.exists()
+ }
+ doLast {
+ exec {
+ workingDir "${projectDir}"
+ commandLine "./build-openssl-libraries.sh",
+ "${minimumAndroidSdkVersion}",
+ "${minimumAndroid64BitSdkVersion}",
+ "${opensslVersion}"
+ }
+ }
+}
+
+task buildAmalgamation() {
+ onlyIf {
+ def amalgamation = new File("${projectDir}/src/main/external/sqlcipher/sqlite3.c")
+ return !amalgamation.exists()
+ }
+ doLast {
+ exec {
+ workingDir "${projectDir}/src/main/external/sqlcipher"
+ environment("CFLAGS", "${sqlcipherCFlags}")
+ commandLine "./configure", "--enable-tempstore=yes", "--with-crypto-lib=none"
+ }
+ exec {
+ workingDir "${projectDir}/src/main/external/sqlcipher"
+ environment("CFLAGS", "${sqlcipherCFlags}")
+ commandLine "make", "sqlite3.c"
+ }
+ }
+}
+
+task buildNative() {
+ description "Build the native SQLCipher binaries"
+ doLast {
+ executeNdkBuild(
+ "${nativeRootOutputDir}/libs32",
+ file("src/main/cpp").absolutePath,
+ file("src/main/cpp/Application32.mk").absolutePath,
+ "${sqlcipherCFlags}", "${otherSqlcipherCFlags}",
+ "${minimumAndroidSdkVersion}")
+ executeNdkBuild(
+ "${nativeRootOutputDir}/libs64",
+ file("src/main/cpp").absolutePath,
+ file("src/main/cpp/Application64.mk").absolutePath,
+ "${sqlcipherCFlags}", "${otherSqlcipherCFlags}",
+ "${minimumAndroid64BitSdkVersion}")
+ exec {
+ workingDir "${nativeRootOutputDir}"
+ commandLine "mkdir", "-p", "libs"
+ }
+ copy {
+ from fileTree("${nativeRootOutputDir}/libs32").include("*/*")
+ into "${nativeRootOutputDir}/libs"
+ from fileTree("${nativeRootOutputDir}/libs64").include("*/*")
+ into "${nativeRootOutputDir}/libs"
+ }
+ }
+}
+
+task cleanOpenSSL() {
+ description "Clean the OpenSSL source"
+ doLast {
+ logger.info "Cleaning OpenSSL source"
+ File file = new File("${opensslDir}")
+ if (file.exists()) {
+ file.deleteDir()
+ }
+ }
+}
+
+task cleanSQLCipher() {
+ description "Clean the SQLCipher source"
+ doLast {
+ logger.info "Cleaning SQLCipher source"
+ gitClean("${sqlcipherDir}")
+ File amalgamationSource = new File("${sqlcipherDir}/sqlite3.c")
+ File amalgamationHeader = new File("${sqlcipherDir}/sqlite3.h")
+ if (amalgamationSource.exists()) amalgamationSource.delete()
+ if (amalgamationHeader.exists()) amalgamationHeader.delete()
+ }
+}
+
+task cleanNative() {
+ description "Clean the native (JNI) build artifacts"
+ doLast {
+ logger.info "Cleaning native build artifacts"
+ ["libs", "libs32", "libs64", "obj"].each {
+ File file = new File("${projectDir}/src/main/${it}")
+ if (file.exists()) {
+ file.deleteDir()
+ }
+ }
+ }
+}
+
+task distclean(dependsOn: [clean, cleanSQLCipher, cleanOpenSSL]) {
+ description "Clean build, SQLCipher, and OpenSSL artifacts"
+ doLast {
+ new File("${androidNativeRootDir}/").deleteDir()
+ }
+}
+
+def gitClean(directory) {
+ logger.info "Cleaning directory:${directory}"
+ exec {
+ workingDir "${directory}"
+ commandLine "git", "checkout", "-f"
+ }
+ exec {
+ workingDir "${directory}"
+ commandLine "git", "clean", "-d", "-f"
+ }
+}
+
+def executeNdkBuild(outputDir, androidMkDirectory, applicationMkFile,
+ cflags, otherSqlcipherCFlags, androidVersion) {
+ logger.info "Executing NDK build command"
+
+ def out = services.get(StyledTextOutputFactory).create("")
+ out.style(Style.Normal).text("SQLCIPHER_CFLAGS=").style(Style.Info).println("${cflags}")
+ out.style(Style.Normal).text("OPENSSL_DIR=").style(Style.Info).println("${opensslDir}")
+ out.style(Style.Normal).text("SQLCIPHER_DIR=").style(Style.Info).println("${sqlcipherDir}")
+ out.style(Style.Normal).text("SQLCIPHER_OTHER_CFLAGS=").style(Style.Info).println("${otherSqlcipherCFlags}")
+ out.style(Style.Normal).text("ANDROID_NATIVE_ROOT_DIR=").style(Style.Info).println("${androidNativeRootDir}")
+ out.style(Style.Normal).text("NDK_APP_PLATFORM=").style(Style.Info).println("${androidVersion}")
+
+ exec {
+ def outputDirectory = "NDK_LIBS_OUT=${outputDir}"
+ def applicationFile = "NDK_APPLICATION_MK=${applicationMkFile}"
+ def environmentVariables = ["SQLCIPHER_CFLAGS" : "${cflags}",
+ "OPENSSL_DIR" : "${opensslDir}",
+ "SQLCIPHER_DIR" : "${sqlcipherDir}",
+ "SQLCIPHER_OTHER_CFLAGS" : "${otherSqlcipherCFlags}",
+ "ANDROID_NATIVE_ROOT_DIR": "${androidNativeRootDir}",
+ "NDK_APP_PLATFORM" : "${androidVersion}"]
+ environment(environmentVariables)
+ commandLine "ndk-build", "${ndkBuildType}",
+ "--environment-overrides", outputDirectory,
+ "-C", androidMkDirectory, applicationFile
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/AndroidManifest.xml b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..463aa00
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/AndroidManifest.xml
@@ -0,0 +1,4 @@
+
+
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/aidl/net/sqlcipher/IContentObserver.aidl b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/aidl/net/sqlcipher/IContentObserver.aidl
new file mode 100644
index 0000000..2285751
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/aidl/net/sqlcipher/IContentObserver.aidl
@@ -0,0 +1,31 @@
+/*
+**
+** Copyright 2007, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+package net.sqlcipher;
+
+/**
+ * @hide
+ */
+interface IContentObserver
+{
+ /**
+ * This method is called when an update occurs to the cursor that is being
+ * observed. selfUpdate is true if the update was caused by a call to
+ * commit on the cursor that is being observed.
+ */
+ oneway void onChange(boolean selfUpdate);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Android.mk b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Android.mk
new file mode 100644
index 0000000..be699ca
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Android.mk
@@ -0,0 +1,29 @@
+LOCAL_PATH := $(call my-dir)
+MY_PATH := $(LOCAL_PATH)
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MY_PATH)
+
+SQLCIPHER_SRC := $(SQLCIPHER_DIR)/sqlite3.c
+LOCAL_CFLAGS += $(SQLCIPHER_CFLAGS) $(SQLCIPHER_OTHER_CFLAGS)
+LOCAL_C_INCLUDES := $(SQLCIPHER_DIR) $(LOCAL_PATH)
+LOCAL_LDLIBS := -llog -latomic
+LOCAL_LDFLAGS += -L$(ANDROID_NATIVE_ROOT_DIR)/$(TARGET_ARCH_ABI) -fuse-ld=bfd
+LOCAL_STATIC_LIBRARIES += static-libcrypto
+LOCAL_MODULE := libsqlcipher
+LOCAL_SRC_FILES := $(SQLCIPHER_SRC) \
+ jni_exception.cpp \
+ net_sqlcipher_database_SQLiteCompiledSql.cpp \
+ net_sqlcipher_database_SQLiteDatabase.cpp \
+ net_sqlcipher_database_SQLiteProgram.cpp \
+ net_sqlcipher_database_SQLiteQuery.cpp \
+ net_sqlcipher_database_SQLiteStatement.cpp \
+ net_sqlcipher_CursorWindow.cpp \
+ CursorWindow.cpp
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := static-libcrypto
+LOCAL_EXPORT_C_INCLUDES := $(OPENSSL_DIR)/include
+LOCAL_SRC_FILES := $(ANDROID_NATIVE_ROOT_DIR)/$(TARGET_ARCH_ABI)/libcrypto.a
+include $(PREBUILT_STATIC_LIBRARY)
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application32.mk b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application32.mk
new file mode 100644
index 0000000..0ccf8cb
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application32.mk
@@ -0,0 +1,7 @@
+APP_PROJECT_PATH := $(shell pwd)
+APP_ABI := armeabi armeabi-v7a x86
+APP_PLATFORM := android-$(NDK_APP_PLATFORM)
+APP_BUILD_SCRIPT := $(APP_PROJECT_PATH)/Android.mk
+APP_STL := stlport_static
+APP_CFLAGS := -D_FILE_OFFSET_BITS=32
+APP_LDFLAGS += -Wl,--exclude-libs,ALL
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application64.mk b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application64.mk
new file mode 100644
index 0000000..b0a598c
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/Application64.mk
@@ -0,0 +1,7 @@
+APP_PROJECT_PATH := $(shell pwd)
+APP_ABI := x86_64 arm64-v8a
+APP_PLATFORM := android-$(NDK_APP_PLATFORM)
+APP_BUILD_SCRIPT := $(APP_PROJECT_PATH)/Android.mk
+APP_STL := stlport_static
+APP_CFLAGS := -D_FILE_OFFSET_BITS=64
+APP_LDFLAGS += -Wl,--exclude-libs,ALL
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.cpp
new file mode 100644
index 0000000..e58fb3c
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.cpp
@@ -0,0 +1,406 @@
+/*
+ * Copyright (C) 2006-2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "CursorWindow"
+
+#include
+#include
+#include
+#include
+#include
+#include "CursorWindow.h"
+
+namespace sqlcipher {
+
+CursorWindow::CursorWindow(size_t initialSize, size_t growthPaddingSize, size_t maxSize)
+{
+ mInitialSize = initialSize;
+ mGrowthPaddingSize = growthPaddingSize;
+ mMaxSize = maxSize;
+ LOG_WINDOW("CursorWindow::CursorWindow initialSize:%d growBySize:%d maxSize:%d\n",
+ initialSize, growthPaddingSize, maxSize);
+}
+
+bool CursorWindow::initBuffer(bool localOnly)
+{
+ void* data = malloc(mInitialSize);
+ if(data){
+ mData = (uint8_t *) data;
+ mHeader = (window_header_t *) mData;
+ mSize = mInitialSize;
+ clear();
+ LOG_WINDOW("Created CursorWindow with new MemoryDealer: mFreeOffset = %d, mSize = %d, mInitialSize = %d, mGrowthPaddingSize = %d, mMaxSize = %d, mData = %p\n",
+ mFreeOffset, mSize, mInitialSize, mGrowthPaddingSize, mMaxSize, mData);
+ return true;
+ }
+ return false;
+}
+
+CursorWindow::~CursorWindow()
+{
+ if(mData){
+ free(mData);
+ }
+}
+
+void CursorWindow::clear()
+{
+ mHeader->numRows = 0;
+ mHeader->numColumns = 0;
+ mFreeOffset = sizeof(window_header_t) + ROW_SLOT_CHUNK_SIZE;
+ // Mark the first chunk's next 'pointer' as null
+ *((uint32_t *)(mData + mFreeOffset - sizeof(uint32_t))) = 0;
+ mChunkNumToNextChunkOffset.clear();
+ mLastChunkPtrOffset = 0;
+}
+
+int32_t CursorWindow::freeSpace()
+{
+ int32_t freeSpace = mSize - mFreeOffset;
+ if (freeSpace < 0) {
+ freeSpace = 0;
+ }
+ return freeSpace;
+}
+
+field_slot_t * CursorWindow::allocRow()
+{
+ // Fill in the row slot
+ row_slot_t * rowSlot = allocRowSlot();
+ if (rowSlot == NULL) {
+ return NULL;
+ }
+
+ // Record the original offset of the rowSlot prior to allocation of the field directory
+ uint32_t rowSlotOffset = (uint8_t*)rowSlot - mData;
+
+ // Allocate the slots for the field directory
+ size_t fieldDirSize = mHeader->numColumns * sizeof(field_slot_t);
+ uint32_t fieldDirOffset = alloc(fieldDirSize);
+ if (!fieldDirOffset) {
+ mHeader->numRows--;
+ LOGE("The row failed, so back out the new row accounting from allocRowSlot %d", mHeader->numRows);
+ return NULL;
+ }
+ field_slot_t * fieldDir = (field_slot_t *)offsetToPtr(fieldDirOffset);
+ memset(fieldDir, 0x0, fieldDirSize);
+
+ // Reset the rowSlot pointer relative to mData
+ // If the last alloc relocated mData this will be rowSlot's new address, otherwise the value will not change
+ rowSlot = (row_slot_t*)(mData + rowSlotOffset);
+
+ LOG_WINDOW("Allocated row %u, rowSlot is at offset %u, fieldDir is %d bytes at offset %u\n", (mHeader->numRows - 1), ((uint8_t *)rowSlot) - mData, fieldDirSize, fieldDirOffset);
+ rowSlot->offset = fieldDirOffset;
+
+ return fieldDir;
+}
+
+uint32_t CursorWindow::alloc(size_t requestedSize, bool aligned)
+{
+ size_t size = 0, new_allocation_sz = 0;
+ uint32_t padding;
+ void *tempData = NULL;
+ if (aligned) {
+ // 4 byte alignment
+ padding = 4 - (mFreeOffset & 0x3);
+ } else {
+ padding = 0;
+ }
+ size = requestedSize + padding;
+ if (size > freeSpace()) {
+ new_allocation_sz = mSize + size - freeSpace() + mGrowthPaddingSize;
+ LOGE("need to grow: mSize = %d, size = %d, freeSpace() = %d, numRows = %d new_allocation_sz:%d\n",
+ mSize, size, freeSpace(), mHeader->numRows, new_allocation_sz);
+ if(mMaxSize == 0 || new_allocation_sz <= mMaxSize) {
+ tempData = realloc((void *)mData, new_allocation_sz);
+ if(tempData == NULL) return 0;
+ mData = (uint8_t *)tempData;
+ mHeader = (window_header_t *)mData;
+ LOGE("allocation grew to:%d", new_allocation_sz);
+ mSize = new_allocation_sz;
+ } else {
+ return 0;
+ }
+ }
+ uint32_t offset = mFreeOffset + padding;
+ mFreeOffset += size;
+ return offset;
+}
+
+row_slot_t * CursorWindow::getRowSlot(int row)
+{
+ LOG_WINDOW("getRowSlot entered: requesting row:%d, current row num:%d", row, mHeader->numRows);
+ unordered_map::iterator result;
+ int chunkNum = row / ROW_SLOT_CHUNK_NUM_ROWS;
+ int chunkPos = row % ROW_SLOT_CHUNK_NUM_ROWS;
+ int chunkPtrOffset = sizeof(window_header_t) + ROW_SLOT_CHUNK_SIZE - sizeof(uint32_t);
+ uint8_t * rowChunk = mData + sizeof(window_header_t);
+
+ // check for chunkNum in cache
+ result = mChunkNumToNextChunkOffset.find(chunkNum);
+ if(result != mChunkNumToNextChunkOffset.end()){
+ rowChunk = offsetToPtr(result->second);
+ LOG_WINDOW("Retrieved chunk offset from cache for row:%d", row);
+ return (row_slot_t *)(rowChunk + (chunkPos * sizeof(row_slot_t)));
+ }
+
+ // walk the list, this shouldn't occur
+ LOG_WINDOW("getRowSlot walking list %d times to find rowslot for row:%d", chunkNum, row);
+ for (int i = 0; i < chunkNum; i++) {
+ rowChunk = offsetToPtr(*((uint32_t *)(mData + chunkPtrOffset)));
+ chunkPtrOffset = rowChunk - mData + (ROW_SLOT_CHUNK_NUM_ROWS * sizeof(row_slot_t));
+ }
+ return (row_slot_t *)(rowChunk + (chunkPos * sizeof(row_slot_t)));
+ LOG_WINDOW("exit getRowSlot current row num %d, this row %d", mHeader->numRows, row);
+}
+
+row_slot_t * CursorWindow::allocRowSlot()
+{
+ int chunkNum = mHeader->numRows / ROW_SLOT_CHUNK_NUM_ROWS;
+ int chunkPos = mHeader->numRows % ROW_SLOT_CHUNK_NUM_ROWS;
+ int chunkPtrOffset = sizeof(window_header_t) + ROW_SLOT_CHUNK_SIZE - sizeof(uint32_t);
+ uint8_t * rowChunk = mData + sizeof(window_header_t);
+ LOG_WINDOW("allocRowSlot entered: Allocating row slot, mHeader->numRows is %d, chunkNum is %d, chunkPos is %d",
+ mHeader->numRows, chunkNum, chunkPos);
+
+ if(mLastChunkPtrOffset != 0){
+ chunkPtrOffset = mLastChunkPtrOffset;
+ }
+ if(chunkNum > 0) {
+ uint32_t nextChunkOffset = *((uint32_t *)(mData + chunkPtrOffset));
+ LOG_WINDOW("nextChunkOffset is %d", nextChunkOffset);
+ if (nextChunkOffset == 0) {
+ mLastChunkPtrOffset = chunkPtrOffset;
+ // Allocate a new row chunk
+ nextChunkOffset = alloc(ROW_SLOT_CHUNK_SIZE, true);
+ mChunkNumToNextChunkOffset.insert(make_pair(chunkNum, nextChunkOffset));
+ if (nextChunkOffset == 0) {
+ return NULL;
+ }
+ rowChunk = offsetToPtr(nextChunkOffset);
+ LOG_WINDOW("allocated new chunk at %d, rowChunk = %p", nextChunkOffset, rowChunk);
+ *((uint32_t *)(mData + chunkPtrOffset)) = rowChunk - mData;
+ // Mark the new chunk's next 'pointer' as null
+ *((uint32_t *)(rowChunk + ROW_SLOT_CHUNK_SIZE - sizeof(uint32_t))) = 0;
+ } else {
+ LOG_WINDOW("follwing 'pointer' to next chunk, offset of next pointer is %d", chunkPtrOffset);
+ rowChunk = offsetToPtr(nextChunkOffset);
+ chunkPtrOffset = rowChunk - mData + (ROW_SLOT_CHUNK_NUM_ROWS * sizeof(row_slot_t));
+ if(chunkPos == ROW_SLOT_CHUNK_NUM_ROWS - 1){
+ // prepare to allocate new rowslot_t now at end of row
+ mLastChunkPtrOffset = chunkPtrOffset;
+ }
+ }
+ }
+ mHeader->numRows++;
+ return (row_slot_t *)(rowChunk + (chunkPos * sizeof(row_slot_t)));
+}
+
+field_slot_t * CursorWindow::getFieldSlotWithCheck(int row, int column)
+{
+ LOG_WINDOW("getFieldSlotWithCheck entered: row:%d column:%d", row, column);
+ if (row < 0 || row >= mHeader->numRows || column < 0 || column >= mHeader->numColumns) {
+ LOGE("Bad request for field slot %d,%d. numRows = %d, numColumns = %d", row, column, mHeader->numRows, mHeader->numColumns);
+ return NULL;
+ }
+ row_slot_t * rowSlot = getRowSlot(row);
+ if (!rowSlot) {
+ LOGE("Failed to find rowSlot for row %d", row);
+ return NULL;
+ }
+ if (rowSlot->offset == 0 || rowSlot->offset >= mSize) {
+ LOGE("Invalid rowSlot, offset = %d", rowSlot->offset);
+ return NULL;
+ }
+ int fieldDirOffset = rowSlot->offset;
+ return ((field_slot_t *)offsetToPtr(fieldDirOffset)) + column;
+}
+
+uint32_t CursorWindow::read_field_slot(int row, int column, field_slot_t * slotOut)
+{
+ LOG_WINDOW("read_field_slot entered: row:%d, column:%d, slotOut:%p", row, column, slotOut);
+ if (row < 0 || row >= mHeader->numRows || column < 0 || column >= mHeader->numColumns) {
+ LOGE("Bad request for field slot %d,%d. numRows = %d, numColumns = %d", row, column, mHeader->numRows, mHeader->numColumns);
+ return -1;
+ }
+ row_slot_t * rowSlot = getRowSlot(row);
+ if (!rowSlot) {
+ LOGE("Failed to find rowSlot for row %d", row);
+ return -1;
+ }
+ if (rowSlot->offset == 0 || rowSlot->offset >= mSize) {
+ LOGE("Invalid rowSlot, offset = %d", rowSlot->offset);
+ return -1;
+ }
+ LOG_WINDOW("Found field directory for %d,%d at rowSlot %d, offset %d", row, column, (uint8_t *)rowSlot - mData, rowSlot->offset);
+ field_slot_t * fieldDir = (field_slot_t *)offsetToPtr(rowSlot->offset);
+ LOG_WINDOW("Read field_slot_t %d,%d: offset = %d, size = %d, type = %d", row, column, fieldDir[column].data.buffer.offset, fieldDir[column].data.buffer.size, fieldDir[column].type);
+
+ // Copy the data to the out param
+ slotOut->data.buffer.offset = fieldDir[column].data.buffer.offset;
+ slotOut->data.buffer.size = fieldDir[column].data.buffer.size;
+ slotOut->type = fieldDir[column].type;
+ return 0;
+}
+
+void CursorWindow::copyIn(uint32_t offset, uint8_t const * data, size_t size)
+{
+ assert(offset + size <= mSize);
+ memcpy(mData + offset, data, size);
+}
+
+void CursorWindow::copyIn(uint32_t offset, int64_t data)
+{
+ assert(offset + sizeof(int64_t) <= mSize);
+ memcpy(mData + offset, (uint8_t *)&data, sizeof(int64_t));
+}
+
+void CursorWindow::copyIn(uint32_t offset, double data)
+{
+ assert(offset + sizeof(double) <= mSize);
+ memcpy(mData + offset, (uint8_t *)&data, sizeof(double));
+}
+
+void CursorWindow::copyOut(uint32_t offset, uint8_t * data, size_t size)
+{
+ assert(offset + size <= mSize);
+ memcpy(data, mData + offset, size);
+}
+
+int64_t CursorWindow::copyOutLong(uint32_t offset)
+{
+ int64_t value;
+ assert(offset + sizeof(int64_t) <= mSize);
+ memcpy(&value, mData + offset, sizeof(int64_t));
+ return value;
+}
+
+double CursorWindow::copyOutDouble(uint32_t offset)
+{
+ double value;
+ assert(offset + sizeof(double) <= mSize);
+ memcpy(&value, mData + offset, sizeof(double));
+ return value;
+}
+
+bool CursorWindow::putLong(unsigned int row, unsigned int col, int64_t value)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot) {
+ return false;
+ }
+
+#if WINDOW_STORAGE_INLINE_NUMERICS
+ fieldSlot->data.l = value;
+#else
+ int offset = alloc(sizeof(int64_t));
+ if (!offset) {
+ return false;
+ }
+
+ copyIn(offset, value);
+
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = sizeof(int64_t);
+#endif
+ fieldSlot->type = FIELD_TYPE_INTEGER;
+ return true;
+}
+
+bool CursorWindow::putDouble(unsigned int row, unsigned int col, double value)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot) {
+ return false;
+ }
+
+#if WINDOW_STORAGE_INLINE_NUMERICS
+ fieldSlot->data.d = value;
+#else
+ int offset = alloc(sizeof(int64_t));
+ if (!offset) {
+ return false;
+ }
+
+ copyIn(offset, value);
+
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = sizeof(double);
+#endif
+ fieldSlot->type = FIELD_TYPE_FLOAT;
+ return true;
+}
+
+bool CursorWindow::putNull(unsigned int row, unsigned int col)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot) {
+ return false;
+ }
+
+ fieldSlot->type = FIELD_TYPE_NULL;
+ fieldSlot->data.buffer.offset = 0;
+ fieldSlot->data.buffer.size = 0;
+ return true;
+}
+
+bool CursorWindow::getLong(unsigned int row, unsigned int col, int64_t * valueOut)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot || fieldSlot->type != FIELD_TYPE_INTEGER) {
+ return false;
+ }
+
+#if WINDOW_STORAGE_INLINE_NUMERICS
+ *valueOut = fieldSlot->data.l;
+#else
+ *valueOut = copyOutLong(fieldSlot->data.buffer.offset);
+#endif
+ return true;
+}
+
+bool CursorWindow::getDouble(unsigned int row, unsigned int col, double * valueOut)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot || fieldSlot->type != FIELD_TYPE_FLOAT) {
+ return false;
+ }
+
+#if WINDOW_STORAGE_INLINE_NUMERICS
+ *valueOut = fieldSlot->data.d;
+#else
+ *valueOut = copyOutDouble(fieldSlot->data.buffer.offset);
+#endif
+ return true;
+}
+
+bool CursorWindow::getNull(unsigned int row, unsigned int col, bool * valueOut)
+{
+ field_slot_t * fieldSlot = getFieldSlotWithCheck(row, col);
+ if (!fieldSlot) {
+ return false;
+ }
+
+ if (fieldSlot->type != FIELD_TYPE_NULL) {
+ *valueOut = false;
+ } else {
+ *valueOut = true;
+ }
+ return true;
+}
+
+}; // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.h
new file mode 100644
index 0000000..67ece39
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/CursorWindow.h
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID__DATABASE_WINDOW_H
+#define _ANDROID__DATABASE_WINDOW_H
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include "log.h"
+#include
+
+#define ROW_SLOT_CHUNK_NUM_ROWS 128
+#define INITIAL_WINDOW_SIZE (1024 * 1024)
+#define GROW_WINDOW_SIZE_EXTRA INITIAL_WINDOW_SIZE
+#define WINDOW_ALLOCATION_UNBOUNDED 0
+
+// Row slots are allocated in chunks of ROW_SLOT_CHUNK_NUM_ROWS,
+// with an offset after the rows that points to the next chunk
+#define ROW_SLOT_CHUNK_SIZE ((ROW_SLOT_CHUNK_NUM_ROWS * sizeof(row_slot_t)) + sizeof(uint32_t))
+
+#if LOG_NDEBUG
+
+#define IF_LOG_WINDOW() if (false)
+#define LOG_WINDOW(...)
+
+#else
+
+#define IF_LOG_WINDOW() IF_LOG(LOG_DEBUG, "CursorWindow")
+#define LOG_WINDOW(...) LOG(LOG_DEBUG, "CursorWindow", __VA_ARGS__)
+
+#endif
+
+// When defined to true strings are stored as UTF8, otherwise they're UTF16
+#define WINDOW_STORAGE_UTF8 0
+
+// When defined to true numeric values are stored inline in the field_slot_t,
+// otherwise they're allocated in the window
+#define WINDOW_STORAGE_INLINE_NUMERICS 1
+
+using std::make_pair;
+using std::tr1::unordered_map;
+
+namespace sqlcipher {
+
+typedef struct
+{
+ uint32_t numRows;
+ uint32_t numColumns;
+} window_header_t;
+
+typedef struct
+{
+ uint32_t offset;
+} row_slot_t;
+
+typedef struct
+{
+ uint8_t type;
+ union {
+ double d;
+ int64_t l;
+ struct {
+ uint32_t offset;
+ uint32_t size;
+ } buffer;
+ } data;
+} __attribute__((packed)) field_slot_t;
+
+#define FIELD_TYPE_INTEGER 1
+#define FIELD_TYPE_FLOAT 2
+#define FIELD_TYPE_STRING 3
+#define FIELD_TYPE_BLOB 4
+#define FIELD_TYPE_NULL 0
+
+/**
+ * This class stores a set of rows from a database in a buffer. The beginning of the
+ * window has the first chunk of row_slot_ts, which are offsets to the row directory, followed by
+ * an offset to the next chunk in a linked-list of additional chunks of row_slot_ts in case
+ * the pre-allocated chunk isn't big enough to refer to all rows. Each row directory has a
+ * field_slot_t per column, which has the size, offset, and type of the data for that field.
+ * Note that the data types come from sqlite3.h.
+ */
+class CursorWindow
+{
+public:
+ CursorWindow(size_t initialSize, size_t growthPaddingSize, size_t maxSize);
+ CursorWindow(){}
+ ~CursorWindow();
+
+ bool initBuffer(bool localOnly);
+ size_t size() {return mSize;}
+ uint8_t * data() {return mData;}
+ uint32_t getNumRows() {return mHeader->numRows;}
+ uint32_t getNumColumns() {return mHeader->numColumns;}
+ void freeLastRow() {
+ if (mHeader->numRows > 0) {
+ mHeader->numRows--;
+ }
+ }
+ bool setNumColumns(uint32_t numColumns)
+ {
+ uint32_t cur = mHeader->numColumns;
+ if (cur > 0 && cur != numColumns) {
+ LOGE("Trying to go from %d columns to %d", cur, numColumns);
+ return false;
+ }
+ mHeader->numColumns = numColumns;
+ return true;
+ }
+
+ int32_t freeSpace();
+
+ void clear();
+
+ /**
+ * Allocate a row slot and its directory. The returned
+ * pointer points to the beginning of the row's directory
+ * or NULL if there wasn't room. The directory is
+ * initialized with NULL entries for each field.
+ */
+ field_slot_t * allocRow();
+
+ /**
+ * Allocate a portion of the window. Returns the offset
+ * of the allocation, or 0 if there isn't enough space.
+ * If aligned is true, the allocation gets 4 byte alignment.
+ */
+ uint32_t alloc(size_t size, bool aligned = false);
+
+ uint32_t read_field_slot(int row, int column, field_slot_t * slot);
+
+ /**
+ * Copy data into the window at the given offset.
+ */
+ void copyIn(uint32_t offset, uint8_t const * data, size_t size);
+ void copyIn(uint32_t offset, int64_t data);
+ void copyIn(uint32_t offset, double data);
+
+ void copyOut(uint32_t offset, uint8_t * data, size_t size);
+ int64_t copyOutLong(uint32_t offset);
+ double copyOutDouble(uint32_t offset);
+
+ bool putLong(unsigned int row, unsigned int col, int64_t value);
+ bool putDouble(unsigned int row, unsigned int col, double value);
+ bool putNull(unsigned int row, unsigned int col);
+
+ bool getLong(unsigned int row, unsigned int col, int64_t * valueOut);
+ bool getDouble(unsigned int row, unsigned int col, double * valueOut);
+ bool getNull(unsigned int row, unsigned int col, bool * valueOut);
+
+ uint8_t * offsetToPtr(uint32_t offset) {return mData + offset;}
+
+ row_slot_t * allocRowSlot();
+
+ row_slot_t * getRowSlot(int row);
+
+ /**
+ * Return NULL if the rowSlot cannot be found
+ * or is invalid.
+ */
+ field_slot_t * getFieldSlotWithCheck(int row, int column);
+ field_slot_t * getFieldSlot(int row, int column)
+ {
+ int fieldDirOffset = getRowSlot(row)->offset;
+ return ((field_slot_t *)offsetToPtr(fieldDirOffset)) + column;
+ }
+
+private:
+ uint8_t * mData;
+ size_t mSize;
+ size_t mInitialSize;
+ size_t mGrowthPaddingSize;
+ size_t mMaxSize;
+ window_header_t * mHeader;
+ /**
+ * Offset of the lowest unused data byte in the array.
+ */
+ uint32_t mFreeOffset;
+ unordered_map mChunkNumToNextChunkOffset;
+ int mLastChunkPtrOffset;
+};
+
+}; // namespace sqlcipher
+
+#endif
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_elements.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_elements.h
new file mode 100644
index 0000000..32367ea
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_elements.h
@@ -0,0 +1,3 @@
+#ifndef NELEM
+/* Number of elements in a statically-sized array (classic AOSP helper). */
+# define NELEM(x) ((int) (sizeof(x) / sizeof((x)[0])))
+#endif
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.cpp
new file mode 100644
index 0000000..41a91ff
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.cpp
@@ -0,0 +1,8 @@
+#include
+#include "jni_exception.h"
+
+void jniThrowException(JNIEnv* env, const char* exceptionClass, const char* sqlite3Message) {
+ jclass exClass;
+ exClass = env->FindClass(exceptionClass);
+ env->ThrowNew(exClass, sqlite3Message);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.h
new file mode 100644
index 0000000..2c66be5
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/jni_exception.h
@@ -0,0 +1,6 @@
+#include
+
+#ifndef _JNI_EXCEPTION_H
+#define _JNI_EXCEPTION_H
+void jniThrowException(JNIEnv* env, const char* exceptionClass, const char* sqlite3Message);
+#endif
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/log.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/log.h
new file mode 100644
index 0000000..f4da968
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/log.h
@@ -0,0 +1,44 @@
+#include
+
+#ifdef LOG_NDEBUG
+#define LOGI(...)
+#define LOGE(...)
+#define LOGV(...)
+#define LOGD(...)
+#else
+#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
+#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE,LOG_TAG,__VA_ARGS__)
+#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
+#endif
+
+#ifndef LOG
+#define LOG(priority, tag, ...) \
+ LOG_PRI(ANDROID_##priority, tag, __VA_ARGS__)
+#endif
+
+#ifndef LOG_PRI
+#define LOG_PRI(priority, tag, ...) \
+ __android_log_print(priority, tag, __VA_ARGS__)
+#endif
+
+#ifndef LOG_ASSERT
+#define LOG_ASSERT(cond, ...) LOG_FATAL_IF(!(cond), ## __VA_ARGS__)
+#endif
+
+#ifndef LOG_FATAL_IF
+#define LOG_FATAL_IF(cond, ...) LOG_ALWAYS_FATAL_IF(cond, ## __VA_ARGS__)
+#endif
+
+#ifndef LOG_ALWAYS_FATAL_IF
+#define LOG_ALWAYS_FATAL_IF(cond, ...) \
+ ( (CONDITION(cond)) \
+ ? ((void)android_printAssert(#cond, LOG_TAG, ## __VA_ARGS__)) \
+ : (void)0 )
+#endif
+
+#ifndef CONDITION
+/* Branch-prediction hint: assert conditions are expected to be false. */
+#define CONDITION(cond) (__builtin_expect((cond)!=0, 0))
+#endif
+
+/* NOTE(review): stub replacing Android's assert printer. The "%s: " format
+ * consumes only the first __VA_ARGS__ argument and requires it to be a
+ * string — confirm LOG_ALWAYS_FATAL_IF call sites pass a leading string. */
+#define android_printAssert(a, b, ...) printf("%s: ", __VA_ARGS__)
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_CursorWindow.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_CursorWindow.cpp
new file mode 100644
index 0000000..cd9e408
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_CursorWindow.cpp
@@ -0,0 +1,680 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "CursorWindow"
+
+#include
+#include
+#include
+#include
+#include
+
+#include "CursorWindow.h"
+#include "jni_elements.h"
+#include "jni_exception.h"
+#include "sqlite3_exception.h"
+
+#include
+#include
+
+#include
+#include
+
+namespace sqlcipher {
+
+ static jfieldID gWindowField;
+ static jfieldID gBufferField;
+ static jfieldID gSizeCopiedField;
+
+#define GET_WINDOW(env, object) ((CursorWindow *)env->GetLongField(object, gWindowField))
+#define SET_WINDOW(env, object, window) (env->SetLongField(object, gWindowField,(intptr_t)window))
+#define SET_BUFFER(env, object, buf) (env->SetObjectField(object, gBufferField, buf))
+#define SET_SIZE_COPIED(env, object, size) (env->SetIntField(object, gSizeCopiedField, size))
+
+ CursorWindow * get_window_from_object(JNIEnv * env, jobject javaWindow)
+ {
+ return GET_WINDOW(env, javaWindow);
+ }
+
+ // Allocate the native CursorWindow backing the Java object, initialize
+ // its buffer, and publish the pointer via the Java-side window field.
+ // Throws RuntimeException / IllegalStateException on failure.
+ static void native_init_empty(JNIEnv * env, jobject object,
+ jboolean localOnly, jlong initialSize,
+ jlong growthPaddingSize, jlong maxSize)
+ {
+ CursorWindow * window;
+
+ // Plain `new` throws std::bad_alloc rather than returning NULL, so this
+ // check is defensive only; kept for parity with the AOSP original.
+ window = new CursorWindow(initialSize, growthPaddingSize, maxSize);
+ if (!window) {
+ jniThrowException(env, "java/lang/RuntimeException", "No memory for native window object");
+ return;
+ }
+
+ if (!window->initBuffer(localOnly)) {
+ jniThrowException(env, "java/lang/IllegalStateException", "Couldn't init cursor window");
+ delete window;
+ return;
+ }
+ LOG_WINDOW("native_init_empty: window = %p", window);
+ SET_WINDOW(env, object, window);
+ }
+
+ // Reset the window's contents. Throws IllegalStateException when the
+ // native window has already been destroyed by close().
+ static void native_clear(JNIEnv * env, jobject object)
+ {
+ CursorWindow * cw = GET_WINDOW(env, object);
+ LOG_WINDOW("Clearing window %p", cw);
+ if (cw != NULL) {
+ cw->clear();
+ } else {
+ jniThrowException(env, "java/lang/IllegalStateException", "clear() called after close()");
+ }
+ }
+
+ // Destroy the native window (if any) and zero the Java-side pointer,
+ // making close() idempotent.
+ static void native_close(JNIEnv * env, jobject object)
+ {
+ CursorWindow * cw = GET_WINDOW(env, object);
+ if (cw == NULL) {
+ return; // already closed
+ }
+ LOG_WINDOW("Closing window %p", cw);
+ delete cw;
+ SET_WINDOW(env, object, 0);
+ }
+
+ // Raise InvalidRowColumnException identifying the offending row/column.
+ static void throwExceptionWithRowCol(JNIEnv * env, jint row, jint column)
+ {
+ char msg[100];
+ snprintf(msg, sizeof(msg), "get field slot from row %d col %d failed", row, column);
+ jniThrowException(env, "net/sqlcipher/InvalidRowColumnException", msg);
+ }
+
+ // Raise UnknownTypeException for an unrecognized field_slot type code.
+ static void throwUnknowTypeException(JNIEnv * env, jint type)
+ {
+ char msg[80];
+ snprintf(msg, sizeof(msg), "UNKNOWN type %d", type);
+ jniThrowException(env, "net/sqlcipher/UnknownTypeException", msg);
+ }
+
+ // Read the cell at (row, column) coerced to a 64-bit integer: INTEGER is
+ // returned as-is, STRING is parsed with strtoll, FLOAT is truncated, NULL
+ // yields 0, BLOB throws a SQLite exception.
+ static jlong getLong_native(JNIEnv * env, jobject object, jint row, jint column)
+ {
+ int32_t err;
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Getting long for %d,%d from %p", row, column, window);
+
+ field_slot_t field;
+ err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return 0;
+ }
+
+ uint8_t type = field.type;
+ if (type == FIELD_TYPE_INTEGER) {
+ int64_t value;
+ if (window->getLong(row, column, &value)) {
+ return value;
+ }
+ return 0;
+ } else if (type == FIELD_TYPE_STRING) {
+ uint32_t size = field.data.buffer.size;
+ if (size > 0) {
+ long long int result;
+ // The window stores UTF-16; round-trip through a temporary jstring
+ // to get UTF-8 for strtoll, then release both JNI resources.
+ jstring data = env->NewString((const jchar*)window->offsetToPtr(field.data.buffer.offset), (jsize)size / sizeof(jchar));
+ const char* utf8data = env->GetStringUTFChars(data, NULL);
+ result = strtoll(utf8data, NULL, 0);
+ if(utf8data) env->ReleaseStringUTFChars(data, utf8data);
+ if(data) env->DeleteLocalRef(data);
+ return result;
+ } else {
+ return 0;
+ }
+ } else if (type == FIELD_TYPE_FLOAT) {
+ double value;
+ if (window->getDouble(row, column, &value)) {
+ return value; // implicit double -> jlong truncation, as in AOSP
+ }
+ return 0;
+ } else if (type == FIELD_TYPE_NULL) {
+ return 0;
+ } else if (type == FIELD_TYPE_BLOB) {
+ throw_sqlite3_exception(env, "Unable to convert BLOB to long");
+ return 0;
+ } else {
+ throwUnknowTypeException(env, type);
+ return 0;
+ }
+ }
+
+ // Read the cell at (row, column) as a Java byte[]. BLOB and STRING cells
+ // are copied verbatim from the window; INTEGER/FLOAT throw; NULL and
+ // error paths return NULL (with an exception pending on error).
+ static jbyteArray getBlob_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ int32_t err;
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Getting blob for %d,%d from %p", row, column, window);
+
+ field_slot_t field;
+ err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return NULL;
+ }
+
+ uint8_t type = field.type;
+ if (type == FIELD_TYPE_BLOB || type == FIELD_TYPE_STRING) {
+ jbyteArray byteArray = env->NewByteArray(field.data.buffer.size);
+ // NewByteArray leaves an OutOfMemoryError pending when it fails.
+ if(byteArray == NULL) return NULL;
+ env->SetByteArrayRegion(byteArray, 0, field.data.buffer.size,
+ (const jbyte*)window->offsetToPtr(field.data.buffer.offset));
+ return byteArray;
+ } else if (type == FIELD_TYPE_INTEGER) {
+ throw_sqlite3_exception(env, "INTEGER data in getBlob_native ");
+ } else if (type == FIELD_TYPE_FLOAT) {
+ throw_sqlite3_exception(env, "FLOAT data in getBlob_native ");
+ } else if (type == FIELD_TYPE_NULL) {
+ // do nothing
+ } else {
+ throwUnknowTypeException(env, type);
+ }
+ return NULL;
+ }
+
+ // True when the cell at (row, column) holds a BLOB or SQL NULL.
+ static jboolean isBlob_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Checking if column is a blob or null for %d,%d from %p", row, column, window);
+ field_slot_t slot;
+ if (window->read_field_slot(row, column, &slot) != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return false;
+ }
+ return slot.type == FIELD_TYPE_BLOB || slot.type == FIELD_TYPE_NULL;
+ }
+
+ // True when the cell at (row, column) holds a STRING or SQL NULL.
+ static jboolean isString_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Checking if column is a string or null for %d,%d from %p", row, column, window);
+ field_slot_t slot;
+ if (window->read_field_slot(row, column, &slot) != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return false;
+ }
+ return slot.type == FIELD_TYPE_STRING || slot.type == FIELD_TYPE_NULL;
+ }
+
+ // True when the cell at (row, column) holds an INTEGER.
+ static jboolean isInteger_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Checking if column is an integer for %d,%d from %p", row, column, window);
+ field_slot_t slot;
+ if (window->read_field_slot(row, column, &slot) != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return false;
+ }
+ return slot.type == FIELD_TYPE_INTEGER;
+ }
+
+ // Return the field_slot type code for (row, column). On failure an
+ // InvalidRowColumnException is thrown and -1 returned (the JVM discards
+ // the return value while an exception is pending).
+ static jint getType_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Getting type for %d,%d from %p", row, column, window);
+ field_slot_t field;
+ int32_t err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ // Was `return false;` — a boolean from a jint-returning function,
+ // aliasing a valid type code (0). Use an out-of-band sentinel.
+ return -1;
+ }
+ return field.type;
+ }
+
+ // True when the cell at (row, column) holds a FLOAT.
+ static jboolean isFloat_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Checking if column is a float for %d,%d from %p", row, column, window);
+ field_slot_t slot;
+ if (window->read_field_slot(row, column, &slot) != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return false;
+ }
+ return slot.type == FIELD_TYPE_FLOAT;
+ }
+
+ // Read the cell at (row, column) coerced to a Java String: STRING is
+ // returned directly from the window's UTF-16 bytes, INTEGER/FLOAT are
+ // formatted, NULL returns NULL, BLOB throws.
+ static jstring getString_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ int32_t err;
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Getting string for %d,%d from %p", row, column, window);
+ field_slot_t field;
+ err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return NULL;
+ }
+ uint8_t type = field.type;
+ jint size = (jint)field.data.buffer.size;
+ if (type == FIELD_TYPE_NULL) {
+ return NULL;
+ } else if (type == FIELD_TYPE_BLOB) {
+ throw_sqlite3_exception(env, "Unable to convert BLOB to string");
+ return NULL;
+ } else if (type == FIELD_TYPE_STRING) {
+ return env->NewString((const jchar*)window->offsetToPtr(field.data.buffer.offset), (jsize)size / sizeof(jchar));
+ } else if (type == FIELD_TYPE_INTEGER) {
+ int64_t value;
+ if (window->getLong(row, column, &value)) {
+ char buf[32];
+ // "%" PRId64: spaces required — C++11 parses "%"PRId64 as a
+ // user-defined literal, breaking the macro concatenation.
+ snprintf(buf, sizeof(buf), "%" PRId64, value);
+ return env->NewStringUTF((const char*)buf);
+ }
+ return NULL;
+ } else if (type == FIELD_TYPE_FLOAT) {
+ double value;
+ if (window->getDouble(row, column, &value)) {
+ char buf[32];
+ snprintf(buf, sizeof(buf), "%g", value);
+ return env->NewStringUTF(buf);
+ }
+ }
+ return NULL;
+ }
+
+ /**
+ * Use this only to convert characters that are known to be within the
+ * 0-127 range for direct conversion to UTF-16
+ */
+ /**
+ * Widen an ASCII C string into UTF-16 code units, masking each byte to
+ * 7 bits; copies at most bufferSize chars. Only valid for 0-127 input.
+ * Returns the number of code units written.
+ */
+ static jint charToJchar(const char* src, jchar* dst, jint bufferSize)
+ {
+ jint count = (jint) strlen(src);
+ if (count > bufferSize) {
+ count = bufferSize;
+ }
+ for (jint i = 0; i < count; i++) {
+ dst[i] = (jchar)(src[i] & 0x7F);
+ }
+ return count;
+ }
+
+ // Copy the cell at (row, column), rendered as text, into the Java
+ // CharArrayBuffer `buf`. When the text fits in buf's existing char[] it
+ // is copied in place and NULL returned; when it does not fit (or the
+ // array could not be pinned) a freshly allocated jcharArray holding the
+ // full text is returned instead. buf.sizeCopied is always updated.
+ static jcharArray copyStringToBuffer_native(JNIEnv* env, jobject object, jint row,
+ jint column, jint bufferSize, jobject buf)
+ {
+ int32_t err;
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Copying string for %d,%d from %p", row, column, window);
+
+ field_slot_t field;
+ err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ jniThrowException(env, "java/lang/IllegalStateException", "Unable to get field slot");
+ return NULL;
+ }
+
+ jcharArray buffer = (jcharArray)env->GetObjectField(buf, gBufferField);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", "buf should not be null");
+ return NULL;
+ }
+ jchar* dst = env->GetCharArrayElements(buffer, NULL);
+ uint8_t type = field.type;
+ uint32_t sizeCopied = 0;
+ jcharArray newArray = NULL;
+ if (type == FIELD_TYPE_STRING) {
+ uint32_t size = field.data.buffer.size;
+ if (size > 0) {
+ jsize length = (jsize)size/sizeof(jchar);
+ int32_t strSize = (jsize)size/sizeof(jchar);
+ jstring content = env->NewString((const jchar *)window->offsetToPtr(field.data.buffer.offset), length);
+ const jchar *elements = env->GetStringChars(content, JNI_FALSE);
+ if (strSize > bufferSize || dst == NULL) {
+ newArray = env->NewCharArray(length);
+ env->SetCharArrayRegion(newArray, 0, length, elements);
+ if(elements) env->ReleaseStringChars(content, elements);
+ if(content) env->DeleteLocalRef(content);
+ } else {
+ // NOTE(review): in this branch `elements` and `content` are not
+ // released/deleted — looks like a JNI local-ref/pin leak; confirm.
+ memcpy(dst, elements, strSize * 2);
+ }
+ sizeCopied = strSize;
+ }
+ } else if (type == FIELD_TYPE_INTEGER) {
+ int64_t value;
+ if (window->getLong(row, column, &value)) {
+ int len;
+ char buf[32];
+ len = snprintf(buf, sizeof(buf), "%"PRId64"", value);
+ jint bufferLength = env->GetArrayLength(buffer);
+ if(len > bufferLength || dst == NULL){
+ // Digits are ASCII, so widening via a temporary jstring is safe.
+ jstring content = env->NewStringUTF(buf);
+ const jchar *elements = env->GetStringChars(content, JNI_FALSE);
+ newArray = env->NewCharArray(len);
+ env->SetCharArrayRegion(newArray, 0, len, elements);
+ sizeCopied = len;
+ if(elements) env->ReleaseStringChars(content, elements);
+ if(content) env->DeleteLocalRef(content);
+ } else {
+ memcpy(dst, buf, len);
+ sizeCopied = charToJchar(buf, dst, bufferSize);
+ }
+ }
+ } else if (type == FIELD_TYPE_FLOAT) {
+ double value;
+ if (window->getDouble(row, column, &value)) {
+ int len;
+ char buf[32];
+ len = snprintf(buf, sizeof(buf), "%g", value);
+ jint bufferLength = env->GetArrayLength(buffer);
+ if(len > bufferLength || dst == NULL){
+ jstring content = env->NewStringUTF(buf);
+ const jchar *elements = env->GetStringChars(content, JNI_FALSE);
+ newArray = env->NewCharArray(len);
+ env->SetCharArrayRegion(newArray, 0, len, elements);
+ sizeCopied = len;
+ if(elements) env->ReleaseStringChars(content, elements);
+ if(content) env->DeleteLocalRef(content);
+ } else {
+ memcpy(dst, buf, len);
+ sizeCopied = charToJchar(buf, dst, bufferSize);
+ }
+ }
+ } else if (type == FIELD_TYPE_NULL) {
+ } else if (type == FIELD_TYPE_BLOB) {
+ throw_sqlite3_exception(env, "Unable to convert BLOB to string");
+ } else {
+ LOGE("Unknown field type %d", type);
+ throw_sqlite3_exception(env, "UNKNOWN type in copyStringToBuffer_native()");
+ }
+ SET_SIZE_COPIED(env, buf, sizeCopied);
+ env->ReleaseCharArrayElements(buffer, dst, JNI_OK);
+ return newArray;
+ }
+
+ // Read the cell at (row, column) coerced to double: FLOAT is returned
+ // as-is, STRING is parsed with strtod, INTEGER is widened, NULL yields
+ // 0.0, BLOB throws a SQLite exception.
+ static jdouble getDouble_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ int32_t err;
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Getting double for %d,%d from %p", row, column, window);
+
+ field_slot_t field;
+ err = window->read_field_slot(row, column, &field);
+ if (err != 0) {
+ throwExceptionWithRowCol(env, row, column);
+ return 0.0;
+ }
+
+ uint8_t type = field.type;
+ if (type == FIELD_TYPE_FLOAT) {
+ double value;
+ if (window->getDouble(row, column, &value)) {
+ return value;
+ }
+ return 0.0;
+ } else if (type == FIELD_TYPE_STRING) {
+ uint32_t size = field.data.buffer.size;
+ if (size > 0) {
+ double result;
+ // The window stores UTF-16; round-trip through a temporary jstring
+ // to get UTF-8 for strtod, then release both JNI resources.
+ jstring data = env->NewString((const jchar*)window->offsetToPtr(field.data.buffer.offset), (jsize)size / sizeof(jchar));
+ const char* utf8data = env->GetStringUTFChars(data, NULL);
+ result = strtod(utf8data, NULL);
+ if(utf8data) env->ReleaseStringUTFChars(data, utf8data);
+ if(data) env->DeleteLocalRef(data);
+ return result;
+ } else {
+ return 0.0;
+ }
+ } else if (type == FIELD_TYPE_INTEGER) {
+ int64_t value;
+ if (window->getLong(row, column, &value)) {
+ return (double) value;
+ }
+ return 0.0;
+ } else if (type == FIELD_TYPE_NULL) {
+ return 0.0;
+ } else if (type == FIELD_TYPE_BLOB) {
+ throw_sqlite3_exception(env, "Unable to convert BLOB to double");
+ return 0.0;
+ } else {
+ throwUnknowTypeException(env, type);
+ return 0.0;
+ }
+ }
+
+ // True when the cell at (row, column) is SQL NULL. Note: when getNull()
+ // itself fails (invalid row/column) this reports true rather than
+ // throwing — see the retained TODO below.
+ static jboolean isNull_native(JNIEnv* env, jobject object, jint row, jint column)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ LOG_WINDOW("Checking for NULL at %d,%d from %p", row, column, window);
+ bool isNull;
+ if (window->getNull(row, column, &isNull)) {
+ return isNull;
+ }
+ //TODO throw exception?
+ return true;
+ }
+
+ // Number of populated rows in the native window.
+ static jint getNumRows(JNIEnv * env, jobject object)
+ {
+ return GET_WINDOW(env, object)->getNumRows();
+ }
+
+ // Fix the column count used for all subsequently allocated rows.
+ static jboolean setNumColumns(JNIEnv * env, jobject object, jint columnNum)
+ {
+ return GET_WINDOW(env, object)->setNumColumns(columnNum);
+ }
+
+ // Allocate storage for one more row; true on success.
+ static jboolean allocRow(JNIEnv * env, jobject object)
+ {
+ return GET_WINDOW(env, object)->allocRow() != NULL;
+ }
+
+ // Store a Java byte[] as a BLOB in cell (row, col). Returns false when
+ // the value is null, the slot is invalid, or window allocation fails.
+ static jboolean putBlob_native(JNIEnv * env, jobject object, jbyteArray value, jint row, jint col)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ if (!value) {
+ LOG_WINDOW("How did a null value send to here");
+ return false;
+ }
+ field_slot_t * fieldSlot = window->getFieldSlotWithCheck(row, col);
+ if (fieldSlot == NULL) {
+ LOG_WINDOW(" getFieldSlotWithCheck error ");
+ return false;
+ }
+
+ jint len = env->GetArrayLength(value);
+ int offset = window->alloc(len);
+ if (!offset) {
+ LOG_WINDOW("Failed allocating %u bytes", len);
+ return false;
+ }
+ // NOTE(review): GetByteArrayElements can return NULL on OOM; that case
+ // is not checked before copyIn — confirm upstream guarantees or add one.
+ jbyte * bytes = env->GetByteArrayElements(value, NULL);
+ window->copyIn(offset, (uint8_t const *)bytes, len);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ fieldSlot->type = FIELD_TYPE_BLOB;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = len;
+ env->ReleaseByteArrayElements(value, bytes, JNI_ABORT);
+ LOG_WINDOW("%d,%d is BLOB with %u bytes @ %d", row, col, len, offset);
+ return true;
+ }
+
+ // Store a Java String in cell (row, col). Depending on the
+ // WINDOW_STORAGE_UTF8 build flag the text is written as NUL-terminated
+ // UTF-8 (len includes the terminator) or as raw UTF-16 code units.
+ // Returns false when the value is null, the slot is invalid, the chars
+ // cannot be pinned, or window allocation fails.
+ static jboolean putString_native(JNIEnv * env, jobject object, jstring value, jint row, jint col)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ if (!value) {
+ LOG_WINDOW("How did a null value send to here");
+ return false;
+ }
+ field_slot_t * fieldSlot = window->getFieldSlotWithCheck(row, col);
+ if (fieldSlot == NULL) {
+ LOG_WINDOW(" getFieldSlotWithCheck error ");
+ return false;
+ }
+
+#if WINDOW_STORAGE_UTF8
+ int len = env->GetStringUTFLength(value) + 1;
+ char const * valStr = env->GetStringUTFChars(value, NULL);
+#else
+ int len = env->GetStringLength(value);
+ // GetStringLength return number of chars and one char takes 2 bytes
+ len *= 2;
+ const jchar* valStr = env->GetStringChars(value, NULL);
+#endif
+ if (!valStr) {
+ LOG_WINDOW("value can't be transfer to UTFChars");
+ return false;
+ }
+
+ int offset = window->alloc(len);
+ if (!offset) {
+ LOG_WINDOW("Failed allocating %u bytes", len);
+#if WINDOW_STORAGE_UTF8
+ env->ReleaseStringUTFChars(value, valStr);
+#else
+ env->ReleaseStringChars(value, valStr);
+#endif
+ return false;
+ }
+
+ window->copyIn(offset, (uint8_t const *)valStr, len);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ fieldSlot->type = FIELD_TYPE_STRING;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = len;
+
+ LOG_WINDOW("%d,%d is TEXT with %u bytes @ %d", row, col, len, offset);
+#if WINDOW_STORAGE_UTF8
+ env->ReleaseStringUTFChars(value, valStr);
+#else
+ env->ReleaseStringChars(value, valStr);
+#endif
+
+ return true;
+ }
+
+ // Store a 64-bit integer in cell (row, col); false when the slot lookup
+ // fails.
+ static jboolean putLong_native(JNIEnv * env, jobject object, jlong value, jint row, jint col)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ if (window->putLong(row, col, value)) {
+ LOG_WINDOW("%d,%d is INTEGER 0x%016llx", row, col, value);
+ return true;
+ }
+ LOG_WINDOW(" getFieldSlotWithCheck error ");
+ return false;
+ }
+
+ // Store a double in cell (row, col); false when the slot lookup fails.
+ static jboolean putDouble_native(JNIEnv * env, jobject object, jdouble value, jint row, jint col)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ if (window->putDouble(row, col, value)) {
+ LOG_WINDOW("%d,%d is FLOAT %lf", row, col, value);
+ return true;
+ }
+ LOG_WINDOW(" getFieldSlotWithCheck error ");
+ return false;
+ }
+
+ // Mark cell (row, col) as SQL NULL; false when the slot lookup fails.
+ static jboolean putNull_native(JNIEnv * env, jobject object, jint row, jint col)
+ {
+ CursorWindow * window = GET_WINDOW(env, object);
+ if (window->putNull(row, col)) {
+ LOG_WINDOW("%d,%d is NULL", row, col);
+ return true;
+ }
+ LOG_WINDOW(" getFieldSlotWithCheck error ");
+ return false;
+ }
+
+ // Release the storage used by the most recently allocated row.
+ static void freeLastRow(JNIEnv * env, jobject object) {
+ GET_WINDOW(env, object)->freeLastRow();
+ }
+
+ // JNI dispatch table mapping Java method names + signatures to the native
+ // implementations above; registered in register_android_database_CursorWindow.
+ static JNINativeMethod sMethods[] =
+ {
+ /* name, signature, funcPtr */
+ {"native_init", "(ZJJJ)V", (void *)native_init_empty},
+ // {"native_init", "(Landroid/os/IBinder;)V", (void *)native_init_memory},
+ // {"native_getBinder", "()Landroid/os/IBinder;", (void *)native_getBinder},
+ {"native_clear", "()V", (void *)native_clear},
+ {"close_native", "()V", (void *)native_close},
+ {"getLong_native", "(II)J", (void *)getLong_native},
+ {"getBlob_native", "(II)[B", (void *)getBlob_native},
+ {"isBlob_native", "(II)Z", (void *)isBlob_native},
+ {"getString_native", "(II)Ljava/lang/String;", (void *)getString_native},
+ //{"getString_native", "(II)[B", (void *)getString_native},
+ {"copyStringToBuffer_native", "(IIILandroid/database/CharArrayBuffer;)[C", (void *)copyStringToBuffer_native},
+ {"getDouble_native", "(II)D", (void *)getDouble_native},
+ {"isNull_native", "(II)Z", (void *)isNull_native},
+ {"getNumRows_native", "()I", (void *)getNumRows},
+ {"setNumColumns_native", "(I)Z", (void *)setNumColumns},
+ {"allocRow_native", "()Z", (void *)allocRow},
+ {"putBlob_native", "([BII)Z", (void *)putBlob_native},
+ {"putString_native", "(Ljava/lang/String;II)Z", (void *)putString_native},
+ {"putLong_native", "(JII)Z", (void *)putLong_native},
+ {"putDouble_native", "(DII)Z", (void *)putDouble_native},
+ {"freeLastRow_native", "()V", (void *)freeLastRow},
+ {"putNull_native", "(II)Z", (void *)putNull_native},
+ {"isString_native", "(II)Z", (void *)isString_native},
+ {"isFloat_native", "(II)Z", (void *)isFloat_native},
+ {"isInteger_native", "(II)Z", (void *)isInteger_native},
+ {"getType_native", "(II)I", (void *)getType_native},
+ };
+
+ // Resolve the cached field IDs (CursorWindow.nWindow, CharArrayBuffer's
+ // data/sizeCopied) and register the native method table. Returns 0 on
+ // success, -1 on any lookup failure.
+ int register_android_database_CursorWindow(JNIEnv * env)
+ {
+ jclass clazz;
+ clazz = env->FindClass("net/sqlcipher/CursorWindow");
+ if (clazz == NULL) {
+ LOGE("Can't find net/sqlcipher/CursorWindow");
+ return -1;
+ }
+ gWindowField = env->GetFieldID(clazz, "nWindow", "J");
+ if (gWindowField == NULL) {
+ LOGE("Error locating fields");
+ return -1;
+ }
+ clazz = env->FindClass("android/database/CharArrayBuffer");
+ if (clazz == NULL) {
+ LOGE("Can't find android/database/CharArrayBuffer");
+ return -1;
+ }
+ gBufferField = env->GetFieldID(clazz, "data", "[C");
+ if (gBufferField == NULL) {
+ LOGE("Error locating fields data in CharArrayBuffer");
+ return -1;
+ }
+ gSizeCopiedField = env->GetFieldID(clazz, "sizeCopied", "I");
+ if (gSizeCopiedField == NULL) {
+ LOGE("Error locating fields sizeCopied in CharArrayBuffer");
+ return -1;
+ }
+ // Re-lookup needed because clazz was reused for CharArrayBuffer above;
+ // the earlier lookup of this class succeeded so the result is not
+ // re-checked here.
+ clazz = env->FindClass("net/sqlcipher/CursorWindow");
+ return env->RegisterNatives(clazz, sMethods, NELEM(sMethods));
+ }
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteCompiledSql.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteCompiledSql.cpp
new file mode 100644
index 0000000..4db2864
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteCompiledSql.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2006-2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "Cursor"
+
+#include
+// #include
+// #include
+// #include
+
+#include
+#include
+#include
+#include
+#include
+#include "log.h"
+#include "jni_elements.h"
+#include "jni_exception.h"
+#include "sqlite3_exception.h"
+
+namespace sqlcipher {
+
+static jfieldID gHandleField;
+static jfieldID gStatementField;
+
+
+#define GET_STATEMENT(env, object) \
+ (sqlite3_stmt *)env->GetLongField(object, gStatementField)
+#define GET_HANDLE(env, object) \
+ (sqlite3 *)env->GetLongField(object, gHandleField)
+
+
+/**
+ * Compile sqlString against the open database handle, finalizing any
+ * statement previously stored on the Java object's nStatement field. On
+ * success the new sqlite3_stmt pointer is stored back on the object and
+ * returned; on error a SQLite exception (whose message includes the query)
+ * is thrown and NULL returned.
+ */
+sqlite3_stmt * compile(JNIEnv* env, jobject object,
+ sqlite3 * handle, jstring sqlString)
+{
+ int err;
+ jchar const * sql;
+ jsize sqlLen;
+ sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+ // Make sure not to leak the statement if it already exists
+ if (statement != NULL) {
+ sqlite3_finalize(statement);
+ env->SetLongField(object, gStatementField, 0);
+ }
+
+ // Compile the SQL using the UTF-16 API; byte length = chars * 2.
+ sql = env->GetStringChars(sqlString, NULL);
+ sqlLen = env->GetStringLength(sqlString);
+ err = sqlite3_prepare16_v2(handle, sql, sqlLen * 2, &statement, NULL);
+ env->ReleaseStringChars(sqlString, sql);
+
+ if (err == SQLITE_OK) {
+ // Store the statement in the Java object for future calls
+ LOGV("Prepared statement %p on %p", statement, handle);
+ env->SetLongField(object, gStatementField, (intptr_t)statement);
+ return statement;
+ } else {
+ // Error messages like 'near ")": syntax error' are not
+ // always helpful enough, so construct an error string that
+ // includes the query itself.
+ const char *query = env->GetStringUTFChars(sqlString, NULL);
+ char *message = (char*) malloc(strlen(query) + 50);
+ if (message) {
+ strcpy(message, ", while compiling: "); // less than 50 chars
+ strcat(message, query);
+ }
+ env->ReleaseStringUTFChars(sqlString, query);
+ // NOTE(review): on malloc failure `message` is NULL here — confirm
+ // throw_sqlite3_exception tolerates a NULL message.
+ throw_sqlite3_exception(env, handle, message);
+ free(message);
+ return NULL;
+ }
+}
+
+// JNI entry point: compile sqlString using the database handle stored on
+// the Java object.
+static void native_compile(JNIEnv* env, jobject object, jstring sqlString)
+{
+ sqlite3 * handle = GET_HANDLE(env, object);
+ compile(env, object, handle, sqlString);
+}
+
+// Finalize the prepared statement stored on the Java object (if any) and
+// clear the Java-side nStatement field. Safe to call repeatedly.
+static void native_finalize(JNIEnv* env, jobject object)
+{
+ sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+ if (statement != NULL) {
+ sqlite3_finalize(statement);
+ env->SetLongField(object, gStatementField, 0);
+ }
+}
+
+// JNI dispatch table for net.sqlcipher.database.SQLiteCompiledSql.
+static JNINativeMethod sMethods[] =
+{
+ /* name, signature, funcPtr */
+ {"native_compile", "(Ljava/lang/String;)V", (void *)native_compile},
+ {"native_finalize", "()V", (void *)native_finalize},
+};
+
+// Resolve the cached field IDs (nHandle, nStatement) on SQLiteCompiledSql
+// and register its native methods. Returns 0 on success, -1 on failure.
+int register_android_database_SQLiteCompiledSql(JNIEnv * env)
+{
+ jclass clazz;
+
+ clazz = env->FindClass("net/sqlcipher/database/SQLiteCompiledSql");
+ if (clazz == NULL) {
+ LOGE("Can't find net/sqlcipher/database/SQLiteCompiledSql");
+ return -1;
+ }
+
+ gHandleField = env->GetFieldID(clazz, "nHandle", "J");
+ gStatementField = env->GetFieldID(clazz, "nStatement", "J");
+
+ if (gHandleField == NULL || gStatementField == NULL) {
+ LOGE("Error locating fields");
+ return -1;
+ }
+ return env->RegisterNatives(clazz, sMethods, NELEM(sMethods));
+}
+
+
+
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDatabase.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDatabase.cpp
new file mode 100644
index 0000000..9fb15d9
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDatabase.cpp
@@ -0,0 +1,678 @@
+/*
+ * Copyright (C) 2006-2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "Database"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "log.h"
+#include "jni_elements.h"
+#include "jni_exception.h"
+#include "sqlite3_exception.h"
+#include "sqlcipher_loading.h"
+
+#define UTF16_STORAGE 0
+#define INVALID_VERSION -1
+#define SQLITE_SOFT_HEAP_LIMIT (4 * 1024 * 1024)
+#define ANDROID_TABLE "android_metadata"
+/* uncomment the next line to force-enable logging of all statements */
+// #define DB_LOG_STATEMENTS
+
+namespace sqlcipher {
+
+
  // Open-mode flag bits; these mirror the constants declared on the Java
  // side in net.sqlcipher.database.SQLiteDatabase.
  enum {
    OPEN_READWRITE = 0x00000000,
    OPEN_READONLY = 0x00000001,
    OPEN_READ_MASK = 0x00000001,
    NO_LOCALIZED_COLLATORS = 0x00000010,
    CREATE_IF_NECESSARY = 0x10000000
  };

  // Cached field ID of SQLiteDatabase.mNativeHandle (holds the sqlite3*).
  static jfieldID offset_db_handle;
+
+ static char *createStr(const char *path) {
+ int len = strlen(path);
+ char *str = (char *)malloc(len + 1);
+ strncpy(str, path, len);
+ str[len] = 0;
+ return str;
+ }
+
  /* SQLite global log callback (installed via SQLITE_CONFIG_LOG).
   * |databaseName| is the heap string registered in registerLoggingFunc. */
  static void sqlLogger(void *databaseName, int iErrCode, const char *zMsg) {
    // skip printing this message if it is due to certain types of errors
    // (constraint violations are routine and would flood the log)
    if (iErrCode == SQLITE_CONSTRAINT) return;
    LOGI("sqlite returned: error code = %d, msg = %s\n", iErrCode, zMsg);
  }
+
+ // register the logging func on sqlite. needs to be done BEFORE any sqlite3 func is called.
  // register the logging func on sqlite. needs to be done BEFORE any sqlite3 func is called.
  // Idempotent: only the first call (for the first database opened in this
  // process) actually installs the callback; later calls are no-ops.
  static void registerLoggingFunc(const char *path) {
    static bool loggingFuncSet = false;
    if (loggingFuncSet) {
      return;
    }

    LOGV("Registering sqlite logging func \n");
    // NOTE(review): the createStr() copy is intentionally never freed (it
    // lives for the process); on the failure path below it leaks — confirm
    // whether that one-shot leak is acceptable.
    int err = sqlite3_config(SQLITE_CONFIG_LOG, &sqlLogger, (void *)createStr(path));
    if (err != SQLITE_OK) {
      LOGE("sqlite_config failed error_code = %d. THIS SHOULD NEVER occur.\n", err);
      return;
    }
    loggingFuncSet = true;
  }
+
+ int native_status(JNIEnv* env, jobject object, jint operation, jboolean reset)
+ {
+ int value;
+ int highWater;
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ int status = sqlite3_status(operation, &value, &highWater, reset);
+ if(status != SQLITE_OK){
+ throw_sqlite3_exception(env, handle);
+ }
+ return value;
+ }
+
  /* JNI: key(byte[]).  Supplies the raw key bytes to SQLCipher via
   * sqlite3_key().  An empty or unpinnable key array is silently ignored
   * (the database stays un-keyed).  Throws SQLiteException on failure. */
  void native_key(JNIEnv* env, jobject object, jbyteArray jKey) {
    int rc = 0;
    int index = 0;
    jsize size = 0;
    jbyte *key = 0;
    sqlite3 *handle = NULL;
    handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
    if(handle == NULL){
      // Logged but not fatal: sqlite3_key(NULL, ...) below will fail and throw.
      LOGE("env->GetLongField returned NULL when retrieving sqlite3 *\n");
    }
    key = env->GetByteArrayElements(jKey, NULL);
    size = env->GetArrayLength(jKey);
    if(key == NULL || size == 0) goto done;
    rc = sqlite3_key(handle, key, size);
    if(rc != SQLITE_OK) {
      throw_sqlite3_exception(env, handle);
    }
  done:
    // JNI_ABORT: the key buffer was only read, no copy-back required.
    if(key) env->ReleaseByteArrayElements(jKey, key, JNI_ABORT);
  }
+
+ void native_rekey(JNIEnv* env, jobject object, jbyteArray jKey) {
+ int rc = 0;
+ jsize size = 0;
+ jbyte *key = 0;
+ sqlite3 *handle = NULL;
+ handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ key = env->GetByteArrayElements(jKey, NULL);
+ size = env->GetArrayLength(jKey);
+ if(key == NULL || size == 0) goto done;
+ rc = sqlite3_rekey(handle, key, size);
+ if(rc != SQLITE_OK) {
+ throw_sqlite3_exception(env, handle);
+ }
+ done:
+ if(key) env->ReleaseByteArrayElements(jKey, key, JNI_ABORT);
+ }
+
+ void native_key_mutf8(JNIEnv* env, jobject object, jcharArray jKey) {
+ int rc;
+ int idx;
+ jint releaseElements = 0;
+ jboolean arrayIsCopy;
+ sqlite3 *handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ jsize sz = env->GetArrayLength(jKey);
+ jchar* jKeyChar = env->GetCharArrayElements(jKey, &arrayIsCopy);
+ jstring key = env->NewString(jKeyChar, sz);
+ const char* password = env->GetStringUTFChars(key, JNI_FALSE);
+ int password_sz = env->GetStringUTFLength(key);
+ if(password_sz > 0){
+ rc = sqlite3_key(handle, password, password_sz);
+ if(rc != SQLITE_OK){
+ throw_sqlite3_exception(env, handle);
+ }
+ }
+ env->ReleaseCharArrayElements(jKey, jKeyChar, JNI_ABORT);
+ env->ReleaseStringUTFChars(key, password);
+ }
+
+ void native_rawExecSQL(JNIEnv* env, jobject object, jstring sql)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ char const * sqlCommand = env->GetStringUTFChars(sql, NULL);
+ int status = sqlite3_exec(handle, sqlCommand, NULL, NULL, NULL);
+ env->ReleaseStringUTFChars(sql, sqlCommand);
+ if(status != SQLITE_OK){
+ throw_sqlite3_exception(env, handle);
+ }
+ }
+
+ /* public native void setICURoot(String path); */
+ // void setICURoot(JNIEnv* env, jobject object, jstring ICURoot)
+ // {
+ // char const * ICURootPath = env->GetStringUTFChars(ICURoot, NULL);
+ // setenv("SQLCIPHER_ICU_PREFIX", ICURootPath, 1);
+ // env->ReleaseStringUTFChars(ICURoot, ICURootPath);
+ // }
+
+
  /* public native void dbopen(String path, int flags, String locale); */
  /* Opens (or creates) the database file at |pathString| and stores the
   * raw sqlite3* into SQLiteDatabase.mNativeHandle.  Throws a
   * SQLiteException on any failure.  All partially-acquired resources are
   * released through the single `done:` exit path, so every error branch
   * must `goto done` rather than return. */
  void dbopen(JNIEnv* env, jobject object, jstring pathString, jint flags)
  {
    int err;
    sqlite3 * handle = NULL;
    sqlite3_stmt * statement = NULL;
    char const * path8 = env->GetStringUTFChars(pathString, NULL);
    int sqliteFlags;

    // register the logging func on sqlite. needs to be done BEFORE any sqlite3 func is called.
    registerLoggingFunc(path8);

    // convert our flags into the sqlite flags
    if (flags & CREATE_IF_NECESSARY) {
      sqliteFlags = SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE;
    } else if (flags & OPEN_READONLY) {
      sqliteFlags = SQLITE_OPEN_READONLY;
    } else {
      sqliteFlags = SQLITE_OPEN_READWRITE;
    }

    err = sqlite3_open_v2(path8, &handle, sqliteFlags, NULL);
    if (err != SQLITE_OK) {
      LOGE("sqlite3_open_v2(\"%s\", &handle, %d, NULL) failed\n", path8, sqliteFlags);
      throw_sqlite3_exception_errcode(env, err, "Could not open database");
      goto done;
    }

    // Check that the database is really read/write when that is what we asked for.
    if ((sqliteFlags & SQLITE_OPEN_READWRITE) && sqlite3_db_readonly(handle, NULL)) {
      throw_sqlite3_exception(env, handle, "Could not open the database in read/write mode.");
      goto done;
    }

    // The soft heap limit prevents the page cache allocations from growing
    // beyond the given limit, no matter what the max page cache sizes are
    // set to. The limit does not, as of 3.5.0, affect any other allocations.
    sqlite3_soft_heap_limit(SQLITE_SOFT_HEAP_LIMIT);

    // Set the default busy handler to retry for 1000ms and then return SQLITE_BUSY
    err = sqlite3_busy_timeout(handle, 1000 /* ms */);
    if (err != SQLITE_OK) {
      LOGE("sqlite3_busy_timeout(handle, 1000) failed for \"%s\"\n", path8);
      throw_sqlite3_exception(env, handle, "Could not set busy timeout");
      goto done;
    }

#ifdef DB_INTEGRITY_CHECK
    // Optional compile-time integrity pass over the first check result row.
    static const char* integritySql = "pragma integrity_check(1);";
    err = sqlite3_prepare_v2(handle, integritySql, -1, &statement, NULL);
    if (err != SQLITE_OK) {
      LOGE("sqlite_prepare_v2(handle, \"%s\") failed for \"%s\"\n", integritySql, path8);
      throw_sqlite3_exception(env, handle, "sqlite_prepare_v2(handle, \"pragma integrity_check(1);\") failed");
      goto done;
    }

    // first is OK or error message
    err = sqlite3_step(statement);
    if (err != SQLITE_ROW) {
      // NOTE(review): format string has one %s but two varargs here —
      // fix the LOGE call if this #ifdef block is ever enabled.
      LOGE("integrity check failed for \"%s\"\n", integritySql, path8);
      throw_sqlite3_exception(env, handle);
      goto done;
    } else {
      const char *text = (const char*)sqlite3_column_text(statement, 0);
      if (strcmp(text, "ok") != 0) {
        // NOTE(review): same %s/vararg count mismatch as above.
        LOGE("integrity check failed for \"%s\": %s\n", integritySql, path8, text);
        jniThrowException(env, "net/sqlcipher/database/SQLiteDatabaseCorruptException", text);
        goto done;
      }
    }
#endif

    sqlite3_enable_load_extension(handle, 1);

    LOGV("Opened '%s' - %p\n", path8, handle);
    env->SetLongField(object, offset_db_handle, (intptr_t)handle);
    handle = NULL; // The caller owns the handle now.

  done:
    // Release allocated resources
    if (path8 != NULL) env->ReleaseStringUTFChars(pathString, path8);
    if (statement != NULL) sqlite3_finalize(statement);
    if (handle != NULL) sqlite3_close(handle);
  }
+
+ static char *getDatabaseName(JNIEnv* env, sqlite3 * handle, jstring databaseName) {
+ char const *path = env->GetStringUTFChars(databaseName, NULL);
+ if (path == NULL) {
+ LOGE("Failure in getDatabaseName(). VM ran out of memory?\n");
+ return NULL; // VM would have thrown OutOfMemoryError
+ }
+ char *dbNameStr = createStr(path);
+ env->ReleaseStringUTFChars(databaseName, path);
+ return dbNameStr;
+ }
+
  /* sqlite3_trace() callback: logs each executed statement, tagged with
   * the database name registered in enableSqlTracing(). */
  static void sqlTrace(void *databaseName, const char *sql) {
    LOGI("sql_statement|%s|%s\n", (char *)databaseName, sql);
  }
+
+ /* public native void enableSqlTracing(); */
+ static void enableSqlTracing(JNIEnv* env, jobject object, jstring databaseName)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ sqlite3_trace(handle, &sqlTrace, (void *)getDatabaseName(env, handle, databaseName));
+ }
+
  /* sqlite3_profile() callback: logs each statement's wall time.
   * |tm| is in nanoseconds; converted to milliseconds for the log line. */
  static void sqlProfile(void *databaseName, const char *sql, sqlite3_uint64 tm) {
    double d = tm/1000000.0;
    LOGI("elapsedTime4Sql|%s|%.3f ms|%s\n", (char *)databaseName, d, sql);
  }
+
+ /* public native void enableSqlProfiling(); */
+ static void enableSqlProfiling(JNIEnv* env, jobject object, jstring databaseName)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ sqlite3_profile(handle, &sqlProfile, (void *)getDatabaseName(env, handle, databaseName));
+ }
+
+
  /* public native void close(); */
  /* Closes the native connection, first reclaiming the heap database-name
   * labels held by the trace/profile callbacks (sqlite3_trace/profile
   * return the previously registered callback argument).  If sqlite3_close
   * fails (e.g. unfinalized statements remain), the handle field is left
   * intact and a SQLiteException is thrown. */
  static void dbclose(JNIEnv* env, jobject object)
  {
    sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);

    if (handle != NULL) {
      // release the memory associated with the traceFuncArg in enableSqlTracing function
      void *traceFuncArg = sqlite3_trace(handle, &sqlTrace, NULL);
      if (traceFuncArg != NULL) {
        free(traceFuncArg);
      }
      // release the memory associated with the traceFuncArg in enableSqlProfiling function
      traceFuncArg = sqlite3_profile(handle, &sqlProfile, NULL);
      if (traceFuncArg != NULL) {
        free(traceFuncArg);
      }
      LOGV("Closing database: handle=%p\n", handle);
      int result = sqlite3_close(handle);
      if (result == SQLITE_OK) {
        LOGV("Closed %p\n", handle);
        env->SetLongField(object, offset_db_handle, 0);
      } else {
        // This can happen if sub-objects aren't closed first. Make sure the caller knows.
        LOGE("sqlite3_close(%p) failed: %d\n", handle, result);
        throw_sqlite3_exception(env, handle, "sqlite3_close() failed");
      }
    }
  }
+
+ /* public native void native_execSQL(String sql); */
+ static void native_execSQL(JNIEnv* env, jobject object, jstring sqlString)
+ {
+ int err;
+ int stepErr;
+ sqlite3_stmt * statement = NULL;
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ jchar const * sql = env->GetStringChars(sqlString, NULL);
+ jsize sqlLen = env->GetStringLength(sqlString);
+
+ if (sql == NULL || sqlLen == 0) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", "You must supply an SQL string");
+ return;
+ }
+
+ err = sqlite3_prepare16_v2(handle, sql, sqlLen * 2, &statement, NULL);
+
+ env->ReleaseStringChars(sqlString, sql);
+
+ if (err != SQLITE_OK) {
+ char const * sql8 = env->GetStringUTFChars(sqlString, NULL);
+ LOGE("Failure %d (%s) on %p when preparing '%s'.\n", err, sqlite3_errmsg(handle), handle, sql8);
+ throw_sqlite3_exception(env, handle, sql8);
+ env->ReleaseStringUTFChars(sqlString, sql8);
+ return;
+ }
+
+ stepErr = sqlite3_step(statement);
+ err = sqlite3_finalize(statement);
+
+ if (stepErr != SQLITE_DONE) {
+ if (stepErr == SQLITE_ROW) {
+ throw_sqlite3_exception(env, "Queries cannot be performed using execSQL(), use query() instead.");
+ } else {
+ char const * sql8 = env->GetStringUTFChars(sqlString, NULL);
+ LOGE("Failure %d (%s) on %p when executing '%s'\n", err, sqlite3_errmsg(handle), handle, sql8);
+ throw_sqlite3_exception(env, handle, sql8);
+ env->ReleaseStringUTFChars(sqlString, sql8);
+
+ }
+ } else
+#ifndef DB_LOG_STATEMENTS
+ // IF_LOGV()
+#endif
+ {
+ char const * sql8 = env->GetStringUTFChars(sqlString, NULL);
+ LOGV("Success on %p when executing '%s'\n", handle, sql8);
+ env->ReleaseStringUTFChars(sqlString, sql8);
+ }
+ }
+
+ /* native long lastInsertRow(); */
+ static jlong lastInsertRow(JNIEnv* env, jobject object)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+
+ return sqlite3_last_insert_rowid(handle);
+ }
+
+ /* native int lastChangeCount(); */
+ static jint lastChangeCount(JNIEnv* env, jobject object)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+
+ return sqlite3_changes(handle);
+ }
+
+ /* native int native_getDbLookaside(); */
+ static jint native_getDbLookaside(JNIEnv* env, jobject object)
+ {
+ sqlite3 * handle = (sqlite3 *)env->GetLongField(object, offset_db_handle);
+ int pCur = -1;
+ int unused;
+ sqlite3_db_status(handle, SQLITE_DBSTATUS_LOOKASIDE_USED, &pCur, &unused, 0);
+ return pCur;
+ }
+
+ /* set locale in the android_metadata table, install localized collators, and rebuild indexes */
+ // static void native_setLocale(JNIEnv* env, jobject object, jstring localeString, jint flags)
+ // {
+ // if ((flags & NO_LOCALIZED_COLLATORS)) return;
+
+ // int err;
+ // char const* locale8 = env->GetStringUTFChars(localeString, NULL);
+ // sqlite3 * handle = (sqlite3 *)env->GetIntField(object, offset_db_handle);
+ // sqlite3_stmt* stmt = NULL;
+ // char** meta = NULL;
+ // int rowCount, colCount;
+ // char* dbLocale = NULL;
+
+ // // create the table, if necessary and possible
+ // if (!(flags & OPEN_READONLY)) {
+ // static const char *createSql ="CREATE TABLE IF NOT EXISTS " ANDROID_TABLE " (locale TEXT)";
+ // err = sqlite3_exec(handle, createSql, NULL, NULL, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("CREATE TABLE " ANDROID_TABLE " failed\n");
+ // throw_sqlite3_exception(env, handle, "create locale table failed");
+ // goto done;
+ // }
+ // }
+
+ // // try to read from the table
+ // static const char *selectSql = "SELECT locale FROM " ANDROID_TABLE " LIMIT 1";
+ // err = sqlite3_get_table(handle, selectSql, &meta, &rowCount, &colCount, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("SELECT locale FROM " ANDROID_TABLE " failed\n");
+ // throw_sqlite3_exception(env, handle, "select locale failed");
+ // goto done;
+ // }
+
+ // dbLocale = (rowCount >= 1) ? meta[colCount] : NULL;
+
+ // if (dbLocale != NULL && !strcmp(dbLocale, locale8)) {
+ // // database locale is the same as the desired locale; set up the collators and go
+ // err = register_localized_collators(handle, locale8, UTF16_STORAGE);
+ // if (err != SQLITE_OK) throw_sqlite3_exception(env, handle);
+ // goto done; // no database changes needed
+ // }
+
+ // if ((flags & OPEN_READONLY)) {
+ // // read-only database, so we're going to have to put up with whatever we got
+ // // For registering new index. Not for modifing the read-only database.
+ // err = register_localized_collators(handle, locale8, UTF16_STORAGE);
+ // if (err != SQLITE_OK) throw_sqlite3_exception(env, handle, "register localized collators failed");
+ // goto done;
+ // }
+
+ // // need to update android_metadata and indexes atomically, so use a transaction...
+ // err = sqlite3_exec(handle, "BEGIN TRANSACTION", NULL, NULL, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("BEGIN TRANSACTION failed setting locale\n");
+ // throw_sqlite3_exception(env, handle, "BEGIN TRANSACTION failed setting locale");
+ // goto done;
+ // }
+
+ // err = register_localized_collators(handle, locale8, UTF16_STORAGE);
+ // if (err != SQLITE_OK) {
+ // LOGE("register_localized_collators() failed setting locale\n");
+ // throw_sqlite3_exception(env, handle, "register_localized_collators() failed setting locale");
+ // goto rollback;
+ // }
+
+ // err = sqlite3_exec(handle, "DELETE FROM " ANDROID_TABLE, NULL, NULL, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("DELETE failed setting locale\n");
+ // throw_sqlite3_exception(env, handle, "DELETE failed setting locale");
+ // goto rollback;
+ // }
+
+ // static const char *sql = "INSERT INTO " ANDROID_TABLE " (locale) VALUES(?);";
+ // err = sqlite3_prepare_v2(handle, sql, -1, &stmt, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("sqlite3_prepare_v2(\"%s\") failed\n", sql);
+ // throw_sqlite3_exception(env, handle, "sqlite3_prepare_v2() failed setting locale");
+ // goto rollback;
+ // }
+
+ // err = sqlite3_bind_text(stmt, 1, locale8, -1, SQLITE_TRANSIENT);
+ // if (err != SQLITE_OK) {
+ // LOGE("sqlite3_bind_text() failed setting locale\n");
+ // throw_sqlite3_exception(env, handle, "sqlite3_bind_text() failed setting locale");
+ // goto rollback;
+ // }
+
+ // err = sqlite3_step(stmt);
+ // if (err != SQLITE_OK && err != SQLITE_DONE) {
+ // LOGE("sqlite3_step(\"%s\") failed setting locale\n", sql);
+ // throw_sqlite3_exception(env, handle, "sqlite3_step() failed setting locale");
+ // goto rollback;
+ // }
+
+ // err = sqlite3_exec(handle, "REINDEX LOCALIZED", NULL, NULL, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("REINDEX LOCALIZED failed\n");
+ // throw_sqlite3_exception(env, handle, "REINDEX LOCALIZED failed");
+ // goto rollback;
+ // }
+
+ // // all done, yay!
+ // err = sqlite3_exec(handle, "COMMIT TRANSACTION", NULL, NULL, NULL);
+ // if (err != SQLITE_OK) {
+ // LOGE("COMMIT TRANSACTION failed setting locale\n");
+ // throw_sqlite3_exception(env, handle, "COMMIT TRANSACTION failed setting locale");
+ // goto done;
+ // }
+
+ // rollback:
+ // if (err != SQLITE_OK) {
+ // sqlite3_exec(handle, "ROLLBACK TRANSACTION", NULL, NULL, NULL);
+ // }
+
+ // done:
+ // if (locale8 != NULL) env->ReleaseStringUTFChars(localeString, locale8);
+ // if (stmt != NULL) sqlite3_finalize(stmt);
+ // if (meta != NULL) sqlite3_free_table(meta);
+ // }
+
  /* JNI: releaseMemory().  Asks SQLite to free up to
   * SQLITE_SOFT_HEAP_LIMIT bytes of heap memory (page cache etc.);
   * returns the number of bytes actually released. */
  static jint native_releaseMemory(JNIEnv *env, jobject clazz)
  {
    // Attempt to release as much memory from the page cache as possible.
    return sqlite3_release_memory(SQLITE_SOFT_HEAP_LIMIT);
  }
+
  /* JNI method table for SQLiteDatabase: maps the Java `native` methods
   * to the C++ implementations above (registered below). */
  static JNINativeMethod sMethods[] =
  {
    /* name, signature, funcPtr */
    {"dbopen", "(Ljava/lang/String;I)V", (void *)dbopen},
    {"dbclose", "()V", (void *)dbclose},
    {"enableSqlTracing", "(Ljava/lang/String;)V", (void *)enableSqlTracing},
    {"enableSqlProfiling", "(Ljava/lang/String;)V", (void *)enableSqlProfiling},
    {"native_execSQL", "(Ljava/lang/String;)V", (void *)native_execSQL},
    {"lastInsertRow", "()J", (void *)lastInsertRow},
    {"lastChangeCount", "()I", (void *)lastChangeCount},
    {"native_getDbLookaside", "()I", (void *)native_getDbLookaside},
    {"releaseMemory", "()I", (void *)native_releaseMemory},
    {"native_rawExecSQL", "(Ljava/lang/String;)V", (void *)native_rawExecSQL},
    {"native_status", "(IZ)I", (void *)native_status},
    {"key_mutf8", "([C)V", (void *)native_key_mutf8},
    {"key", "([B)V", (void *)native_key},
    {"rekey", "([B)V", (void *)native_rekey},
  };
+
+ int register_android_database_SQLiteDatabase(JNIEnv *env)
+ {
+ jclass clazz;
+
+ clazz = env->FindClass("net/sqlcipher/database/SQLiteDatabase");
+ if (clazz == NULL) {
+ LOGE("Can't find net/sqlcipher/database/SQLiteDatabase\n");
+ return -1;
+ }
+
+ offset_db_handle = env->GetFieldID(clazz, "mNativeHandle", "J");
+ if (offset_db_handle == NULL) {
+ LOGE("Can't find SQLiteDatabase.mNativeHandle\n");
+ return -1;
+ }
+ return env->RegisterNatives(clazz, sMethods, NELEM(sMethods));
+ }
+
  //this code is not executed
  // NOTE(review): comment above claims this JNI_OnLoad never runs —
  // presumably registration happens elsewhere; confirm before relying on
  // the register_* calls below.
  /* Standard JNI entry point: registers all native method tables for the
   * SQLCipher classes.  Return values of the register_* calls are ignored;
   * a failed registration surfaces later as UnsatisfiedLinkError. */
  extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved)
  {
    JNIEnv *env;
    //gJavaVM = vm;
    LOGI("JNI_OnLoad called");
    if (vm->GetEnv((void**) &env, JNI_VERSION_1_2) != JNI_OK) {
      LOGE("Failed to get the environment using GetEnv()");
      return -1;
    }

    LOGI("JNI_OnLoad register methods ");

    register_android_database_SQLiteDatabase(env);
    register_android_database_SQLiteCompiledSql(env);
    register_android_database_SQLiteQuery(env);
    register_android_database_SQLiteProgram(env);
    register_android_database_SQLiteStatement(env);
    register_android_database_CursorWindow(env);

    //register_android_database_SQLiteDebug(env);

    return JNI_VERSION_1_2;

  }
+
  /* throw a SQLiteException with a message appropriate for the error in handle */
  /* Convenience overload: no user message, only the sqlite3 error text. */
  void throw_sqlite3_exception(JNIEnv* env, sqlite3* handle) {
    throw_sqlite3_exception(env, handle, NULL);
  }
+
  /* throw a SQLiteException with the given message */
  /* Convenience overload: no sqlite3 handle, only the caller's message. */
  void throw_sqlite3_exception(JNIEnv* env, const char* message) {
    throw_sqlite3_exception(env, NULL, message);
  }
+
  /* throw a SQLiteException with a message appropriate for the error in handle
     concatenated with the given message
  */
  void throw_sqlite3_exception(JNIEnv* env, sqlite3* handle, const char* message) {
    if (handle && sqlite3_errcode(handle) != SQLITE_OK) {
      // Use the connection's current error code/text when available.
      throw_sqlite3_exception(env, sqlite3_errcode(handle),
                              sqlite3_errmsg(handle), message);
    } else {
      // we use SQLITE_OK so that a generic SQLiteException is thrown;
      // any code not specified in the switch statement below would do.
      throw_sqlite3_exception(env, SQLITE_OK, "unknown error", message);
    }
  }
+
+ /* throw a SQLiteException for a given error code */
+ void throw_sqlite3_exception_errcode(JNIEnv* env, int errcode, const char* message) {
+ if (errcode == SQLITE_DONE) {
+ throw_sqlite3_exception(env, errcode, NULL, message);
+ } else {
+ char temp[21];
+ sprintf(temp, "error code %d", errcode);
+ throw_sqlite3_exception(env, errcode, temp, message);
+ }
+ }
+
+ /* throw a SQLiteException for a given error code, sqlite3message, and
+ user message
+ */
+ void throw_sqlite3_exception(JNIEnv* env, int errcode,
+ const char* sqlite3Message, const char* message) {
+ const char* exceptionClass;
+ switch (errcode) {
+ case SQLITE_IOERR:
+ exceptionClass = "net/sqlcipher/database/SQLiteDiskIOException";
+ break;
+ case SQLITE_CORRUPT:
+ exceptionClass = "net/sqlcipher/database/SQLiteDatabaseCorruptException";
+ break;
+ case SQLITE_CONSTRAINT:
+ exceptionClass = "net/sqlcipher/database/SQLiteConstraintException";
+ break;
+ case SQLITE_ABORT:
+ exceptionClass = "net/sqlcipher/database/SQLiteAbortException";
+ break;
+ case SQLITE_DONE:
+ exceptionClass = "net/sqlcipher/database/SQLiteDoneException";
+ break;
+ case SQLITE_FULL:
+ exceptionClass = "net/sqlcipher/database/SQLiteFullException";
+ break;
+ case SQLITE_MISUSE:
+ exceptionClass = "net/sqlcipher/database/SQLiteMisuseException";
+ break;
+ default:
+ exceptionClass = "net/sqlcipher/database/SQLiteException";
+ break;
+ }
+
+ if (sqlite3Message != NULL && message != NULL) {
+ char* fullMessage = (char *)malloc(strlen(sqlite3Message) + strlen(message) + 3);
+ if (fullMessage != NULL) {
+ strcpy(fullMessage, sqlite3Message);
+ strcat(fullMessage, ": ");
+ strcat(fullMessage, message);
+ jniThrowException(env, exceptionClass, fullMessage);
+ free(fullMessage);
+ } else {
+ jniThrowException(env, exceptionClass, sqlite3Message);
+ }
+ } else if (sqlite3Message != NULL) {
+ jniThrowException(env, exceptionClass, sqlite3Message);
+ } else {
+ jniThrowException(env, exceptionClass, message);
+ }
+ }
+
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDebug.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDebug.cpp
new file mode 100644
index 0000000..15a31cf
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteDebug.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+// From mem_mspace.c in libsqlite
+extern "C" mspace sqlite3_get_mspace();
+
+namespace sqlcipher {
+
// Cached field IDs of SQLiteDebug$PagerStats, resolved in
// register_android_database_SQLiteDebug() below.
static jfieldID gMemoryUsedField;
static jfieldID gPageCacheOverfloField;
static jfieldID gLargestMemAllocField;


// Compile-time switch for the mspace-based heap accounting in getHeapSize().
#define USE_MSPACE 0
+
+static void getPagerStats(JNIEnv *env, jobject clazz, jobject statsObj)
+{
+ int memoryUsed;
+ int pageCacheOverflo;
+ int largestMemAlloc;
+ int unused;
+
+ sqlite3_status(SQLITE_STATUS_MEMORY_USED, &memoryUsed, &unused, 0);
+ sqlite3_status(SQLITE_STATUS_MALLOC_SIZE, &unused, &largestMemAlloc, 0);
+ sqlite3_status(SQLITE_STATUS_PAGECACHE_OVERFLOW, &pageCacheOverflo, &unused, 0);
+ env->SetIntField(statsObj, gMemoryUsedField, memoryUsed);
+ env->SetIntField(statsObj, gPageCacheOverfloField, pageCacheOverflo);
+ env->SetIntField(statsObj, gLargestMemAllocField, largestMemAlloc);
+}
+
+static jlong getHeapSize(JNIEnv *env, jobject clazz)
+{
+#if !NO_MALLINFO
+ struct mallinfo info = mspace_mallinfo(sqlite3_get_mspace());
+ struct mallinfo info = dlmallinfo();
+ return (jlong) info.usmblks;
+#elif USE_MSPACE
+ mspace space = sqlite3_get_mspace();
+ if (space != 0) {
+ return mspace_footprint(space);
+ } else {
+ return 0;
+ }
+#else
+ return 0;
+#endif
+}
+
/* Returns the number of bytes currently allocated from the SQLite heap.
 * NOTE(review): NO_MALLINFO presumably comes from the dlmalloc build
 * configuration — confirm which branch is active in this build. */
static jlong getHeapAllocatedSize(JNIEnv *env, jobject clazz)
{
#if !NO_MALLINFO
    struct mallinfo info = mspace_mallinfo(sqlite3_get_mspace());
    return (jlong) info.uordblks;
#else
    return sqlite3_memory_used();
#endif
}
+
/* Returns the number of free (unallocated) bytes in the SQLite heap,
 * derived from mallinfo or from footprint minus bytes-in-use. */
static jlong getHeapFreeSize(JNIEnv *env, jobject clazz)
{
#if !NO_MALLINFO
    struct mallinfo info = mspace_mallinfo(sqlite3_get_mspace());
    return (jlong) info.fordblks;
#else
    return getHeapSize(env, clazz) - sqlite3_memory_used();
#endif
}
+
/* Parses one mapping entry from an open /proc/<pid>/smaps stream and, if
 * the mapping belongs to the sqlite heap (path contains
 * "/tmp/sqlite-heap"), accumulates its shared/private dirty kB into the
 * out-parameters.  Returns 1 if an entry was consumed, 0 at EOF or on a
 * parse mismatch.  The expected field order (Size, Rss, Shared_Clean,
 * Shared_Dirty, Private_Clean, Private_Dirty, Referenced) is the classic
 * Android-kernel smaps layout.
 * NOTE(review): sscanf uses %d into unsigned variables, and several
 * parsed values (size, resident, referenced, start) are never used —
 * harmless but worth tidying if this is ever enabled. */
static int read_mapinfo(FILE *fp,
        int *sharedPages, int *privatePages)
{
    char line[1024];
    int len;
    int skip;

    unsigned start = 0, size = 0, resident = 0;
    unsigned shared_clean = 0, shared_dirty = 0;
    unsigned private_clean = 0, private_dirty = 0;
    unsigned referenced = 0;

    int isAnon = 0;
    int isHeap = 0;

again:
    skip = 0;

    if(fgets(line, 1024, fp) == 0) return 0;

    len = strlen(line);
    if (len < 1) return 0;
    line[--len] = 0;  // strip the trailing newline

    /* ignore guard pages */
    // NOTE(review): assumes the permission field's 4th char sits at
    // column 18 of the header line — fixed-width smaps format; confirm.
    if (line[18] == '-') skip = 1;

    start = strtoul(line, 0, 16);

    // Column 49 is where the pathname starts in the fixed-width layout.
    if (len > 50 && !strncmp(line + 49, "/tmp/sqlite-heap", strlen("/tmp/sqlite-heap"))) {
        isHeap = 1;
    }

    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Size: %d kB", &size) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Rss: %d kB", &resident) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Shared_Clean: %d kB", &shared_clean) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Shared_Dirty: %d kB", &shared_dirty) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Private_Clean: %d kB", &private_clean) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Private_Dirty: %d kB", &private_dirty) != 1) return 0;
    if (fgets(line, 1024, fp) == 0) return 0;
    if (sscanf(line, "Referenced: %d kB", &referenced) != 1) return 0;

    if (skip) {
        // Guard page: discard this entry and parse the next one.
        goto again;
    }

    if (isHeap) {
        *sharedPages += shared_dirty;
        *privatePages += private_dirty;
    }
    return 1;
}
+
+static void load_maps(int pid, int *sharedPages, int *privatePages)
+{
+ char tmp[128];
+ FILE *fp;
+
+ sprintf(tmp, "/proc/%d/smaps", pid);
+ fp = fopen(tmp, "r");
+ if (fp == 0) return;
+
+ while (read_mapinfo(fp, sharedPages, privatePages) != 0) {
+ // Do nothing
+ }
+ fclose(fp);
+}
+
+static void getHeapDirtyPages(JNIEnv *env, jobject clazz, jintArray pages)
+{
+ int _pages[2];
+
+ _pages[0] = 0;
+ _pages[1] = 0;
+
+ load_maps(getpid(), &_pages[0], &_pages[1]);
+
+ // Convert from kbytes to 4K pages
+ _pages[0] /= 4;
+ _pages[1] /= 4;
+
+ env->SetIntArrayRegion(pages, 0, 2, _pages);
+}
+
+/*
+ * JNI registration.
+ */
+
/* JNI method table for SQLiteDebug: maps the Java `native` methods to
 * the C++ implementations above (registered below). */
static JNINativeMethod gMethods[] =
{
    { "getPagerStats", "(Lnet/sqlcipher/database/SQLiteDebug$PagerStats;)V",
            (void*) getPagerStats },
    { "getHeapSize", "()J", (void*) getHeapSize },
    { "getHeapAllocatedSize", "()J", (void*) getHeapAllocatedSize },
    { "getHeapFreeSize", "()J", (void*) getHeapFreeSize },
    { "getHeapDirtyPages", "([I)V", (void*) getHeapDirtyPages },
};
+
+int register_android_database_SQLiteDebug(JNIEnv *env)
+{
+ jclass clazz;
+
+ clazz = env->FindClass("net/sqlcipher/database/SQLiteDebug$PagerStats");
+ if (clazz == NULL) {
+ LOGE("Can't find net/sqlcipher/database/SQLiteDebug$PagerStats");
+ return -1;
+ }
+
+ gMemoryUsedField = env->GetFieldID(clazz, "memoryUsed", "I");
+ if (gMemoryUsedField == NULL) {
+ LOGE("Can't find memoryUsed");
+ return -1;
+ }
+
+ gLargestMemAllocField = env->GetFieldID(clazz, "largestMemAlloc", "I");
+ if (gLargestMemAllocField == NULL) {
+ LOGE("Can't find largestMemAlloc");
+ return -1;
+ }
+
+ gPageCacheOverfloField = env->GetFieldID(clazz, "pageCacheOverflo", "I");
+ if (gPageCacheOverfloField == NULL) {
+ LOGE("Can't find pageCacheOverflo");
+ return -1;
+ }
+
+ return jniRegisterNativeMethods(env, "net/sqlcipher/database/SQLiteDebug",
+ gMethods, NELEM(gMethods));
+}
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteProgram.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteProgram.cpp
new file mode 100644
index 0000000..2a52607
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteProgram.cpp
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2006-2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "Cursor"
+
+// #include
+// #include
+// #include
+
+#include
+#include
+#include
+#include
+#include
+#include "log.h"
+#include "jni_elements.h"
+#include "jni_exception.h"
+#include "sqlite3_exception.h"
+
+namespace sqlcipher {
+
+static jfieldID gHandleField;
+static jfieldID gStatementField;
+
+
+#define GET_STATEMENT(env, object) \
+ (sqlite3_stmt *)env->GetLongField(object, gStatementField)
+#define GET_HANDLE(env, object) \
+ (sqlite3 *)env->GetLongField(object, gHandleField)
+
+/*
+ * Stub: compilation is performed elsewhere; this native entry point is
+ * intentionally unimplemented and always throws a SQLiteException.
+ */
+static void native_compile(JNIEnv* env, jobject object, jstring sqlString)
+{
+    // Pass the literal directly: the original strcpy'd it into a char[65]
+    // buffer that the 64-character message filled exactly, leaving zero
+    // headroom for any future edit of the text.
+    throw_sqlite3_exception(env, GET_HANDLE(env, object),
+        "android_database_SQLiteProgram->native_compile() not implemented");
+}
+
+/*
+ * Bind SQL NULL to the 1-based parameter `index` of this program's
+ * prepared statement.  Throws a SQLiteException on failure.
+ */
+static void native_bind_null(JNIEnv* env, jobject object,
+                             jint index)
+{
+    int err;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+    err = sqlite3_bind_null(statement, index);
+    if (err != SQLITE_OK) {
+        // snprintf instead of sprintf: bounded even if the %p rendering
+        // ever grows past the buffer.
+        char buf[32];
+        snprintf(buf, sizeof(buf), "handle %p", statement);
+        throw_sqlite3_exception(env, GET_HANDLE(env, object), buf);
+        return;
+    }
+}
+
+/*
+ * Bind a 64-bit integer `value` to the 1-based parameter `index` of this
+ * program's prepared statement.  Throws a SQLiteException on failure.
+ */
+static void native_bind_long(JNIEnv* env, jobject object,
+                             jint index, jlong value)
+{
+    int err;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+    err = sqlite3_bind_int64(statement, index, value);
+    if (err != SQLITE_OK) {
+        // snprintf instead of sprintf: bounded formatting.
+        char buf[32];
+        snprintf(buf, sizeof(buf), "handle %p", statement);
+        throw_sqlite3_exception(env, GET_HANDLE(env, object), buf);
+        return;
+    }
+}
+
+/*
+ * Bind a double `value` to the 1-based parameter `index` of this program's
+ * prepared statement.  Throws a SQLiteException on failure.
+ */
+static void native_bind_double(JNIEnv* env, jobject object,
+                               jint index, jdouble value)
+{
+    int err;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+    err = sqlite3_bind_double(statement, index, value);
+    if (err != SQLITE_OK) {
+        // snprintf instead of sprintf: bounded formatting.
+        char buf[32];
+        snprintf(buf, sizeof(buf), "handle %p", statement);
+        throw_sqlite3_exception(env, GET_HANDLE(env, object), buf);
+        return;
+    }
+}
+
+/*
+ * Bind a Java string as UTF-16 text to the 1-based parameter `index`.
+ * SQLITE_TRANSIENT forces SQLite to take its own copy, so the JNI chars
+ * can be released immediately.  Throws a SQLiteException on failure.
+ */
+static void native_bind_string(JNIEnv* env, jobject object,
+                               jint index, jstring sqlString)
+{
+    int err;
+    jchar const * sql;
+    jsize sqlLen;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+    sql = env->GetStringChars(sqlString, NULL);
+    sqlLen = env->GetStringLength(sqlString);
+    // GetStringLength is in UTF-16 code units; bind_text16 wants bytes.
+    err = sqlite3_bind_text16(statement, index, sql, sqlLen * 2, SQLITE_TRANSIENT);
+    env->ReleaseStringChars(sqlString, sql);
+    if (err != SQLITE_OK) {
+        // snprintf instead of sprintf: bounded formatting.
+        char buf[32];
+        snprintf(buf, sizeof(buf), "handle %p", statement);
+        throw_sqlite3_exception(env, GET_HANDLE(env, object), buf);
+        return;
+    }
+}
+
+/*
+ * Bind a Java byte[] as a BLOB to the 1-based parameter `index`.
+ * SQLITE_TRANSIENT forces SQLite to copy, so the array elements are
+ * released (JNI_ABORT: no write-back) right after the bind.
+ * Throws a SQLiteException on failure.
+ */
+static void native_bind_blob(JNIEnv* env, jobject object,
+                             jint index, jbyteArray value)
+{
+    // (Removed unused locals `sql`/`sqlLen` left over from bind_string.)
+    int err;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+
+    jint len = env->GetArrayLength(value);
+    jbyte * bytes = env->GetByteArrayElements(value, NULL);
+
+    err = sqlite3_bind_blob(statement, index, bytes, len, SQLITE_TRANSIENT);
+    env->ReleaseByteArrayElements(value, bytes, JNI_ABORT);
+
+    if (err != SQLITE_OK) {
+        // snprintf instead of sprintf: bounded formatting.
+        char buf[32];
+        snprintf(buf, sizeof(buf), "statement %p", statement);
+        throw_sqlite3_exception(env, GET_HANDLE(env, object), buf);
+        return;
+    }
+}
+
+/* Reset every bound parameter of this program's statement back to NULL. */
+static void native_clear_bindings(JNIEnv* env, jobject object)
+{
+    sqlite3_stmt * stmt = GET_STATEMENT(env, object);
+    const int rc = sqlite3_clear_bindings(stmt);
+    if (rc != SQLITE_OK) {
+        throw_sqlite3_exception(env, GET_HANDLE(env, object));
+    }
+}
+
+/*
+ * Stub: finalization is performed elsewhere; this native entry point is
+ * intentionally unimplemented and always throws a SQLiteException.
+ */
+static void native_finalize(JNIEnv* env, jobject object)
+{
+    // Pass the literal directly: the original strcpy'd it into a char[66]
+    // buffer that the 65-character message filled exactly — no headroom.
+    throw_sqlite3_exception(env, GET_HANDLE(env, object),
+        "android_database_SQLiteProgram->native_finalize() not implemented");
+}
+
+
+/*
+ * JNI method table for net.sqlcipher.database.SQLiteProgram.
+ * NOTE(review): native_compile and native_finalize are defined above but
+ * not listed here, so they are unreachable from Java — confirm intent.
+ */
+static JNINativeMethod sMethods[] =
+{
+    /* name, signature, funcPtr */
+    {"native_bind_null", "(I)V", (void *)native_bind_null},
+    {"native_bind_long", "(IJ)V", (void *)native_bind_long},
+    {"native_bind_double", "(ID)V", (void *)native_bind_double},
+    {"native_bind_string", "(ILjava/lang/String;)V", (void *)native_bind_string},
+    {"native_bind_blob", "(I[B)V", (void *)native_bind_blob},
+    {"native_clear_bindings", "()V", (void *)native_clear_bindings},
+};
+
+/*
+ * Resolve SQLiteProgram's nHandle/nStatement long fields and install its
+ * native method table.  Returns -1 on lookup failure, otherwise the result
+ * of RegisterNatives.
+ */
+int register_android_database_SQLiteProgram(JNIEnv * env)
+{
+    jclass programClass = env->FindClass("net/sqlcipher/database/SQLiteProgram");
+    if (programClass == NULL) {
+        LOGE("Can't find net/sqlcipher/database/SQLiteProgram");
+        return -1;
+    }
+
+    gHandleField = env->GetFieldID(programClass, "nHandle", "J");
+    gStatementField = env->GetFieldID(programClass, "nStatement", "J");
+    if (gHandleField == NULL || gStatementField == NULL) {
+        LOGE("Error locating fields");
+        return -1;
+    }
+
+    return env->RegisterNatives(programClass, sMethods, NELEM(sMethods));
+}
+
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteQuery.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteQuery.cpp
new file mode 100644
index 0000000..8dcf139
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteQuery.cpp
@@ -0,0 +1,395 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef LOG_TAG
+#define LOG_TAG "Cursor"
+
+#include
+// #include
+// #include
+// #include
+
+#include
+#include
+#include
+#include
+#include "log.h"
+#include "jni_elements.h"
+#include "jni_exception.h"
+#include "CursorWindow.h"
+#include "sqlite3_exception.h"
+
+namespace sqlcipher {
+
+CursorWindow * get_window_from_object(JNIEnv * env, jobject javaWindow);
+
+sqlite3_stmt * compile(JNIEnv* env, jobject object,
+ sqlite3 * handle, jstring sqlString);
+
+static jfieldID gHandleField;
+static jfieldID gStatementField;
+
+
+#define GET_STATEMENT(env, object) \
+ (sqlite3_stmt *)env->GetLongField(object, gStatementField)
+#define GET_HANDLE(env, object) \
+ (sqlite3 *)env->GetLongField(object, gHandleField)
+
+/*
+ * Step `statement` past up to maxRows rows without copying any data.
+ * Returns the number of rows actually skipped when the result set ends
+ * early, maxRows otherwise, or -1 on a hard SQLite error.
+ * SQLITE_LOCKED/SQLITE_BUSY is retried up to 50 times, sleeping 1ms
+ * between attempts.
+ *
+ * NOTE(review): each busy retry still advances `i`, consuming a row of the
+ * skip budget, and the retry-exhausted `break` falls through to
+ * `return maxRows` even though fewer rows were skipped — confirm intent.
+ */
+static int skip_rows(sqlite3_stmt *statement, int maxRows) {
+    int retryCount = 0;
+    for (int i = 0; i < maxRows; i++) {
+        int err = sqlite3_step(statement);
+        if (err == SQLITE_ROW){
+            // Row exists; nothing to copy while skipping.
+        } else if (err == SQLITE_DONE) {
+            return i;
+        } else if (err == SQLITE_LOCKED || err == SQLITE_BUSY) {
+            // The table is locked, retry
+            LOG_WINDOW("Database locked, retrying");
+            if (retryCount > 50) {
+                LOGE("Bailing on database busy retry");  // typo "rety" fixed
+                break;
+            }
+            // Sleep to give the thread holding the lock a chance to finish
+            usleep(1000);
+            retryCount++;
+            continue;
+        } else {
+            return -1;
+        }
+    }
+    LOGD("skip_rows row %d", maxRows);
+    return maxRows;
+}
+
+/*
+ * Step `statement` to completion, counting the remaining rows, then reset
+ * it for reuse.  Used to report a total row count once the window is full.
+ * SQLITE_LOCKED/SQLITE_BUSY is retried up to 50 times with a 1ms sleep;
+ * any other non-row result simply terminates the count (no exception).
+ */
+static int finish_program_and_get_row_count(sqlite3_stmt *statement) {
+    int numRows = 0;
+    int retryCount = 0;
+    while (true) {
+        int err = sqlite3_step(statement);
+        if (err == SQLITE_ROW){
+            numRows++;
+        } else if (err == SQLITE_LOCKED || err == SQLITE_BUSY) {
+            // The table is locked, retry
+            LOG_WINDOW("Database locked, retrying");
+            if (retryCount > 50) {
+                LOGE("Bailing on database busy retry");  // typo "rety" fixed
+                break;
+            }
+            // Sleep to give the thread holding the lock a chance to finish
+            usleep(1000);
+            retryCount++;
+            continue;
+        } else {
+            // SQLITE_DONE or a hard error: no need to throw, just stop.
+            break;
+        }
+    }
+    sqlite3_reset(statement);
+    LOGD("finish_program_and_get_row_count row %d", numRows);
+    return numRows;
+}
+
+/*
+ * Fill `javaWindow` with rows stepped from this query's prepared statement.
+ *
+ *   startPos     absolute index of the first row to load; earlier rows are
+ *                skipped via skip_rows().
+ *   requiredPos  row that must end up inside the window; the window may be
+ *                cleared and refilled to make room for it.
+ *   offsetParam  1-based bind index of the SQL OFFSET parameter, or 0 when
+ *                there is nothing to bind.
+ *   maxRead      additional row budget on top of lastPos.
+ *   lastPos      number of rows already present in the window.
+ *
+ * Returns 0 on error (after throwing), -1 when stopped mid-row-set, the
+ * total row count (+1) when the window filled before requiredPos was
+ * reached, or startPos + rows-copied on normal completion.
+ */
+static jint native_fill_window(JNIEnv* env, jobject object, jobject javaWindow,
+                               jint startPos, jint requiredPos,
+                               jint offsetParam, jint maxRead, jint lastPos)
+{
+    // NOTE(review): `err` is read after the while loop; if the loop body
+    // never runs (startPos == 0 and maxRead param <= 0) it is read
+    // uninitialized — confirm callers always pass maxRead > 0.
+    int err;
+    sqlite3_stmt * statement = GET_STATEMENT(env, object);
+    int numRows = lastPos;
+    maxRead += lastPos;
+    int numColumns;
+    int retryCount;
+    int boundParams;  // unused
+    CursorWindow * window;
+
+    if (statement == NULL) {
+        LOGE("Invalid statement in fillWindow()");
+        jniThrowException(env, "java/lang/IllegalStateException",
+                          "Attempting to access a deactivated, closed, or empty cursor");
+        return 0;
+    }
+
+    // Only do the binding if there is a valid offsetParam. If no binding needs to be done
+    // offsetParam will be set to 0, an invalid value.
+    if(offsetParam > 0) {
+        // Bind the offset parameter, telling the program which row to start with
+        err = sqlite3_bind_int(statement, offsetParam, startPos);
+        if (err != SQLITE_OK) {
+            LOGE("Unable to bind offset position, offsetParam = %d", offsetParam);
+            jniThrowException(env, "java/lang/IllegalArgumentException",
+                              sqlite3_errmsg(GET_HANDLE(env, object)));
+            return 0;
+        }
+        LOG_WINDOW("Bound to startPos %d", startPos);
+    } else {
+        LOG_WINDOW("Not binding to startPos %d", startPos);
+    }
+
+    // Get the native window backing the Java CursorWindow object.
+    window = get_window_from_object(env, javaWindow);
+    if (!window) {
+        LOGE("Invalid CursorWindow");
+        jniThrowException(env, "java/lang/IllegalArgumentException",
+                          "Bad CursorWindow");
+        return 0;
+    }
+    LOG_WINDOW("Window: numRows = %d, size = %d, freeSpace = %d", window->getNumRows(), window->size(), window->freeSpace());
+
+    numColumns = sqlite3_column_count(statement);
+    if (!window->setNumColumns(numColumns)) {
+        LOGE("Failed to change column count from %d to %d", window->getNumColumns(), numColumns);
+        jniThrowException(env, "java/lang/IllegalStateException", "numColumns mismatch");
+        return 0;
+    }
+
+    retryCount = 0;
+    if (startPos > 0) {
+        // Advance past the rows before the requested window start.
+        int num = skip_rows(statement, startPos);
+        if (num < 0) {
+            throw_sqlite3_exception(env, GET_HANDLE(env, object));
+            return 0;
+        } else if (num < startPos) {
+            LOGE("startPos %d > actual rows %d", startPos, num);
+            return num;
+        }
+    }
+
+    // Main copy loop: one iteration per fetched row (or busy retry).
+    while(startPos != 0 || numRows < maxRead) {
+        err = sqlite3_step(statement);
+        if (err == SQLITE_ROW) {
+            LOG_WINDOW("\nStepped statement %p to row %d", statement, startPos + numRows);
+            retryCount = 0;
+
+            // Allocate a new field directory for the row. This pointer is not reused
+            // since it may be possible for it to be relocated on a call to alloc() when
+            // the field data is being allocated.
+            {
+                field_slot_t * fieldDir = window->allocRow();
+                if(!fieldDir && (startPos + numRows) < requiredPos) {
+                    // Window is full but the required row isn't in it yet:
+                    // drop everything and start the window over.
+                    LOG_WINDOW("Failed to allocate row, resetting window", startPos + numRows);
+                    window->clear();
+                    window->setNumColumns(numColumns);
+                    fieldDir = window->allocRow();
+                    LOG_WINDOW("Window reset, row allocated at %p", fieldDir);
+                }
+                if (!fieldDir) {
+                    LOGE("Failed allocating fieldDir at startPos %d row %d", startPos, numRows);
+                    return startPos + numRows + finish_program_and_get_row_count(statement) + 1;
+                }
+            }
+
+            // Pack the row into the window, one column at a time.
+            int i;
+            bool failed = false;
+            bool reset = false;
+            for (i = 0; i < numColumns; i++) {
+
+                // A previous column ran out of window space: either reset
+                // the window (first time) and redo the row from column 0,
+                // or give up and report the total row count.
+                if(reset) {
+                    LOG_WINDOW("Reset requested for row %d, likely cursor window not large enough for current row\n",
+                               startPos + numRows);
+                    if(!failed && (startPos + numRows) < requiredPos) {
+                        LOG_WINDOW("Reseting window, previously unable to map required row %d into window\n",
+                                   requiredPos);
+                        i = 0;
+                        window->clear();
+                        window->setNumColumns(numColumns);
+                        field_slot_t * fieldDir = window->allocRow();
+                        if(!fieldDir) {
+                            LOG_WINDOW("Failed to allocate row in reset, bailing\n");
+                            jniThrowException(env, "net/sqlcipher/RowAllocationException",
+                                              "Failed to allocate row in reset within native_fill_window");
+                        } else {
+                            LOG_WINDOW("Allocated row in reset set\n");
+                        }
+                    } else {
+                        LOG_WINDOW("Bailing from reset, requested row %d already mapped in cursor window\n",
+                                   startPos + numRows);
+                        return startPos + numRows + finish_program_and_get_row_count(statement) + 1;
+                    }
+                    failed = true;
+                    reset = false;
+                }
+
+                int type = sqlite3_column_type(statement, i);
+                if (type == SQLITE_TEXT) {
+                    // TEXT data, stored in the window as UTF-16 bytes.
+                    uint8_t const * text = (uint8_t const *)sqlite3_column_text16(statement, i);
+                    size_t size = sqlite3_column_bytes16(statement, i);
+                    int offset = window->alloc(size);
+                    if (!offset) {
+                        window->freeLastRow();
+                        LOGE("Failed allocating %u bytes for text/blob at %d,%d", size,
+                             startPos + numRows, i);
+                        reset = true;
+                        continue;
+                    }
+
+                    window->copyIn(offset, text, size);
+
+                    // This must be updated after the call to alloc(), since that
+                    // may move the field around in the window
+                    field_slot_t * fieldSlot = window->getFieldSlot(numRows, i);
+                    fieldSlot->type = FIELD_TYPE_STRING;
+                    fieldSlot->data.buffer.offset = offset;
+                    fieldSlot->data.buffer.size = size;
+
+                    LOG_WINDOW("%d,%d is TEXT with %u bytes", startPos + numRows, i, size);
+                } else if (type == SQLITE_INTEGER) {
+                    // INTEGER data
+                    int64_t value = sqlite3_column_int64(statement, i);
+                    if (!window->putLong(numRows, i, value)) {
+                        window->freeLastRow();
+                        LOGE("Failed allocating space for a long in column %d", i);
+                        reset = true;
+                        continue;
+                    }
+                    LOG_WINDOW("%d,%d is INTEGER 0x%016llx", startPos + numRows, i, value);
+                } else if (type == SQLITE_FLOAT) {
+                    // FLOAT data
+                    double value = sqlite3_column_double(statement, i);
+                    if (!window->putDouble(numRows, i, value)) {
+                        window->freeLastRow();
+                        LOGE("Failed allocating space for a double in column %d", i);
+                        reset = true;
+                        continue;
+                    }
+                    LOG_WINDOW("%d,%d is FLOAT %lf", startPos + numRows, i, value);
+                } else if (type == SQLITE_BLOB) {
+                    // BLOB data
+                    uint8_t const * blob = (uint8_t const *)sqlite3_column_blob(statement, i);
+                    // NOTE(review): sqlite3_column_bytes16 on a BLOB looks
+                    // wrong — the UTF-16 byte count may differ from the raw
+                    // blob length; upstream AOSP uses sqlite3_column_bytes
+                    // here.  Confirm before changing.
+                    size_t size = sqlite3_column_bytes16(statement, i);
+                    int offset = window->alloc(size);
+                    if (!offset) {
+                        window->freeLastRow();
+                        LOGE("Failed allocating %u bytes for blob at %d,%d", size,
+                             startPos + numRows, i);
+                        reset = true;
+                        continue;
+                    }
+                    window->copyIn(offset, blob, size);
+                    // This must be updated after the call to alloc(), since that
+                    // may move the field around in the window
+                    field_slot_t * fieldSlot = window->getFieldSlot(numRows, i);
+                    fieldSlot->type = FIELD_TYPE_BLOB;
+                    fieldSlot->data.buffer.offset = offset;
+                    fieldSlot->data.buffer.size = size;
+
+                    LOG_WINDOW("%d,%d is Blob with %u bytes @ %d", startPos + numRows, i, size, offset);
+                } else if (type == SQLITE_NULL) {
+                    // NULL field
+                    window->putNull(numRows, i);
+
+                    LOG_WINDOW("%d,%d is NULL", startPos + numRows, i);
+                } else {
+                    // Unknown data
+                    LOGE("Unknown column type when filling database window");
+                    throw_sqlite3_exception(env, "Unknown column type when filling window");
+                    break;
+                }
+            }
+
+            if (i < numColumns) {
+                // Not all the fields fit in the window
+                // Unknown data error happened
+                break;
+            }
+
+            // Mark the row as complete in the window
+            numRows++;
+        } else if (err == SQLITE_DONE) {
+            // All rows processed, bail
+            LOG_WINDOW("Processed all rows");
+            break;
+        } else if (err == SQLITE_LOCKED || err == SQLITE_BUSY) {
+            // The table is locked, retry
+            LOG_WINDOW("Database locked, retrying");
+            if (retryCount > 50) {
+                LOGE("Bailing on database busy rety");
+                break;
+            }
+
+            // Sleep to give the thread holding the lock a chance to finish
+            usleep(1000);
+
+            retryCount++;
+            continue;
+        } else {
+            throw_sqlite3_exception(env, GET_HANDLE(env, object));
+            break;
+        }
+    }
+
+    LOG_WINDOW("Resetting statement %p after fetching %d rows in %d bytes\n\n\n\n", statement,
+               numRows, window->size() - window->freeSpace());
+// LOGI("Filled window with %d rows in %d bytes", numRows, window->size() - window->freeSpace());
+    // -1 tells the Java side the statement was left mid-row-set (not reset).
+    if (err == SQLITE_ROW) {
+        return -1;
+    } else {
+        sqlite3_reset(statement);
+        return startPos + numRows;
+    }
+}
+
+/* Number of result columns produced by this query's prepared statement. */
+static jint native_column_count(JNIEnv* env, jobject object)
+{
+    return sqlite3_column_count(GET_STATEMENT(env, object));
+}
+
+/* Name of result column `columnIndex` (0-based), as a Java string. */
+static jstring native_column_name(JNIEnv* env, jobject object, jint columnIndex)
+{
+    sqlite3_stmt * stmt = GET_STATEMENT(env, object);
+    char const * columnName = sqlite3_column_name(stmt, columnIndex);
+    return env->NewStringUTF(columnName);
+}
+
+
+/* JNI method table for net.sqlcipher.database.SQLiteQuery. */
+static JNINativeMethod sMethods[] =
+{
+    /* name, signature, funcPtr */
+    {"native_fill_window", "(Lnet/sqlcipher/CursorWindow;IIIII)I", (void *)native_fill_window},
+    {"native_column_count", "()I", (void*)native_column_count},
+    {"native_column_name", "(I)Ljava/lang/String;", (void *)native_column_name},
+};
+
+
+/*
+ * Resolve SQLiteQuery's nHandle/nStatement long fields and install its
+ * native method table.  Returns -1 on lookup failure, otherwise the result
+ * of RegisterNatives.
+ */
+int register_android_database_SQLiteQuery(JNIEnv * env)
+{
+    jclass queryClass = env->FindClass("net/sqlcipher/database/SQLiteQuery");
+    if (queryClass == NULL) {
+        LOGE("Can't find net/sqlcipher/database/SQLiteQuery");
+        return -1;
+    }
+
+    gHandleField = env->GetFieldID(queryClass, "nHandle", "J");
+    gStatementField = env->GetFieldID(queryClass, "nStatement", "J");
+    if (gHandleField == NULL || gStatementField == NULL) {
+        LOGE("Error locating fields");
+        return -1;
+    }
+
+    return env->RegisterNatives(queryClass, sMethods, NELEM(sMethods));
+}
+
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteStatement.cpp b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteStatement.cpp
new file mode 100644
index 0000000..c4e757e
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/net_sqlcipher_database_SQLiteStatement.cpp
@@ -0,0 +1,150 @@
+/* //device/libs/android_runtime/android_database_SQLiteCursor.cpp
+**
+** Copyright 2006, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#undef LOG_TAG
+#define LOG_TAG "Cursor"
+
+#include
+#include
+#include
+#include
+#include
+
+#include "log.h"
+#include "jni_elements.h"
+#include "sqlite3_exception.h"
+
+namespace sqlcipher {
+
+
+sqlite3_stmt * compile(JNIEnv* env, jobject object,
+ sqlite3 * handle, jstring sqlString);
+
+static jfieldID gHandleField;
+static jfieldID gStatementField;
+
+
+#define GET_STATEMENT(env, object) \
+ (sqlite3_stmt *)env->GetLongField(object, gStatementField)
+#define GET_HANDLE(env, object) \
+ (sqlite3 *)env->GetLongField(object, gHandleField)
+
+
+/*
+ * Run a statement that yields no rows (INSERT/UPDATE/DELETE/DDL).
+ * Throws if sqlite3_step() reports anything other than SQLITE_DONE, then
+ * resets the statement so it is ready for reuse either way.
+ */
+static void native_execute(JNIEnv* env, jobject object)
+{
+    sqlite3 * db = GET_HANDLE(env, object);
+    sqlite3_stmt * stmt = GET_STATEMENT(env, object);
+
+    const int rc = sqlite3_step(stmt);
+    if (rc != SQLITE_DONE) {
+        throw_sqlite3_exception_errcode(env, rc, sqlite3_errmsg(db));
+    }
+
+    sqlite3_reset(stmt);
+}
+
+/*
+ * Execute a single-row, single-column query and return column 0 as a long.
+ * Throws if the first step does not produce a row; always resets the
+ * statement afterwards.  Returns -1 when an exception was thrown.
+ */
+static jlong native_1x1_long(JNIEnv* env, jobject object)
+{
+    sqlite3 * db = GET_HANDLE(env, object);
+    sqlite3_stmt * stmt = GET_STATEMENT(env, object);
+    jlong result = -1;
+
+    const int rc = sqlite3_step(stmt);
+    if (rc == SQLITE_ROW) {
+        result = sqlite3_column_int64(stmt, 0);
+    } else {
+        throw_sqlite3_exception_errcode(env, rc, sqlite3_errmsg(db));
+    }
+
+    sqlite3_reset(stmt);
+    return result;
+}
+
+/*
+ * Execute a single-row, single-column query and return column 0 as a Java
+ * string (read as UTF-16 to avoid a UTF-8 round trip).  Throws if the first
+ * step does not produce a row; always resets the statement afterwards.
+ * Returns NULL when the column is NULL or an exception was thrown.
+ */
+static jstring native_1x1_string(JNIEnv* env, jobject object)
+{
+    sqlite3 * db = GET_HANDLE(env, object);
+    sqlite3_stmt * stmt = GET_STATEMENT(env, object);
+    jstring result = NULL;
+
+    const int rc = sqlite3_step(stmt);
+    if (rc == SQLITE_ROW) {
+        const jchar * text = (const jchar *) sqlite3_column_text16(stmt, 0);
+        if (text != NULL) {
+            // bytes16 counts bytes; NewString wants UTF-16 code units.
+            const jint units = sqlite3_column_bytes16(stmt, 0) / sizeof(jchar);
+            result = env->NewString(text, units);
+        }
+    } else {
+        throw_sqlite3_exception_errcode(env, rc, sqlite3_errmsg(db));
+    }
+
+    sqlite3_reset(stmt);
+    return result;
+}
+
+
+/* JNI method table for net.sqlcipher.database.SQLiteStatement. */
+static JNINativeMethod sMethods[] =
+{
+    /* name, signature, funcPtr */
+    {"native_execute", "()V", (void *)native_execute},
+    {"native_1x1_long", "()J", (void *)native_1x1_long},
+    {"native_1x1_string", "()Ljava/lang/String;", (void *)native_1x1_string},
+};
+
+
+/*
+ * Resolve SQLiteStatement's nHandle/nStatement long fields and install its
+ * native method table.  Returns -1 on lookup failure, otherwise the result
+ * of RegisterNatives.
+ */
+int register_android_database_SQLiteStatement(JNIEnv * env)
+{
+    jclass statementClass = env->FindClass("net/sqlcipher/database/SQLiteStatement");
+    if (statementClass == NULL) {
+        LOGE("Can't find net/sqlcipher/database/SQLiteStatement");
+        return -1;
+    }
+
+    gHandleField = env->GetFieldID(statementClass, "nHandle", "J");
+    gStatementField = env->GetFieldID(statementClass, "nStatement", "J");
+    if (gHandleField == NULL || gStatementField == NULL) {
+        LOGE("Error locating fields");
+        return -1;
+    }
+
+    return env->RegisterNatives(statementClass, sMethods, NELEM(sMethods));
+}
+
+
+} // namespace sqlcipher
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlcipher_loading.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlcipher_loading.h
new file mode 100644
index 0000000..c9201ed
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlcipher_loading.h
@@ -0,0 +1,42 @@
+/* //device/libs/include/android_runtime/sqlite3_exception.h
+**
+** Copyright 2007, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+// Missing include guard added: double inclusion would redeclare (harmless
+// for prototypes, but the sibling sqlite3_exception.h guards, so match it).
+#ifndef _SQLCIPHER_LOADING_H
+#define _SQLCIPHER_LOADING_H 1
+
+// NOTE(review): include targets appear to have been stripped from this copy
+// (presumably <jni.h>, mirroring sqlite3_exception.h) — restore before build.
+#include
+/* #include */
+/* #include */
+
+#include
+
+namespace sqlcipher {
+
+/* One registration hook per JNI-backed class: each resolves its Java class,
+ * caches field IDs, and installs the native method table.  All return a
+ * negative value on failure. */
+
+int register_android_database_SQLiteDatabase(JNIEnv *env);
+
+int register_android_database_SQLiteCompiledSql(JNIEnv * env);
+
+int register_android_database_SQLiteQuery(JNIEnv * env);
+
+int register_android_database_SQLiteProgram(JNIEnv * env);
+
+int register_android_database_SQLiteStatement(JNIEnv * env);
+
+int register_android_database_SQLiteDebug(JNIEnv *env);
+
+int register_android_database_CursorWindow(JNIEnv *env);
+
+}
+
+#endif // _SQLCIPHER_LOADING_H
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlite3_exception.h b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlite3_exception.h
new file mode 100644
index 0000000..866d982
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/cpp/sqlite3_exception.h
@@ -0,0 +1,47 @@
+/* //device/libs/include/android_runtime/sqlite3_exception.h
+**
+** Copyright 2007, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef _SQLITE3_EXCEPTION_H
+#define _SQLITE3_EXCEPTION_H 1
+
+#include
+/* #include */
+/* #include */
+
+#include
+
+namespace sqlcipher {
+
+/* Overload set used by the JNI glue to surface SQLite errors to Java as
+ * SQLiteException (or a subclass selected from the error code). */
+
+/* throw a SQLiteException with a message appropriate for the error in handle */
+void throw_sqlite3_exception(JNIEnv* env, sqlite3* handle);
+
+/* throw a SQLiteException with the given message */
+void throw_sqlite3_exception(JNIEnv* env, const char* message);
+
+/* throw a SQLiteException with a message appropriate for the error in handle
+   concatenated with the given message
+ */
+void throw_sqlite3_exception(JNIEnv* env, sqlite3* handle, const char* message);
+
+/* throw a SQLiteException for a given error code */
+void throw_sqlite3_exception_errcode(JNIEnv* env, int errcode, const char* message);
+
+void throw_sqlite3_exception(JNIEnv* env, int errcode,
+                             const char* sqlite3Message, const char* message);
+}
+
+#endif // _SQLITE3_EXCEPTION_H
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/external/openssl-1.1.1b.tar.gz b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/external/openssl-1.1.1b.tar.gz
new file mode 100644
index 0000000..cdcb6ce
Binary files /dev/null and b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/external/openssl-1.1.1b.tar.gz differ
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractCursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractCursor.java
new file mode 100644
index 0000000..f3eaf8a
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractCursor.java
@@ -0,0 +1,627 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import java.lang.ref.WeakReference;
+import java.util.HashMap;
+import java.util.Map;
+
+import android.content.ContentResolver;
+import android.database.CharArrayBuffer;
+import android.database.ContentObservable;
+import android.database.DataSetObservable;
+import android.database.DataSetObserver;
+import android.database.ContentObserver;
+import android.net.Uri;
+import android.os.Bundle;
+import android.util.Config;
+import android.util.Log;
+
+
+/**
+ * This is an abstract cursor class that handles a lot of the common code
+ * that all cursors need to deal with and is provided for convenience reasons.
+ */
+public abstract class AbstractCursor implements android.database.CrossProcessCursor, net.sqlcipher.Cursor {
+ private static final String TAG = "Cursor";
+
+ DataSetObservable mDataSetObservable = new DataSetObservable();
+ ContentObservable mContentObservable = new ContentObservable();
+
+ private Bundle mExtras = Bundle.EMPTY;
+
+ /* -------------------------------------------------------- */
+ /* These need to be implemented by subclasses */
+ abstract public int getCount();
+
+ abstract public String[] getColumnNames();
+
+ abstract public String getString(int column);
+ abstract public short getShort(int column);
+ abstract public int getInt(int column);
+ abstract public long getLong(int column);
+ abstract public float getFloat(int column);
+ abstract public double getDouble(int column);
+ abstract public boolean isNull(int column);
+
+ abstract public int getType(int column);
+
+ // TODO implement getBlob in all cursor types
+ public byte[] getBlob(int column) {
+ throw new UnsupportedOperationException("getBlob is not supported");
+ }
+ /* -------------------------------------------------------- */
+ /* Methods that may optionally be implemented by subclasses */
+
+ /**
+ * returns a pre-filled window, return NULL if no such window
+ */
+ public CursorWindow getWindow() {
+ return null;
+ }
+
+ public int getColumnCount() {
+ return getColumnNames().length;
+ }
+
+ public void deactivate() {
+ deactivateInternal();
+ }
+
+ /**
+ * @hide
+ */
+ public void deactivateInternal() {
+ if (mSelfObserver != null) {
+ mContentResolver.unregisterContentObserver(mSelfObserver);
+ mSelfObserverRegistered = false;
+ }
+ mDataSetObservable.notifyInvalidated();
+ }
+
+ public boolean requery() {
+ if (mSelfObserver != null && mSelfObserverRegistered == false) {
+
+ mContentResolver.registerContentObserver(mNotifyUri, true, mSelfObserver);
+ mSelfObserverRegistered = true;
+ }
+ mDataSetObservable.notifyChanged();
+ return true;
+ }
+
+ public boolean isClosed() {
+ return mClosed;
+ }
+
+ public void close() {
+ mClosed = true;
+ mContentObservable.unregisterAll();
+ deactivateInternal();
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean commitUpdates(Map<? extends Long, ? extends Map<String, Object>> values) {
+ return false;
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean deleteRow() {
+ return false;
+ }
+
+ /**
+ * This function is called every time the cursor is successfully scrolled
+ * to a new position, giving the subclass a chance to update any state it
+ * may have. If it returns false the move function will also do so and the
+ * cursor will scroll to the beforeFirst position.
+ *
+ * @param oldPosition the position that we're moving from
+ * @param newPosition the position that we're moving to
+ * @return true if the move is successful, false otherwise
+ */
+ public boolean onMove(int oldPosition, int newPosition) {
+ return true;
+ }
+
+
+ public void copyStringToBuffer(int columnIndex, CharArrayBuffer buffer) {
+ // Default implementation, uses getString
+ String result = getString(columnIndex);
+ if (result != null) {
+ char[] data = buffer.data;
+ if (data == null || data.length < result.length()) {
+ buffer.data = result.toCharArray();
+ } else {
+ result.getChars(0, result.length(), data, 0);
+ }
+ buffer.sizeCopied = result.length();
+ } else {
+ buffer.sizeCopied = 0;
+ }
+ }
+
+ /* -------------------------------------------------------- */
+ /* Implementation */
+ public AbstractCursor() {
+ mPos = -1;
+ mRowIdColumnIndex = -1;
+ mCurrentRowID = null;
+ mUpdatedRows = new HashMap<Long, Map<String, Object>>();
+ }
+
+ public final int getPosition() {
+ return mPos;
+ }
+
+ public final boolean moveToPosition(int position) {
+ // Make sure position isn't past the end of the cursor
+ final int count = getCount();
+ if (position >= count) {
+ mPos = count;
+ return false;
+ }
+
+ // Make sure position isn't before the beginning of the cursor
+ if (position < 0) {
+ mPos = -1;
+ return false;
+ }
+
+ // Check for no-op moves, and skip the rest of the work for them
+ if (position == mPos) {
+ return true;
+ }
+
+ boolean result = onMove(mPos, position);
+ if (result == false) {
+ mPos = -1;
+ } else {
+ mPos = position;
+ if (mRowIdColumnIndex != -1) {
+ mCurrentRowID = Long.valueOf(getLong(mRowIdColumnIndex));
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Copy data from cursor to CursorWindow
+ * @param position start position of data
+ * @param window
+ */
+ public void fillWindow(int position, android.database.CursorWindow window) {
+ DatabaseUtils.cursorFillWindow(this, position, window);
+ }
+
+ public final boolean move(int offset) {
+ return moveToPosition(mPos + offset);
+ }
+
+ public final boolean moveToFirst() {
+ return moveToPosition(0);
+ }
+
+ public final boolean moveToLast() {
+ return moveToPosition(getCount() - 1);
+ }
+
+ public final boolean moveToNext() {
+ return moveToPosition(mPos + 1);
+ }
+
+ public final boolean moveToPrevious() {
+ return moveToPosition(mPos - 1);
+ }
+
+ public final boolean isFirst() {
+ return mPos == 0 && getCount() != 0;
+ }
+
+ public final boolean isLast() {
+ int cnt = getCount();
+ return mPos == (cnt - 1) && cnt != 0;
+ }
+
+ public final boolean isBeforeFirst() {
+ if (getCount() == 0) {
+ return true;
+ }
+ return mPos == -1;
+ }
+
+ public final boolean isAfterLast() {
+ if (getCount() == 0) {
+ return true;
+ }
+ return mPos == getCount();
+ }
+
+ public int getColumnIndex(String columnName) {
+ // Hack according to bug 903852
+ final int periodIndex = columnName.lastIndexOf('.');
+ if (periodIndex != -1) {
+ Exception e = new Exception();
+ Log.e(TAG, "requesting column name with table name -- " + columnName, e);
+ columnName = columnName.substring(periodIndex + 1);
+ }
+
+ String columnNames[] = getColumnNames();
+ int length = columnNames.length;
+ for (int i = 0; i < length; i++) {
+ if (columnNames[i].equalsIgnoreCase(columnName)) {
+ return i;
+ }
+ }
+
+ if (Config.LOGV) {
+ if (getCount() > 0) {
+ Log.w("AbstractCursor", "Unknown column " + columnName);
+ }
+ }
+ return -1;
+ }
+
+ public int getColumnIndexOrThrow(String columnName) {
+ final int index = getColumnIndex(columnName);
+ if (index < 0) {
+ throw new IllegalArgumentException("column '" + columnName + "' does not exist");
+ }
+ return index;
+ }
+
+ public String getColumnName(int columnIndex) {
+ return getColumnNames()[columnIndex];
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateBlob(int columnIndex, byte[] value) {
+ return update(columnIndex, value);
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateString(int columnIndex, String value) {
+ return update(columnIndex, value);
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateShort(int columnIndex, short value) {
+ return update(columnIndex, Short.valueOf(value));
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateInt(int columnIndex, int value) {
+ return update(columnIndex, Integer.valueOf(value));
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateLong(int columnIndex, long value) {
+ return update(columnIndex, Long.valueOf(value));
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateFloat(int columnIndex, float value) {
+ return update(columnIndex, Float.valueOf(value));
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateDouble(int columnIndex, double value) {
+ return update(columnIndex, Double.valueOf(value));
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean updateToNull(int columnIndex) {
+ return update(columnIndex, null);
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean update(int columnIndex, Object obj) {
+ if (!supportsUpdates()) {
+ return false;
+ }
+
+ // Long.valueOf() returns null sometimes!
+// Long rowid = Long.valueOf(getLong(mRowIdColumnIndex));
+ Long rowid = Long.valueOf(getLong(mRowIdColumnIndex));
+ if (rowid == null) {
+ throw new IllegalStateException("null rowid. mRowIdColumnIndex = " + mRowIdColumnIndex);
+ }
+
+ synchronized(mUpdatedRows) {
+ Map<String, Object> row = mUpdatedRows.get(rowid);
+ if (row == null) {
+ row = new HashMap<String, Object>();
+ mUpdatedRows.put(rowid, row);
+ }
+ row.put(getColumnNames()[columnIndex], obj);
+ }
+
+ return true;
+ }
+
+ /**
+ * Returns true if there are pending updates that have not yet been committed.
+ *
+ * @return true if there are pending updates that have not yet been committed.
+ * @hide
+ * @deprecated
+ */
+ public boolean hasUpdates() {
+ synchronized(mUpdatedRows) {
+ return mUpdatedRows.size() > 0;
+ }
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public void abortUpdates() {
+ synchronized(mUpdatedRows) {
+ mUpdatedRows.clear();
+ }
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean commitUpdates() {
+ return commitUpdates(null);
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ public boolean supportsUpdates() {
+ return mRowIdColumnIndex != -1;
+ }
+
+ public void registerContentObserver(ContentObserver observer) {
+ mContentObservable.registerObserver(observer);
+ }
+
+ public void unregisterContentObserver(ContentObserver observer) {
+ // cursor will unregister all observers when it close
+ if (!mClosed) {
+ mContentObservable.unregisterObserver(observer);
+ }
+ }
+
+ /**
+ * This is hidden until the data set change model has been re-evaluated.
+ * @hide
+ */
+ protected void notifyDataSetChange() {
+ mDataSetObservable.notifyChanged();
+ }
+
+ /**
+ * This is hidden until the data set change model has been re-evaluated.
+ * @hide
+ */
+ protected DataSetObservable getDataSetObservable() {
+ return mDataSetObservable;
+
+ }
+ public void registerDataSetObserver(DataSetObserver observer) {
+ mDataSetObservable.registerObserver(observer);
+
+ }
+
+ public void unregisterDataSetObserver(DataSetObserver observer) {
+ mDataSetObservable.unregisterObserver(observer);
+ }
+
+ /**
+ * Subclasses must call this method when they finish committing updates to notify all
+ * observers.
+ *
+ * @param selfChange
+ */
+ protected void onChange(boolean selfChange) {
+ synchronized (mSelfObserverLock) {
+ mContentObservable.dispatchChange(selfChange);
+ if (mNotifyUri != null && selfChange) {
+ mContentResolver.notifyChange(mNotifyUri, mSelfObserver);
+ }
+ }
+ }
+
+ /**
+ * Specifies a content URI to watch for changes.
+ *
+ * @param cr The content resolver from the caller's context.
+ * @param notifyUri The URI to watch for changes. This can be a
+ * specific row URI, or a base URI for a whole class of content.
+ */
+ public void setNotificationUri(ContentResolver cr, Uri notifyUri) {
+ synchronized (mSelfObserverLock) {
+ mNotifyUri = notifyUri;
+ mContentResolver = cr;
+ if (mSelfObserver != null) {
+ mContentResolver.unregisterContentObserver(mSelfObserver);
+ }
+ mSelfObserver = new SelfContentObserver(this);
+ mContentResolver.registerContentObserver(mNotifyUri, true, mSelfObserver);
+ mSelfObserverRegistered = true;
+ }
+ }
+
+ public Uri getNotificationUri() {
+ return mNotifyUri;
+ }
+
+ public boolean getWantsAllOnMoveCalls() {
+ return false;
+ }
+
+ public void setExtras(Bundle extras) {
+ mExtras = (extras == null) ? Bundle.EMPTY : extras;
+ }
+
+ public Bundle getExtras() {
+ return mExtras;
+ }
+
+ public Bundle respond(Bundle extras) {
+ return Bundle.EMPTY;
+ }
+
+ /**
+ * This function returns true if the field has been updated and is
+ * used in conjunction with {@link #getUpdatedField} to allow subclasses to
+ * support reading uncommitted updates. NOTE: This function and
+ * {@link #getUpdatedField} should be called together inside of a
+ * block synchronized on mUpdatedRows.
+ *
+ * @param columnIndex the column index of the field to check
+ * @return true if the field has been updated, false otherwise
+ */
+ protected boolean isFieldUpdated(int columnIndex) {
+ if (mRowIdColumnIndex != -1 && mUpdatedRows.size() > 0) {
+ Map<String, Object> updates = mUpdatedRows.get(mCurrentRowID);
+ if (updates != null && updates.containsKey(getColumnNames()[columnIndex])) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * This function returns the uncommitted updated value for the field
+ * at columnIndex. NOTE: This function and {@link #isFieldUpdated} should
+ * be called together inside of a block synchronized on mUpdatedRows.
+ *
+ * @param columnIndex the column index of the field to retrieve
+ * @return the updated value
+ */
+ protected Object getUpdatedField(int columnIndex) {
+ Map<String, Object> updates = mUpdatedRows.get(mCurrentRowID);
+ return updates.get(getColumnNames()[columnIndex]);
+ }
+
+ /**
+ * This function throws CursorIndexOutOfBoundsException if
+ * the cursor position is out of bounds. Subclass implementations of
+ * the get functions should call this before attempting
+ * to retrieve data.
+ *
+ * @throws CursorIndexOutOfBoundsException
+ */
+ protected void checkPosition() {
+ if (-1 == mPos || getCount() == mPos) {
+ throw new CursorIndexOutOfBoundsException(mPos, getCount());
+ }
+ }
+
+ @Override
+ protected void finalize() {
+ if (mSelfObserver != null && mSelfObserverRegistered == true) {
+ mContentResolver.unregisterContentObserver(mSelfObserver);
+ }
+ }
+
+ /**
+ * Cursors use this class to track changes others make to their URI.
+ */
+ protected static class SelfContentObserver extends ContentObserver {
+ WeakReference<AbstractCursor> mCursor;
+
+ public SelfContentObserver(AbstractCursor cursor) {
+ super(null);
+ mCursor = new WeakReference<AbstractCursor>(cursor);
+ }
+
+ @Override
+ public boolean deliverSelfNotifications() {
+ return false;
+ }
+
+ @Override
+ public void onChange(boolean selfChange) {
+ AbstractCursor cursor = mCursor.get();
+ if (cursor != null) {
+ cursor.onChange(false);
+ }
+ }
+
+ }
+
+ /**
+ * This HashMap contains a mapping from Long rowIDs to another Map
+ * that maps from String column names to new values. A NULL value means to
+ * remove an existing value, and all numeric values are in their class
+ * forms, i.e. Integer, Long, Float, etc.
+ */
+ protected HashMap<Long, Map<String, Object>> mUpdatedRows;
+
+ /**
+ * This must be set to the index of the row ID column by any
+ * subclass that wishes to support updates.
+ */
+ protected int mRowIdColumnIndex;
+
+ protected int mPos;
+
+ /**
+ * If {@link #mRowIdColumnIndex} is not -1 this contains contains the value of
+ * the column at {@link #mRowIdColumnIndex} for the current row this cursor is
+ * pointing at.
+ */
+ protected Long mCurrentRowID;
+ protected ContentResolver mContentResolver;
+ protected boolean mClosed = false;
+ private Uri mNotifyUri;
+ private ContentObserver mSelfObserver;
+ final private Object mSelfObserverLock = new Object();
+ private boolean mSelfObserverRegistered;
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractWindowedCursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractWindowedCursor.java
new file mode 100644
index 0000000..deb25f3
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/AbstractWindowedCursor.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import android.database.CharArrayBuffer;
+
+/**
+ * A base class for Cursors that store their data in {@link CursorWindow}s.
+ */
+public abstract class AbstractWindowedCursor extends AbstractCursor
+{
+ @Override
+ public byte[] getBlob(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ return (byte[])getUpdatedField(columnIndex);
+ }
+ }
+
+ return mWindow.getBlob(mPos, columnIndex);
+ }
+
+ @Override
+ public String getString(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ return (String)getUpdatedField(columnIndex);
+ }
+ }
+
+ return mWindow.getString(mPos, columnIndex);
+ }
+
+ @Override
+ public void copyStringToBuffer(int columnIndex, CharArrayBuffer buffer)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ super.copyStringToBuffer(columnIndex, buffer);
+ }
+ }
+
+ mWindow.copyStringToBuffer(mPos, columnIndex, buffer);
+ }
+
+ @Override
+ public short getShort(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Number value = (Number)getUpdatedField(columnIndex);
+ return value.shortValue();
+ }
+ }
+
+ return mWindow.getShort(mPos, columnIndex);
+ }
+
+ @Override
+ public int getInt(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Number value = (Number)getUpdatedField(columnIndex);
+ return value.intValue();
+ }
+ }
+
+ return mWindow.getInt(mPos, columnIndex);
+ }
+
+ @Override
+ public long getLong(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Number value = (Number)getUpdatedField(columnIndex);
+ return value.longValue();
+ }
+ }
+
+ return mWindow.getLong(mPos, columnIndex);
+ }
+
+ @Override
+ public float getFloat(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Number value = (Number)getUpdatedField(columnIndex);
+ return value.floatValue();
+ }
+ }
+
+ return mWindow.getFloat(mPos, columnIndex);
+ }
+
+ @Override
+ public double getDouble(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Number value = (Number)getUpdatedField(columnIndex);
+ return value.doubleValue();
+ }
+ }
+
+ return mWindow.getDouble(mPos, columnIndex);
+ }
+
+ @Override
+ public boolean isNull(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ return getUpdatedField(columnIndex) == null;
+ }
+ }
+
+ return mWindow.isNull(mPos, columnIndex);
+ }
+
+ public boolean isBlob(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Object object = getUpdatedField(columnIndex);
+ return object == null || object instanceof byte[];
+ }
+ }
+
+ return mWindow.isBlob(mPos, columnIndex);
+ }
+
+ public boolean isString(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Object object = getUpdatedField(columnIndex);
+ return object == null || object instanceof String;
+ }
+ }
+
+ return mWindow.isString(mPos, columnIndex);
+ }
+
+ public boolean isLong(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Object object = getUpdatedField(columnIndex);
+ return object != null && (object instanceof Integer || object instanceof Long);
+ }
+ }
+
+ return mWindow.isLong(mPos, columnIndex);
+ }
+
+ public boolean isFloat(int columnIndex)
+ {
+ checkPosition();
+
+ synchronized(mUpdatedRows) {
+ if (isFieldUpdated(columnIndex)) {
+ Object object = getUpdatedField(columnIndex);
+ return object != null && (object instanceof Float || object instanceof Double);
+ }
+ }
+
+ return mWindow.isFloat(mPos, columnIndex);
+ }
+
+ @Override
+ public int getType(int columnIndex) {
+ checkPosition();
+ return mWindow.getType(mPos, columnIndex);
+ }
+
+ @Override
+ protected void checkPosition()
+ {
+ super.checkPosition();
+
+ if (mWindow == null) {
+ throw new StaleDataException("Access closed cursor");
+ }
+ }
+
+ @Override
+ public CursorWindow getWindow() {
+ return mWindow;
+ }
+
+ /**
+ * Set a new cursor window to cursor, usually set a remote cursor window
+ * @param window cursor window
+ */
+ public void setWindow(CursorWindow window) {
+ if (mWindow != null) {
+ mWindow.close();
+ }
+ mWindow = window;
+ }
+
+ public boolean hasWindow() {
+ return mWindow != null;
+ }
+
+ /**
+ * This needs be updated in {@link #onMove} by subclasses, and
+ * needs to be set to NULL when the contents of the cursor change.
+ */
+ protected CursorWindow mWindow;
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorNative.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorNative.java
new file mode 100644
index 0000000..868dde6
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorNative.java
@@ -0,0 +1,440 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import android.os.Binder;
+import android.os.RemoteException;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Bundle;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Native implementation of the bulk cursor. This is only for use in implementing
+ * IPC, application code should use the Cursor interface.
+ *
+ * {@hide}
+ */
+public abstract class BulkCursorNative extends Binder implements IBulkCursor
+{
+ public BulkCursorNative()
+ {
+ attachInterface(this, descriptor);
+ }
+
+ /**
+ * Cast a Binder object into a content resolver interface, generating
+ * a proxy if needed.
+ */
+ static public IBulkCursor asInterface(IBinder obj)
+ {
+ if (obj == null) {
+ return null;
+ }
+ IBulkCursor in = (IBulkCursor)obj.queryLocalInterface(descriptor);
+ if (in != null) {
+ return in;
+ }
+
+ return new BulkCursorProxy(obj);
+ }
+
+ @Override
+ public boolean onTransact(int code, Parcel data, Parcel reply, int flags)
+ throws RemoteException {
+ try {
+ switch (code) {
+ case GET_CURSOR_WINDOW_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ int startPos = data.readInt();
+ CursorWindow window = getWindow(startPos);
+ if (window == null) {
+ reply.writeInt(0);
+ return true;
+ }
+ reply.writeNoException();
+ reply.writeInt(1);
+ window.writeToParcel(reply, 0);
+ return true;
+ }
+
+ case COUNT_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ int count = count();
+ reply.writeNoException();
+ reply.writeInt(count);
+ return true;
+ }
+
+ case GET_COLUMN_NAMES_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ String[] columnNames = getColumnNames();
+ reply.writeNoException();
+ reply.writeInt(columnNames.length);
+ int length = columnNames.length;
+ for (int i = 0; i < length; i++) {
+ reply.writeString(columnNames[i]);
+ }
+ return true;
+ }
+
+ case DEACTIVATE_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ deactivate();
+ reply.writeNoException();
+ return true;
+ }
+
+ case CLOSE_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ close();
+ reply.writeNoException();
+ return true;
+ }
+
+ case REQUERY_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ IContentObserver observer =
+ IContentObserver.Stub.asInterface(data.readStrongBinder());
+ CursorWindow window = CursorWindow.CREATOR.createFromParcel(data);
+ int count = requery(observer, window);
+ reply.writeNoException();
+ reply.writeInt(count);
+ reply.writeBundle(getExtras());
+ return true;
+ }
+
+ case UPDATE_ROWS_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ // TODO - what ClassLoader should be passed to readHashMap?
+ // TODO - switch to Bundle
+ HashMap<Long, Map<String, Object>> values = data.readHashMap(null);
+ boolean result = updateRows(values);
+ reply.writeNoException();
+ reply.writeInt((result == true ? 1 : 0));
+ return true;
+ }
+
+ case DELETE_ROW_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ int position = data.readInt();
+ boolean result = deleteRow(position);
+ reply.writeNoException();
+ reply.writeInt((result == true ? 1 : 0));
+ return true;
+ }
+
+ case ON_MOVE_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ int position = data.readInt();
+ onMove(position);
+ reply.writeNoException();
+ return true;
+ }
+
+ case WANTS_ON_MOVE_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ boolean result = getWantsAllOnMoveCalls();
+ reply.writeNoException();
+ reply.writeInt(result ? 1 : 0);
+ return true;
+ }
+
+ case GET_EXTRAS_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ Bundle extras = getExtras();
+ reply.writeNoException();
+ reply.writeBundle(extras);
+ return true;
+ }
+
+ case RESPOND_TRANSACTION: {
+ data.enforceInterface(IBulkCursor.descriptor);
+ Bundle extras = data.readBundle(getClass().getClassLoader());
+ Bundle returnExtras = respond(extras);
+ reply.writeNoException();
+ reply.writeBundle(returnExtras);
+ return true;
+ }
+ }
+ } catch (Exception e) {
+ DatabaseUtils.writeExceptionToParcel(reply, e);
+ return true;
+ }
+
+ return super.onTransact(code, data, reply, flags);
+ }
+
+ public IBinder asBinder()
+ {
+ return this;
+ }
+}
+
+
+final class BulkCursorProxy implements IBulkCursor {
+ private IBinder mRemote;
+ private Bundle mExtras;
+
+ public BulkCursorProxy(IBinder remote)
+ {
+ mRemote = remote;
+ mExtras = null;
+ }
+
+ public IBinder asBinder()
+ {
+ return mRemote;
+ }
+
+ public CursorWindow getWindow(int startPos) throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeInt(startPos);
+
+ mRemote.transact(GET_CURSOR_WINDOW_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ CursorWindow window = null;
+ if (reply.readInt() == 1) {
+ window = CursorWindow.newFromParcel(reply);
+ }
+
+ data.recycle();
+ reply.recycle();
+
+ return window;
+ }
+
+ public void onMove(int position) throws RemoteException {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeInt(position);
+
+ mRemote.transact(ON_MOVE_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ data.recycle();
+ reply.recycle();
+ }
+
+ public int count() throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ boolean result = mRemote.transact(COUNT_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ int count;
+ if (result == false) {
+ count = -1;
+ } else {
+ count = reply.readInt();
+ }
+ data.recycle();
+ reply.recycle();
+ return count;
+ }
+
+ public String[] getColumnNames() throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ mRemote.transact(GET_COLUMN_NAMES_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ String[] columnNames = null;
+ int numColumns = reply.readInt();
+ columnNames = new String[numColumns];
+ for (int i = 0; i < numColumns; i++) {
+ columnNames[i] = reply.readString();
+ }
+
+ data.recycle();
+ reply.recycle();
+ return columnNames;
+ }
+
+ public void deactivate() throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ mRemote.transact(DEACTIVATE_TRANSACTION, data, reply, 0);
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ data.recycle();
+ reply.recycle();
+ }
+
+ public void close() throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ mRemote.transact(CLOSE_TRANSACTION, data, reply, 0);
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ data.recycle();
+ reply.recycle();
+ }
+
+ public int requery(IContentObserver observer, CursorWindow window) throws RemoteException {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeStrongInterface(observer);
+ window.writeToParcel(data, 0);
+
+ boolean result = mRemote.transact(REQUERY_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ int count;
+ if (!result) {
+ count = -1;
+ } else {
+ count = reply.readInt();
+ mExtras = reply.readBundle(getClass().getClassLoader());
+ }
+
+ data.recycle();
+ reply.recycle();
+
+ return count;
+ }
+
+ public boolean updateRows(Map<? extends Long, ? extends Map<String, Object>> values) throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeMap(values);
+
+ mRemote.transact(UPDATE_ROWS_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ boolean result = (reply.readInt() == 1 ? true : false);
+
+ data.recycle();
+ reply.recycle();
+
+ return result;
+ }
+
+ public boolean deleteRow(int position) throws RemoteException
+ {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeInt(position);
+
+ mRemote.transact(DELETE_ROW_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ boolean result = (reply.readInt() == 1 ? true : false);
+
+ data.recycle();
+ reply.recycle();
+
+ return result;
+ }
+
+ public boolean getWantsAllOnMoveCalls() throws RemoteException {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ mRemote.transact(WANTS_ON_MOVE_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ int result = reply.readInt();
+ data.recycle();
+ reply.recycle();
+ return result != 0;
+ }
+
+ public Bundle getExtras() throws RemoteException {
+ if (mExtras == null) {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ mRemote.transact(GET_EXTRAS_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ mExtras = reply.readBundle(getClass().getClassLoader());
+ data.recycle();
+ reply.recycle();
+ }
+ return mExtras;
+ }
+
+ public Bundle respond(Bundle extras) throws RemoteException {
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+
+ data.writeInterfaceToken(IBulkCursor.descriptor);
+
+ data.writeBundle(extras);
+
+ mRemote.transact(RESPOND_TRANSACTION, data, reply, 0);
+
+ DatabaseUtils.readExceptionFromParcel(reply);
+
+ Bundle returnExtras = reply.readBundle(getClass().getClassLoader());
+ data.recycle();
+ reply.recycle();
+ return returnExtras;
+ }
+}
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorToCursorAdaptor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorToCursorAdaptor.java
new file mode 100644
index 0000000..932507e
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/BulkCursorToCursorAdaptor.java
@@ -0,0 +1,319 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import java.util.Map;
+
+import android.database.CharArrayBuffer;
+import android.database.ContentObserver;
+import android.database.DataSetObserver;
+
+import android.os.Bundle;
+import android.os.RemoteException;
+import android.util.Log;
+
+/**
+ * Adapts an {@link IBulkCursor} to a {@link Cursor} for use in the local
+ * process.
+ *
+ * {@hide}
+ */
+public final class BulkCursorToCursorAdaptor extends AbstractWindowedCursor {
+ private static final String TAG = "BulkCursor";
+
+ private SelfContentObserver mObserverBridge;
+ private IBulkCursor mBulkCursor;
+ private int mCount;
+ private String[] mColumns;
+ private boolean mWantsAllOnMoveCalls;
+
+ /**
+ * Binds this adaptor to the given remote bulk cursor and caches its row
+ * count, per-move callback preference, and column names (three Binder
+ * calls), then locates the "_id" column for the parent class.
+ *
+ * NOTE(review): if the remote process is dead the adaptor is left
+ * partially initialized (stale mCount/mColumns) — presumably callers
+ * recover via requery(); confirm.
+ */
+ public void set(IBulkCursor bulkCursor) {
+ mBulkCursor = bulkCursor;
+
+ try {
+ mCount = mBulkCursor.count();
+ mWantsAllOnMoveCalls = mBulkCursor.getWantsAllOnMoveCalls();
+
+ // Search for the rowID column index and set it for our parent
+ mColumns = mBulkCursor.getColumnNames();
+ mRowIdColumnIndex = findRowIdColumnIndex(mColumns);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Setup failed because the remote process is dead");
+ }
+ }
+
+ /**
+ * Version of set() that does fewer Binder calls if the caller
+ * already knows BulkCursorToCursorAdaptor's properties.
+ */
+ public void set(IBulkCursor bulkCursor, int count, int idIndex) {
+ mBulkCursor = bulkCursor;
+ mColumns = null; // lazily retrieved
+ mCount = count;
+ mRowIdColumnIndex = idIndex;
+ }
+
+ /**
+ * Returns column index of "_id" column, or -1 if not found.
+ */
+ public static int findRowIdColumnIndex(String[] columnNames) {
+ int length = columnNames.length;
+ for (int i = 0; i < length; i++) {
+ if (columnNames[i].equals("_id")) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Gets a SelfDataChangeOberserver that can be sent to a remote
+ * process to receive change notifications over IPC.
+ *
+ * @return A SelfContentObserver hooked up to this Cursor
+ */
+ public synchronized IContentObserver getObserver() {
+ if (mObserverBridge == null) {
+ mObserverBridge = new SelfContentObserver(this);
+ }
+ return null;//mObserverBridge.getContentObserver(); //TODO nf fix this
+ }
+
+ @Override
+ public int getCount() {
+ return mCount;
+ }
+
+ @Override
+ public boolean onMove(int oldPosition, int newPosition) {
+ try {
+ // Make sure we have the proper window
+ if (mWindow != null) {
+ // Fetch a fresh window only when the target row falls outside
+ // the range the current window covers.
+ if (newPosition < mWindow.getStartPosition() ||
+ newPosition >= (mWindow.getStartPosition() + mWindow.getNumRows())) {
+ mWindow = mBulkCursor.getWindow(newPosition);
+ } else if (mWantsAllOnMoveCalls) {
+ // Row is already cached; still tell the provider about the
+ // move if it asked for every onMove callback.
+ mBulkCursor.onMove(newPosition);
+ }
+ } else {
+ // No window yet (first move, or invalidated by requery/deleteRow).
+ mWindow = mBulkCursor.getWindow(newPosition);
+ }
+ } catch (RemoteException ex) {
+ // We tried to get a window and failed
+ Log.e(TAG, "Unable to get window because the remote process is dead");
+ return false;
+ }
+
+ // Couldn't obtain a window, something is wrong
+ if (mWindow == null) {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public void deactivate() {
+ // This will call onInvalidated(), so make sure to do it before calling release,
+ // which is what actually makes the data set invalid.
+ super.deactivate();
+
+ try {
+ mBulkCursor.deactivate();
+ } catch (RemoteException ex) {
+ Log.w(TAG, "Remote process exception when deactivating");
+ }
+ mWindow = null;
+ }
+
+ @Override
+ public void close() {
+ super.close();
+ try {
+ mBulkCursor.close();
+ } catch (RemoteException ex) {
+ Log.w(TAG, "Remote process exception when closing");
+ }
+ mWindow = null;
+ }
+
+ @Override
+ public boolean requery() {
+ try {
+ int oldCount = mCount;
+ //TODO get the window from a pool somewhere to avoid creating the memory dealer
+ mCount = mBulkCursor.requery(getObserver(), new CursorWindow(
+ false /* the window will be accessed across processes */));
+ if (mCount != -1) {
+ mPos = -1;
+ mWindow = null;
+
+ // super.requery() will call onChanged. Do it here instead of relying on the
+ // observer from the far side so that observers can see a correct value for mCount
+ // when responding to onChanged.
+ super.requery();
+ return true;
+ } else {
+ deactivate();
+ return false;
+ }
+ } catch (Exception ex) {
+ Log.e(TAG, "Unable to requery because the remote process exception " + ex.getMessage());
+ deactivate();
+ return false;
+ }
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ @Override
+ public boolean deleteRow() {
+ try {
+ // Ask the remote cursor to delete the row at the current position.
+ boolean result = mBulkCursor.deleteRow(mPos);
+ if (result != false) {
+ // The window contains the old value, discard it
+ mWindow = null;
+
+ // Fix up the position
+ mCount = mBulkCursor.count();
+ if (mPos < mCount) {
+ // Re-seat on the same index (now the next row) via a reset
+ // position so moveToPosition() refetches the window.
+ int oldPos = mPos;
+ mPos = -1;
+ moveToPosition(oldPos);
+ } else {
+ // Deleted the last row: park at the after-last position.
+ mPos = mCount;
+ }
+
+ // Send the change notification
+ onChange(true);
+ }
+ return result;
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to delete row because the remote process is dead");
+ return false;
+ }
+ }
+
+ @Override
+ public String[] getColumnNames() {
+ if (mColumns == null) {
+ try {
+ mColumns = mBulkCursor.getColumnNames();
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to fetch column names because the remote process is dead");
+ return null;
+ }
+ }
+ return mColumns;
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ @Override
+ public boolean commitUpdates(Map extends Long,
+ ? extends Map> additionalValues) {
+ if (!supportsUpdates()) {
+ Log.e(TAG, "commitUpdates not supported on this cursor, did you include the _id column?");
+ return false;
+ }
+
+ synchronized(mUpdatedRows) {
+ if (additionalValues != null) {
+ mUpdatedRows.putAll(additionalValues);
+ }
+
+ if (mUpdatedRows.size() <= 0) {
+ return false;
+ }
+
+ try {
+ boolean result = mBulkCursor.updateRows(mUpdatedRows);
+
+ if (result == true) {
+ mUpdatedRows.clear();
+
+ // Send the change notification
+ onChange(true);
+ }
+ return result;
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to commit updates because the remote process is dead");
+ return false;
+ }
+ }
+ }
+
+ @Override
+ public Bundle getExtras() {
+ try {
+ return mBulkCursor.getExtras();
+ } catch (RemoteException e) {
+ // This should never happen because the system kills processes that are using remote
+ // cursors when the provider process is killed.
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public Bundle respond(Bundle extras) {
+ try {
+ return mBulkCursor.respond(extras);
+ } catch (RemoteException e) {
+ // the system kills processes that are using remote cursors when the provider process
+ // is killed, but this can still happen if this is being called from the system process,
+ // so, better to log and return an empty bundle.
+ Log.w(TAG, "respond() threw RemoteException, returning an empty bundle.", e);
+ return Bundle.EMPTY;
+ }
+ }
+
+ @Override
+ public void copyStringToBuffer(int columnIndex, CharArrayBuffer buffer) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void registerContentObserver(ContentObserver observer) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void registerDataSetObserver(DataSetObserver observer) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void unregisterContentObserver(ContentObserver observer) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void unregisterDataSetObserver(DataSetObserver observer) {
+ // TODO Auto-generated method stub
+
+ }
+
+
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CrossProcessCursorWrapper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CrossProcessCursorWrapper.java
new file mode 100644
index 0000000..76581b5
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CrossProcessCursorWrapper.java
@@ -0,0 +1,26 @@
+package net.sqlcipher;
+
+import android.database.CrossProcessCursor;
+import android.database.CursorWindow;
+
+/**
+ * Wraps a net.sqlcipher.Cursor so it satisfies the framework's
+ * CrossProcessCursor contract. getWindow() deliberately returns null,
+ * which makes framework callers fall back to fillWindow(), where rows are
+ * copied via DatabaseUtils.cursorFillWindow().
+ */
+public class CrossProcessCursorWrapper extends CursorWrapper implements CrossProcessCursor {
+
+ public CrossProcessCursorWrapper(Cursor cursor) {
+ super(cursor);
+ }
+
+ @Override
+ public CursorWindow getWindow() {
+ // No shared window of our own: force callers onto fillWindow().
+ return null;
+ }
+
+ @Override
+ public void fillWindow(int position, CursorWindow window) {
+ // Copy rows starting at 'position' from the wrapped cursor into 'window'.
+ DatabaseUtils.cursorFillWindow(this, position, window);
+ }
+
+ @Override
+ public boolean onMove(int oldPosition, int newPosition) {
+ // All positions are reachable; no per-move bookkeeping required.
+ return true;
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/Cursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/Cursor.java
new file mode 100644
index 0000000..ac70671
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/Cursor.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+/**
+ * Extension of android.database.Cursor to support getType() for API < 11.
+ */
+public interface Cursor extends android.database.Cursor {
+ /*
+ * Values returned by {@link #getType(int)}.
+ * These should be consistent with the corresponding types defined in CursorWindow.h
+ */
+ /** Value returned by {@link #getType(int)} if the specified column is null */
+ static final int FIELD_TYPE_NULL = 0;
+
+ /** Value returned by {@link #getType(int)} if the specified column type is integer */
+ static final int FIELD_TYPE_INTEGER = 1;
+
+ /** Value returned by {@link #getType(int)} if the specified column type is float */
+ static final int FIELD_TYPE_FLOAT = 2;
+
+ /** Value returned by {@link #getType(int)} if the specified column type is string */
+ static final int FIELD_TYPE_STRING = 3;
+
+ /** Value returned by {@link #getType(int)} if the specified column type is blob */
+ static final int FIELD_TYPE_BLOB = 4;
+
+ /**
+ * Returns data type of the given column's value.
+ * The preferred type of the column is returned but the data may be converted to other types
+ * as documented in the get-type methods such as {@link #getInt(int)}, {@link #getFloat(int)}
+ * etc.
+ *
+ * Returned column types are
+ * <ul>
+ * <li>{@link #FIELD_TYPE_NULL}</li>
+ * <li>{@link #FIELD_TYPE_INTEGER}</li>
+ * <li>{@link #FIELD_TYPE_FLOAT}</li>
+ * <li>{@link #FIELD_TYPE_STRING}</li>
+ * <li>{@link #FIELD_TYPE_BLOB}</li>
+ * </ul>
+ *
+ * @param columnIndex the zero-based index of the target column.
+ * @return column value type
+ */
+ int getType(int columnIndex);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorIndexOutOfBoundsException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorIndexOutOfBoundsException.java
new file mode 100644
index 0000000..0788b37
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorIndexOutOfBoundsException.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+/**
+ * An exception indicating that a cursor is out of bounds.
+ */
+public class CursorIndexOutOfBoundsException extends IndexOutOfBoundsException {
+
+ /**
+ * Reports a cursor access outside [0, size) with a standard message.
+ *
+ * @param index the position that was requested
+ * @param size the number of rows/columns actually available
+ */
+ public CursorIndexOutOfBoundsException(int index, int size) {
+ super("Index " + index + " requested, with a size of " + size);
+ }
+
+ // Free-form variant for callers that build their own message.
+ public CursorIndexOutOfBoundsException(String message) {
+ super(message);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindow.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindow.java
new file mode 100644
index 0000000..b130dc2
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindow.java
@@ -0,0 +1,662 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import android.database.CharArrayBuffer;
+
+import android.content.res.Resources;
+import android.database.sqlite.SQLiteClosable;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.Process;
+import android.util.Log;
+import android.util.SparseIntArray;
+
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+
+import net.sqlcipher.CursorWindowAllocation;
+import net.sqlcipher.DefaultCursorWindowAllocation;
+
+/**
+ * A buffer containing multiple cursor rows.
+ */
+public class CursorWindow extends android.database.CursorWindow implements Parcelable {
+ /** The pointer to the native window class */
+ @SuppressWarnings("unused")
+
+ /** The pointer to the native window class. set by the native methods in
+ * android_database_CursorWindow.cpp
+ */
+ private long nWindow;
+ private int mStartPos;
+ private int mRequiredPos;
+
+ private static CursorWindowAllocation allocation = new DefaultCursorWindowAllocation();
+
+ /**
+ * Replaces the process-wide allocation policy used when constructing new
+ * CursorWindows (initial/growth/max native buffer sizes).
+ * NOTE(review): unsynchronized static state — set this once at startup,
+ * before any window is created.
+ */
+ public static void setCursorWindowAllocation(CursorWindowAllocation value){
+ allocation = value;
+ }
+
+ /** Returns the current process-wide window allocation policy. */
+ public static CursorWindowAllocation getCursorWindowAllocation() {
+ return allocation;
+ }
+
+ /**
+ * Creates a new empty window.
+ *
+ * @param localWindow true if this window will be used in this process only
+ */
+ public CursorWindow(boolean localWindow) {
+ super(localWindow);
+ mStartPos = 0;
+ // Defensive fallback: if a caller nulled the static policy, restore
+ // the default sizes before handing them to native code.
+ if(allocation == null){
+ allocation = new DefaultCursorWindowAllocation();
+ }
+ // Allocate the native window with the configured size policy.
+ native_init(localWindow,
+ allocation.getInitialAllocationSize(),
+ allocation.getGrowthPaddingSize(),
+ allocation.getMaxAllocationSize());
+ }
+
+ /**
+ * Returns the starting position of this window within the entire
+ * Cursor's result set.
+ *
+ * @return the starting position of this window within the entire
+ * Cursor's result set.
+ */
+ public int getStartPosition() {
+ return mStartPos;
+ }
+
+ /**
+ * Set the start position of cursor window
+ * @param pos
+ */
+ public void setStartPosition(int pos) {
+ mStartPos = pos;
+ }
+
+ public int getRequiredPosition(){
+ return mRequiredPos;
+ }
+
+ public void setRequiredPosition(int pos) {
+ mRequiredPos = pos;
+ }
+
+ /**
+ * Returns the number of rows in this window.
+ *
+ * @return the number of rows in this window.
+ */
+ public int getNumRows() {
+ acquireReference();
+ try {
+ return getNumRows_native();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native int getNumRows_native();
+ /**
+ * Set number of Columns
+ * @param columnNum
+ * @return true if success
+ */
+ public boolean setNumColumns(int columnNum) {
+ acquireReference();
+ try {
+ return setNumColumns_native(columnNum);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean setNumColumns_native(int columnNum);
+
+ /**
+ * Allocate a row in cursor window
+ * @return false if cursor window is out of memory
+ */
+ public boolean allocRow(){
+ acquireReference();
+ try {
+ return allocRow_native();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean allocRow_native();
+
+ /**
+ * Free the last row
+ */
+ public void freeLastRow(){
+ acquireReference();
+ try {
+ freeLastRow_native();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native void freeLastRow_native();
+
+ /**
+ * copy byte array to cursor window
+ * @param value
+ * @param row
+ * @param col
+ * @return false if fail to copy
+ */
+ public boolean putBlob(byte[] value, int row, int col) {
+ acquireReference();
+ try {
+ return putBlob_native(value, row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean putBlob_native(byte[] value, int row, int col);
+
+ /**
+ * Copy String to cursor window
+ * @param value
+ * @param row
+ * @param col
+ * @return false if fail to copy
+ */
+ public boolean putString(String value, int row, int col) {
+ acquireReference();
+ try {
+ return putString_native(value, row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean putString_native(String value, int row, int col);
+
+ /**
+ * Copy integer to cursor window
+ * @param value
+ * @param row
+ * @param col
+ * @return false if fail to copy
+ */
+ public boolean putLong(long value, int row, int col) {
+ acquireReference();
+ try {
+ return putLong_native(value, row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean putLong_native(long value, int row, int col);
+
+
+ /**
+ * Copy double to cursor window
+ * @param value
+ * @param row
+ * @param col
+ * @return false if fail to copy
+ */
+ public boolean putDouble(double value, int row, int col) {
+ acquireReference();
+ try {
+ return putDouble_native(value, row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean putDouble_native(double value, int row, int col);
+
+ /**
+ * Set the [row, col] value to NULL
+ * @param row
+ * @param col
+ * @return false if fail to copy
+ */
+ public boolean putNull(int row, int col) {
+ acquireReference();
+ try {
+ return putNull_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean putNull_native(int row, int col);
+
+
+ /**
+ * Returns {@code true} if given field is {@code NULL}.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return {@code true} if given field is {@code NULL}
+ */
+ public boolean isNull(int row, int col) {
+ acquireReference();
+ try {
+ return isNull_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean isNull_native(int row, int col);
+
+ /**
+ * Returns a byte array for the given field.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a String value for the given field
+ */
+ public byte[] getBlob(int row, int col) {
+ acquireReference();
+ try {
+ return getBlob_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns the value at (row, col) as a byte array.
+ *
+ * <p>If the value is null, then null is returned. If the
+ * type of column col is a string type, then the result
+ * is the array of bytes that make up the internal representation of the
+ * string value. If the type of column col is integral or floating-point,
+ * then an {@link SQLiteException} is thrown.
+ */
+ private native byte[] getBlob_native(int row, int col);
+
+ /**
+ * Returns data type of the given column's value.
+ *
+ * Returned column types are
+ * <ul>
+ * <li>{@link Cursor#FIELD_TYPE_NULL}</li>
+ * <li>{@link Cursor#FIELD_TYPE_INTEGER}</li>
+ * <li>{@link Cursor#FIELD_TYPE_FLOAT}</li>
+ * <li>{@link Cursor#FIELD_TYPE_STRING}</li>
+ * <li>{@link Cursor#FIELD_TYPE_BLOB}</li>
+ * </ul>
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return the value type
+ */
+ public int getType(int row, int col) {
+ acquireReference();
+ try {
+ return getType_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Checks if a field contains either a blob or is null.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return {@code true} if given field is {@code NULL} or a blob
+ * @deprecated use {@link #getType(int, int)} instead
+ */
+ public boolean isBlob(int row, int col) {
+ acquireReference();
+ try {
+ return isBlob_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Checks if a field contains a long
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return {@code true} if given field is a long
+ * @deprecated use {@link #getType(int, int)} instead
+ */
+ public boolean isLong(int row, int col) {
+ acquireReference();
+ try {
+ return isInteger_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Checks if a field contains a float.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return {@code true} if given field is a float
+ * @deprecated use {@link #getType(int, int)} instead
+ */
+ public boolean isFloat(int row, int col) {
+ acquireReference();
+ try {
+ return isFloat_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Checks if a field contains either a String or is null.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return {@code true} if given field is {@code NULL} or a String
+ * @deprecated use {@link #getType(int, int)} instead
+ */
+ public boolean isString(int row, int col) {
+ acquireReference();
+ try {
+ return isString_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native boolean isBlob_native(int row, int col);
+ private native boolean isString_native(int row, int col);
+ private native boolean isInteger_native(int row, int col);
+ private native boolean isFloat_native(int row, int col);
+
+ private native int getType_native(int row, int col);
+
+ /**
+ * Returns a String for the given field.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a String value for the given field
+ */
+ public String getString(int row, int col) {
+ acquireReference();
+ try {
+ return getString_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns the value at (row, col) as a String.
+ *
+ * <p>If the value is null, then null is returned. If the
+ * type of column col is integral, then the result is the string
+ * that is obtained by formatting the integer value with the printf
+ * family of functions using format specifier %lld. If the
+ * type of column col is floating-point, then the result is the string
+ * that is obtained by formatting the floating-point value with the
+ * printf family of functions using format specifier %g.
+ * If the type of column col is a blob type, then an
+ * {@link SQLiteException} is thrown.
+ */
+ private native String getString_native(int row, int col);
+ //private native byte[] getString_native(int row, int col);
+
+ /**
+ * copy the text for the given field in the provided char array.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @param buffer the CharArrayBuffer to copy the text into,
+ * If the requested string is larger than the buffer
+ * a new char buffer will be created to hold the string. and assigne to
+ * CharArrayBuffer.data
+ */
+ public void copyStringToBuffer(int row, int col, CharArrayBuffer buffer) {
+ if (buffer == null) {
+ throw new IllegalArgumentException("CharArrayBuffer should not be null");
+ }
+ if (buffer.data == null) {
+ buffer.data = new char[64];
+ }
+ acquireReference();
+ try {
+ char[] newbuf = copyStringToBuffer_native(
+ row - mStartPos, col, buffer.data.length, buffer);
+ if (newbuf != null) {
+ buffer.data = newbuf;
+ }
+ } finally {
+ releaseReference();
+ }
+ }
+
+ private native char[] copyStringToBuffer_native(
+ int row, int col, int bufferSize, CharArrayBuffer buffer);
+
+ /**
+ * Returns a long for the given field.
+ * row is 0 based
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a long value for the given field
+ */
+ public long getLong(int row, int col) {
+ acquireReference();
+ try {
+ return getLong_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns the value at (row, col) as a long.
+ *
+ * <p>If the value is null, then 0L is returned. If the
+ * type of column col is a string type, then the result
+ * is the long that is obtained by parsing the string value with
+ * strtoll. If the type of column col is
+ * floating-point, then the result is the floating-point value casted to a long.
+ * If the type of column col is a blob type, then an
+ * {@link SQLiteException} is thrown.
+ */
+ private native long getLong_native(int row, int col);
+
+ /**
+ * Returns a double for the given field.
+ * row is 0 based
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a double value for the given field
+ */
+ public double getDouble(int row, int col) {
+ acquireReference();
+ try {
+ return getDouble_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns the value at (row, col) as a double.
+ *
+ * <p>If the value is null, then 0.0 is returned. If the
+ * type of column col is a string type, then the result
+ * is the double that is obtained by parsing the string value with
+ * strtod. If the type of column col is
+ * integral, then the result is the integer value casted to a double.
+ * If the type of column col is a blob type, then an
+ * {@link SQLiteException} is thrown.
+ */
+ private native double getDouble_native(int row, int col);
+
+ /**
+ * Returns a short for the given field.
+ * row is 0 based
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a short value for the given field
+ */
+ public short getShort(int row, int col) {
+ acquireReference();
+ try {
+ return (short) getLong_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns an int for the given field.
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return an int value for the given field
+ */
+ public int getInt(int row, int col) {
+ acquireReference();
+ try {
+ return (int) getLong_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Returns a float for the given field.
+ * row is 0 based
+ *
+ * @param row the row to read from, row - getStartPosition() being the actual row in the window
+ * @param col the column to read from
+ * @return a float value for the given field
+ */
+ public float getFloat(int row, int col) {
+ acquireReference();
+ try {
+ return (float) getDouble_native(row - mStartPos, col);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Clears out the existing contents of the window, making it safe to reuse
+ * for new data. Note that the number of columns in the window may NOT
+ * change across a call to clear().
+ */
+ public void clear() {
+ acquireReference();
+ try {
+ mStartPos = 0;
+ native_clear();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /** Clears out the native side of things */
+ private native void native_clear();
+
+ /**
+ * Cleans up the native resources associated with the window.
+ */
+ public void close() {
+ releaseReference();
+ }
+
+ private native void close_native();
+
+ @Override
+ protected void finalize() {
+ // Just in case someone forgot to call close...
+ // nWindow == 0 means the native window was never created or was
+ // already torn down; nothing to release.
+ // NOTE(review): this bypasses the reference count and calls
+ // close_native() directly — verify it cannot race a live reference.
+ if (nWindow == 0) {
+ return;
+ }
+ close_native();
+ }
+
+ public static final Parcelable.Creator CREATOR
+ = new Parcelable.Creator() {
+ public CursorWindow createFromParcel(Parcel source) {
+ return new CursorWindow(source,0);
+ }
+
+ public CursorWindow[] newArray(int size) {
+ return new CursorWindow[size];
+ }
+ };
+
+ public static CursorWindow newFromParcel(Parcel p) {
+ return CREATOR.createFromParcel(p);
+ }
+
+ public int describeContents() {
+ return 0;
+ }
+
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeStrongBinder(native_getBinder());
+ dest.writeInt(mStartPos);
+ }
+
+ public CursorWindow(Parcel source,int foo) {
+
+ super(true);
+
+ IBinder nativeBinder = source.readStrongBinder();
+ mStartPos = source.readInt();
+
+ native_init(nativeBinder);
+ }
+
+ /** Get the binder for the native side of the window */
+ private native IBinder native_getBinder();
+
+ /** Does the native side initialization for an empty window */
+ private native void native_init(boolean localOnly, long initialSize,
+ long growthPaddingSize, long maxSize);
+
+ /** Does the native side initialization with an existing binder from another process */
+ private native void native_init(IBinder nativeBinder);
+
+ @Override
+ protected void onAllReferencesReleased() {
+ close_native();
+
+ super.onAllReferencesReleased();
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindowAllocation.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindowAllocation.java
new file mode 100644
index 0000000..6b4c47f
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWindowAllocation.java
@@ -0,0 +1,7 @@
+package net.sqlcipher;
+
+/**
+ * Size policy for a CursorWindow's native buffer: how large it starts,
+ * how much padding is added when it grows, and the hard upper bound.
+ * All values are in bytes (inferred from use in native_init — confirm).
+ */
+public interface CursorWindowAllocation {
+ long getInitialAllocationSize();
+ long getGrowthPaddingSize();
+ long getMaxAllocationSize();
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWrapper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWrapper.java
new file mode 100644
index 0000000..aae6be0
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CursorWrapper.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+/**
+ * Extension of android.database.CursorWrapper to support getType() for API < 11.
+ */
+public class CursorWrapper extends android.database.CursorWrapper implements Cursor {
+
+ private final Cursor mCursor;
+
+ public CursorWrapper(Cursor cursor) {
+ super(cursor);
+ mCursor = cursor;
+ }
+
+ public int getType(int columnIndex) {
+ return mCursor.getType(columnIndex);
+ }
+
+ public Cursor getWrappedCursor() {
+ return mCursor;
+ }
+}
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CustomCursorWindowAllocation.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CustomCursorWindowAllocation.java
new file mode 100644
index 0000000..9575daf
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/CustomCursorWindowAllocation.java
@@ -0,0 +1,30 @@
+package net.sqlcipher;
+
+import net.sqlcipher.CursorWindowAllocation;
+
+ /**
+ * Immutable CursorWindowAllocation backed by fixed values supplied at
+ * construction time.
+ */
+ public class CustomCursorWindowAllocation implements CursorWindowAllocation {
+
+ // Final: the values never change after construction. The original dead
+ // "= 0L" initializers (always overwritten by the constructor) are removed.
+ private final long initialAllocationSize;
+ private final long growthPaddingSize;
+ private final long maxAllocationSize;
+
+ /**
+ * @param initialSize initial allocation size for a new window
+ * @param growthPaddingSize extra padding added each time the window grows
+ * @param maxAllocationSize upper bound on the window's allocation
+ */
+ public CustomCursorWindowAllocation(long initialSize,
+ long growthPaddingSize,
+ long maxAllocationSize){
+ this.initialAllocationSize = initialSize;
+ this.growthPaddingSize = growthPaddingSize;
+ this.maxAllocationSize = maxAllocationSize;
+ }
+
+ public long getInitialAllocationSize() {
+ return initialAllocationSize;
+ }
+
+ public long getGrowthPaddingSize() {
+ return growthPaddingSize;
+ }
+
+ public long getMaxAllocationSize() {
+ return maxAllocationSize;
+ }
+ }
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseErrorHandler.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseErrorHandler.java
new file mode 100644
index 0000000..58096f1
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseErrorHandler.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import net.sqlcipher.database.SQLiteDatabase;
+
+/**
+ * An interface to let the apps define the actions to take when the following errors are detected
+ * database corruption
+ */
+public interface DatabaseErrorHandler {
+
+ /**
+ * defines the method to be invoked when database corruption is detected.
+ * @param dbObj the {@link SQLiteDatabase} object representing the database on which corruption
+ * is detected.
+ */
+ void onCorruption(SQLiteDatabase dbObj);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseUtils.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseUtils.java
new file mode 100644
index 0000000..3ab0ff8
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DatabaseUtils.java
@@ -0,0 +1,1235 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import net.sqlcipher.database.SQLiteAbortException;
+import net.sqlcipher.database.SQLiteConstraintException;
+import net.sqlcipher.database.SQLiteDatabase;
+import net.sqlcipher.database.SQLiteDatabaseCorruptException;
+import net.sqlcipher.database.SQLiteDiskIOException;
+import net.sqlcipher.database.SQLiteException;
+import net.sqlcipher.database.SQLiteFullException;
+import net.sqlcipher.database.SQLiteProgram;
+import net.sqlcipher.database.SQLiteStatement;
+
+import java.io.FileNotFoundException;
+import java.io.PrintStream;
+import java.text.Collator;
+import java.util.HashMap;
+import java.util.Map;
+
+import android.content.ContentValues;
+import android.content.OperationApplicationException;
+import android.os.Parcel;
+import android.text.TextUtils;
+import android.util.Config;
+import android.util.Log;
+
+/**
+ * Static utility methods for dealing with databases and {@link Cursor}s.
+ */
+public class DatabaseUtils {
+ private static final String TAG = "DatabaseUtils";
+
+ private static final boolean DEBUG = false;
+ // NOTE(review): android.util.Config is long-deprecated; LOGD/LOGV are compile-time
+ // constants, so LOCAL_LOGV is effectively fixed at build time.
+ private static final boolean LOCAL_LOGV = DEBUG ? Config.LOGD : Config.LOGV;
+
+ // Projection used by queryNumEntries(): a single "count(*)" column.
+ private static final String[] countProjection = new String[]{"count(*)"};
+
+ /**
+ * Special function for writing an exception result at the header of
+ * a parcel, to be used when returning an exception from a transaction.
+ * exception will be re-thrown by the function in another process
+ * @param reply Parcel to write to
+ * @param e The Exception to be written.
+ * @see Parcel#writeNoException
+ * @see Parcel#writeException
+ */
+ public static final void writeExceptionToParcel(Parcel reply, Exception e) {
+ int code = 0;
+ boolean logException = true;
+ // NOTE(review): branch order matters — the specific SQLite* exceptions appear
+ // to subclass SQLiteException, so they must be matched before the base class;
+ // confirm against the net.sqlcipher.database hierarchy.
+ if (e instanceof FileNotFoundException) {
+ code = 1;
+ logException = false;
+ } else if (e instanceof IllegalArgumentException) {
+ code = 2;
+ } else if (e instanceof UnsupportedOperationException) {
+ code = 3;
+ } else if (e instanceof SQLiteAbortException) {
+ code = 4;
+ } else if (e instanceof SQLiteConstraintException) {
+ code = 5;
+ } else if (e instanceof SQLiteDatabaseCorruptException) {
+ code = 6;
+ } else if (e instanceof SQLiteFullException) {
+ code = 7;
+ } else if (e instanceof SQLiteDiskIOException) {
+ code = 8;
+ } else if (e instanceof SQLiteException) {
+ code = 9;
+ } else if (e instanceof OperationApplicationException) {
+ code = 10;
+ } else {
+ // not one of the coded types: marshal generically and always log
+ reply.writeException(e);
+ Log.e(TAG, "Writing exception to parcel", e);
+ return;
+ }
+ reply.writeInt(code);
+ reply.writeString(e.getMessage());
+
+ if (logException) {
+ Log.e(TAG, "Writing exception to parcel", e);
+ }
+ }
+
+ /**
+ * Special function for reading an exception result from the header of
+ * a parcel, to be used after receiving the result of a transaction. This
+ * will throw the exception for you if it had been written to the Parcel,
+ * otherwise return and let you read the normal result data from the Parcel.
+ * @param reply Parcel to read from
+ * @see Parcel#writeNoException
+ * @see Parcel#readException
+ */
+ public static final void readExceptionFromParcel(Parcel reply) {
+ int code = reply.readInt();
+ if (code == 0) return;
+ String msg = reply.readString();
+ DatabaseUtils.readExceptionFromParcel(reply, msg, code);
+ }
+
+ /**
+ * Variant of {@link #readExceptionFromParcel(Parcel)} that additionally
+ * rethrows code 1 as the checked FileNotFoundException (see
+ * writeExceptionToParcel); other codes are delegated as usual.
+ */
+ public static void readExceptionWithFileNotFoundExceptionFromParcel(
+ Parcel reply) throws FileNotFoundException {
+ int code = reply.readInt();
+ if (code == 0) return;
+ String msg = reply.readString();
+ if (code == 1) {
+ throw new FileNotFoundException(msg);
+ } else {
+ DatabaseUtils.readExceptionFromParcel(reply, msg, code);
+ }
+ }
+
+ /**
+ * Variant of {@link #readExceptionFromParcel(Parcel)} that additionally
+ * rethrows code 10 as the checked OperationApplicationException (see
+ * writeExceptionToParcel); other codes are delegated as usual.
+ */
+ public static void readExceptionWithOperationApplicationExceptionFromParcel(
+ Parcel reply) throws OperationApplicationException {
+ int code = reply.readInt();
+ if (code == 0) return;
+ String msg = reply.readString();
+ if (code == 10) {
+ throw new OperationApplicationException(msg);
+ } else {
+ DatabaseUtils.readExceptionFromParcel(reply, msg, code);
+ }
+ }
+
+ /**
+ * Rethrows the unchecked exception encoded by writeExceptionToParcel(). Codes
+ * 1 (FileNotFoundException) and 10 (OperationApplicationException) are checked
+ * exceptions handled by the dedicated readExceptionWith...* variants, so they
+ * fall through to Parcel#readException here along with any unknown code.
+ */
+ private static final void readExceptionFromParcel(Parcel reply, String msg, int code) {
+ switch (code) {
+ case 2:
+ throw new IllegalArgumentException(msg);
+ case 3:
+ throw new UnsupportedOperationException(msg);
+ case 4:
+ throw new SQLiteAbortException(msg);
+ case 5:
+ throw new SQLiteConstraintException(msg);
+ case 6:
+ throw new SQLiteDatabaseCorruptException(msg);
+ case 7:
+ throw new SQLiteFullException(msg);
+ case 8:
+ throw new SQLiteDiskIOException(msg);
+ case 9:
+ throw new SQLiteException(msg);
+ default:
+ // unknown or checked-exception codes: let Parcel decide
+ reply.readException(code, msg);
+ }
+ }
+
+ /**
+ * Binds the given Object to the given SQLiteProgram using the proper
+ * typing. For example, bind numbers as longs/doubles, and everything else
+ * as a string by call toString() on it.
+ *
+ * @param prog the program to bind the object to
+ * @param index the 1-based index to bind at
+ * @param value the value to bind
+ */
+ public static void bindObjectToProgram(SQLiteProgram prog, int index,
+ Object value) {
+ if (value == null) {
+ prog.bindNull(index);
+ // Double/Float must be tested before the general Number branch below,
+ // otherwise they would be truncated to longs.
+ } else if (value instanceof Double || value instanceof Float) {
+ prog.bindDouble(index, ((Number)value).doubleValue());
+ } else if (value instanceof Number) {
+ prog.bindLong(index, ((Number)value).longValue());
+ } else if (value instanceof Boolean) {
+ // SQLite has no boolean type; store 1/0 as an integer.
+ Boolean bool = (Boolean)value;
+ if (bool) {
+ prog.bindLong(index, 1);
+ } else {
+ prog.bindLong(index, 0);
+ }
+ } else if (value instanceof byte[]){
+ prog.bindBlob(index, (byte[]) value);
+ } else {
+ prog.bindString(index, value.toString());
+ }
+ }
+
+ /**
+ * Returns data type of the given object's value.
+ *
+ * Returned values are
+ * <ul>
+ * <li>{@link Cursor#FIELD_TYPE_NULL}</li>
+ * <li>{@link Cursor#FIELD_TYPE_INTEGER}</li>
+ * <li>{@link Cursor#FIELD_TYPE_FLOAT}</li>
+ * <li>{@link Cursor#FIELD_TYPE_STRING}</li>
+ * <li>{@link Cursor#FIELD_TYPE_BLOB}</li>
+ * </ul>
+ *
+ * @param obj the object whose value type is to be returned
+ * @return object value type
+ * @hide
+ */
+ public static int getTypeOfObject(Object obj) {
+ if (obj == null) {
+ return 0; /* Cursor.FIELD_TYPE_NULL */
+ } else if (obj instanceof byte[]) {
+ return 4; /* Cursor.FIELD_TYPE_BLOB */
+ } else if (obj instanceof Float || obj instanceof Double) {
+ return 2; /* Cursor.FIELD_TYPE_FLOAT */
+ } else if (obj instanceof Long || obj instanceof Integer) {
+ return 1; /* Cursor.FIELD_TYPE_INTEGER */
+ } else {
+ return 3; /* Cursor.FIELD_TYPE_STRING */
+ }
+ }
+
+ /**
+ * Appends sqlString to sb as a single-quoted SQL string literal, doubling any
+ * embedded single quotes.
+ *
+ * Deprecated in spirit: prefer "?" parameter binding. It remains useful when a
+ * ContentProvider must splice its own WHERE fragments around caller-supplied
+ * positional parameters, where adding new "?" slots would shift their indexes.
+ *
+ * @param sb the StringBuilder that the SQL string will be appended to
+ * @param sqlString the raw string to be appended, which may contain single
+ * quotes
+ */
+ public static void appendEscapedSQLString(StringBuilder sb, String sqlString) {
+ sb.append('\'');
+ if (sqlString.indexOf('\'') == -1) {
+ // fast path: nothing to escape
+ sb.append(sqlString);
+ } else {
+ final int len = sqlString.length();
+ for (int pos = 0; pos < len; pos++) {
+ final char ch = sqlString.charAt(pos);
+ if (ch == '\'') {
+ sb.append('\''); // double the quote to escape it
+ }
+ sb.append(ch);
+ }
+ }
+ sb.append('\'');
+ }
+
+ /**
+ * SQL-escape a string: returns value wrapped in single quotes with any
+ * embedded single quotes doubled.
+ */
+ public static String sqlEscapeString(String value) {
+ final StringBuilder out = new StringBuilder();
+ appendEscapedSQLString(out, value);
+ return out.toString();
+ }
+
+ /**
+ * Appends an Object to an SQL string with the proper escaping, etc.
+ */
+ public static final void appendValueToSql(StringBuilder sql, Object value) {
+ if (value == null) {
+ sql.append("NULL");
+ } else if (value instanceof Boolean) {
+ // SQLite has no boolean type; render as the integer literal 1 or 0.
+ Boolean bool = (Boolean)value;
+ if (bool) {
+ sql.append('1');
+ } else {
+ sql.append('0');
+ }
+ } else {
+ // everything else (including numbers) goes through toString + quoting
+ appendEscapedSQLString(sql, value.toString());
+ }
+ }
+
+ /**
+ * Concatenates two SQL WHERE clauses with AND, returning the other operand
+ * unchanged when either side is null or empty.
+ * @hide
+ */
+ public static String concatenateWhere(String a, String b) {
+ if (TextUtils.isEmpty(a)) {
+ return b;
+ }
+ return TextUtils.isEmpty(b) ? a : "(" + a + ") AND (" + b + ")";
+ }
+
+ /**
+ * return the collation key
+ * @param name
+ * @return the collation key
+ */
+ public static String getCollationKey(String name) {
+ byte [] arr = getCollationKeyInBytes(name);
+ try {
+ // ISO-8859-1 maps each key byte 1:1 onto a char, preserving byte order.
+ return new String(arr, 0, getKeyLen(arr), "ISO8859_1");
+ } catch (Exception ex) {
+ // Broad catch is deliberate-looking: it covers the (practically
+ // unreachable) UnsupportedEncodingException from the String constructor
+ // and any bounds error from getKeyLen on an empty key array; either way
+ // the collation key degrades to "".
+ return "";
+ }
+ }
+
+ /**
+ * return the collation key in hex format
+ * @param name
+ * @return the collation key in hex format
+ */
+ public static String getHexCollationKey(String name) {
+ byte [] arr = getCollationKeyInBytes(name);
+ char[] keys = encodeHex(arr, HEX_DIGITS_LOWER);
+ // two hex chars per key byte; any trailing zero terminator is dropped via getKeyLen
+ return new String(keys, 0, getKeyLen(arr) * 2);
+ }
+
+ private static final char[] HEX_DIGITS_LOWER = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
+
+ /** Hex-encodes data, two output chars per input byte, using the given digit table. */
+ private static char[] encodeHex(final byte[] data, final char[] toDigits) {
+ final int l = data.length;
+ final char[] out = new char[l << 1];
+ // two characters form the hex value.
+ for (int i = 0, j = 0; i < l; i++) {
+ out[j++] = toDigits[(0xF0 & data[i]) >>> 4];
+ out[j++] = toDigits[0x0F & data[i]];
+ }
+ return out;
+ }
+
+ /** Length of the key excluding a trailing zero terminator byte, if present. */
+ private static int getKeyLen(byte[] arr) {
+ if (arr[arr.length - 1] != 0) {
+ return arr.length;
+ } else {
+ // remove zero "termination"
+ return arr.length-1;
+ }
+ }
+
+ /** Collation key bytes for name using a PRIMARY-strength collator. */
+ private static byte[] getCollationKeyInBytes(String name) {
+ // NOTE(review): lazy init without synchronization — racy if first used from
+ // multiple threads concurrently; confirm single-threaded first use.
+ if (mColl == null) {
+ mColl = Collator.getInstance();
+ mColl.setStrength(Collator.PRIMARY);
+ }
+ return mColl.getCollationKey(name).toByteArray();
+ }
+
+ // Lazily-created shared collator used by the collation-key helpers above.
+ private static Collator mColl = null;
+ /**
+ * Prints the contents of a Cursor to System.out. The position is restored
+ * after printing.
+ *
+ * @param cursor the cursor to print
+ */
+ public static void dumpCursor(Cursor cursor) {
+ dumpCursor(cursor, System.out);
+ }
+
+ /**
+ * Prints the contents of a Cursor to a PrintStream. The position is restored
+ * after printing.
+ *
+ * @param cursor the cursor to print
+ * @param stream the stream to print to
+ */
+ public static void dumpCursor(Cursor cursor, PrintStream stream) {
+ stream.println(">>>>> Dumping cursor " + cursor);
+ if (cursor != null) {
+ int startPos = cursor.getPosition();
+
+ // rewind to just before the first row so moveToNext() visits every row
+ cursor.moveToPosition(-1);
+ while (cursor.moveToNext()) {
+ dumpCurrentRow(cursor, stream);
+ }
+ cursor.moveToPosition(startPos);
+ }
+ stream.println("<<<<<");
+ }
+
+ /**
+ * Prints the contents of a Cursor to a StringBuilder. The position
+ * is restored after printing.
+ *
+ * @param cursor the cursor to print
+ * @param sb the StringBuilder to print to
+ */
+ public static void dumpCursor(Cursor cursor, StringBuilder sb) {
+ sb.append(">>>>> Dumping cursor " + cursor + "\n");
+ if (cursor != null) {
+ int startPos = cursor.getPosition();
+
+ // rewind to just before the first row so moveToNext() visits every row
+ cursor.moveToPosition(-1);
+ while (cursor.moveToNext()) {
+ dumpCurrentRow(cursor, sb);
+ }
+ cursor.moveToPosition(startPos);
+ }
+ sb.append("<<<<<\n");
+ }
+
+ /**
+ * Prints the contents of a Cursor to a String. The position is restored
+ * after printing.
+ *
+ * @param cursor the cursor to print
+ * @return a String that contains the dumped cursor
+ */
+ public static String dumpCursorToString(Cursor cursor) {
+ StringBuilder sb = new StringBuilder();
+ dumpCursor(cursor, sb);
+ return sb.toString();
+ }
+
+ /**
+ * Prints the contents of a Cursor's current row to System.out.
+ *
+ * @param cursor the cursor to print from
+ */
+ public static void dumpCurrentRow(Cursor cursor) {
+ dumpCurrentRow(cursor, System.out);
+ }
+
+ /**
+ * Prints the contents of a Cursor's current row to a PrintStream.
+ * Does not move the cursor.
+ *
+ * @param cursor the cursor to print
+ * @param stream the stream to print to
+ */
+ public static void dumpCurrentRow(Cursor cursor, PrintStream stream) {
+ String[] cols = cursor.getColumnNames();
+ stream.println("" + cursor.getPosition() + " {");
+ int length = cols.length;
+ for (int i = 0; i< length; i++) {
+ String value;
+ try {
+ value = cursor.getString(i);
+ } catch (SQLiteException e) {
+ // assume that if the getString threw this exception then the column is not
+ // representable by a string, e.g. it is a BLOB.
+ value = "";
+ }
+ stream.println(" " + cols[i] + '=' + value);
+ }
+ stream.println("}");
+ }
+
+ /**
+ * Prints the contents of a Cursor's current row to a StringBuilder.
+ * Does not move the cursor.
+ *
+ * @param cursor the cursor to print
+ * @param sb the StringBuilder to print to
+ */
+ public static void dumpCurrentRow(Cursor cursor, StringBuilder sb) {
+ String[] cols = cursor.getColumnNames();
+ sb.append("" + cursor.getPosition() + " {\n");
+ int length = cols.length;
+ for (int i = 0; i < length; i++) {
+ String value;
+ try {
+ value = cursor.getString(i);
+ } catch (SQLiteException e) {
+ // assume that if the getString threw this exception then the column is not
+ // representable by a string, e.g. it is a BLOB.
+ value = "";
+ }
+ sb.append(" " + cols[i] + '=' + value + "\n");
+ }
+ sb.append("}\n");
+ }
+
+ /**
+ * Dump the contents of a Cursor's current row to a String.
+ *
+ * @param cursor the cursor to print
+ * @return a String that contains the dumped cursor row
+ */
+ public static String dumpCurrentRowToString(Cursor cursor) {
+ StringBuilder sb = new StringBuilder();
+ dumpCurrentRow(cursor, sb);
+ return sb.toString();
+ }
+
+ /**
+ * Reads a String out of a field in a Cursor and writes it to a Map.
+ * NOTE(review): a missing field is not skipped — getColumnIndexOrThrow throws
+ * IllegalArgumentException.
+ *
+ * @param cursor The cursor to read from
+ * @param field The TEXT field to read
+ * @param values The ContentValues to put the value into, with the field as the key
+ */
+ public static void cursorStringToContentValues(Cursor cursor, String field,
+ ContentValues values) {
+ cursorStringToContentValues(cursor, field, values, field);
+ }
+
+ /**
+ * Reads a String out of a field in a Cursor and writes it to an InsertHelper.
+ *
+ * @param cursor The cursor to read from
+ * @param field The TEXT field to read
+ * @param inserter The InsertHelper to bind into
+ * @param index the index of the bind entry in the InsertHelper
+ */
+ public static void cursorStringToInsertHelper(Cursor cursor, String field,
+ InsertHelper inserter, int index) {
+ inserter.bind(index, cursor.getString(cursor.getColumnIndexOrThrow(field)));
+ }
+
+ /**
+ * Reads a String out of a field in a Cursor and writes it to a Map.
+ *
+ * @param cursor The cursor to read from
+ * @param field The TEXT field to read
+ * @param values The ContentValues to put the value into, with the field as the key
+ * @param key The key to store the value with in the map
+ */
+ public static void cursorStringToContentValues(Cursor cursor, String field,
+ ContentValues values, String key) {
+ values.put(key, cursor.getString(cursor.getColumnIndexOrThrow(field)));
+ }
+
+ /**
+ * Reads an Integer out of a field in a Cursor and writes it to a Map.
+ *
+ * @param cursor The cursor to read from
+ * @param field The INTEGER field to read
+ * @param values The ContentValues to put the value into, with the field as the key
+ */
+ public static void cursorIntToContentValues(Cursor cursor, String field, ContentValues values) {
+ cursorIntToContentValues(cursor, field, values, field);
+ }
+
+ /**
+ * Reads a Integer out of a field in a Cursor and writes it to a Map.
+ * Unlike the *IfPresent variants, a SQL NULL is stored as an explicit null
+ * mapping. NOTE(review): getColumnIndex returns -1 for a missing field, and
+ * isNull(-1) behavior is undefined — callers must pass an existing field.
+ *
+ * @param cursor The cursor to read from
+ * @param field The INTEGER field to read
+ * @param values The ContentValues to put the value into, with the field as the key
+ * @param key The key to store the value with in the map
+ */
+ public static void cursorIntToContentValues(Cursor cursor, String field, ContentValues values,
+ String key) {
+ int colIndex = cursor.getColumnIndex(field);
+ if (!cursor.isNull(colIndex)) {
+ values.put(key, cursor.getInt(colIndex));
+ } else {
+ values.put(key, (Integer) null);
+ }
+ }
+
+ /**
+ * Reads a Long out of a field in a Cursor and writes it to a Map.
+ *
+ * @param cursor The cursor to read from
+ * @param field The INTEGER field to read
+ * @param values The ContentValues to put the value into, with the field as the key
+ */
+ public static void cursorLongToContentValues(Cursor cursor, String field, ContentValues values)
+ {
+ cursorLongToContentValues(cursor, field, values, field);
+ }
+
+ /**
+ * Reads a Long out of a field in a Cursor and writes it to a Map.
+ * A SQL NULL is stored as an explicit null mapping.
+ *
+ * @param cursor The cursor to read from
+ * @param field The INTEGER field to read
+ * @param values The ContentValues to put the value into
+ * @param key The key to store the value with in the map
+ */
+ public static void cursorLongToContentValues(Cursor cursor, String field, ContentValues values,
+ String key) {
+ int colIndex = cursor.getColumnIndex(field);
+ if (!cursor.isNull(colIndex)) {
+ Long value = Long.valueOf(cursor.getLong(colIndex));
+ values.put(key, value);
+ } else {
+ values.put(key, (Long) null);
+ }
+ }
+
+ /**
+ * Reads a Double out of a field in a Cursor and writes it to a Map.
+ * NOTE(review): misnamed — it writes ContentValues, not "CursorValues";
+ * kept as-is for source compatibility with existing callers.
+ *
+ * @param cursor The cursor to read from
+ * @param field The REAL field to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorDoubleToCursorValues(Cursor cursor, String field, ContentValues values)
+ {
+ cursorDoubleToContentValues(cursor, field, values, field);
+ }
+
+ /**
+ * Reads a Double out of a field in a Cursor and writes it to a Map.
+ * A SQL NULL is stored as an explicit null mapping.
+ *
+ * @param cursor The cursor to read from
+ * @param field The REAL field to read
+ * @param values The ContentValues to put the value into
+ * @param key The key to store the value with in the map
+ */
+ public static void cursorDoubleToContentValues(Cursor cursor, String field,
+ ContentValues values, String key) {
+ int colIndex = cursor.getColumnIndex(field);
+ if (!cursor.isNull(colIndex)) {
+ values.put(key, cursor.getDouble(colIndex));
+ } else {
+ values.put(key, (Double) null);
+ }
+ }
+
+ /**
+ * Read the entire contents of a cursor row and store them in a ContentValues.
+ * Blob columns are copied as byte[] only when the cursor is an
+ * AbstractWindowedCursor (the only case where isBlob() is available here);
+ * all other columns — and all columns of other cursor types — go through
+ * getString().
+ *
+ * @param cursor the cursor to read from.
+ * @param values the ContentValues to put the row into.
+ */
+ public static void cursorRowToContentValues(Cursor cursor, ContentValues values) {
+ AbstractWindowedCursor awc =
+ (cursor instanceof AbstractWindowedCursor) ? (AbstractWindowedCursor) cursor : null;
+
+ String[] columns = cursor.getColumnNames();
+ int length = columns.length;
+ for (int i = 0; i < length; i++) {
+ if (awc != null && awc.isBlob(i)) {
+ values.put(columns[i], cursor.getBlob(i));
+ } else {
+ values.put(columns[i], cursor.getString(i));
+ }
+ }
+ }
+
+ /**
+ * Query the table for the number of rows in the table.
+ * @param db the database the table is in
+ * @param table the name of the table to query
+ * @return the number of rows in the table
+ */
+ public static long queryNumEntries(SQLiteDatabase db, String table) {
+ Cursor cursor = db.query(table, countProjection,
+ null, null, null, null, null);
+ try {
+ // count(*) always yields exactly one row
+ cursor.moveToFirst();
+ return cursor.getLong(0);
+ } finally {
+ cursor.close();
+ }
+ }
+
+ /**
+ * Utility method to run the query on the db and return the value in the
+ * first column of the first row.
+ */
+ public static long longForQuery(SQLiteDatabase db, String query, String[] selectionArgs) {
+ SQLiteStatement prog = db.compileStatement(query);
+ try {
+ return longForQuery(prog, selectionArgs);
+ } finally {
+ prog.close();
+ }
+ }
+
+ /**
+ * Utility method to run the pre-compiled query and return the value in the
+ * first column of the first row.
+ * NOTE(review): simpleQueryForLong is documented (Android) to throw when the
+ * query returns no rows — callers must guarantee a row or catch; confirm the
+ * sqlcipher implementation matches.
+ */
+ public static long longForQuery(SQLiteStatement prog, String[] selectionArgs) {
+ if (selectionArgs != null) {
+ int size = selectionArgs.length;
+ for (int i = 0; i < size; i++) {
+ // bind indices are 1-based
+ bindObjectToProgram(prog, i + 1, selectionArgs[i]);
+ }
+ }
+ long value = prog.simpleQueryForLong();
+ return value;
+ }
+
+ /**
+ * Utility method to run the query on the db and return the value in the
+ * first column of the first row.
+ */
+ public static String stringForQuery(SQLiteDatabase db, String query, String[] selectionArgs) {
+ SQLiteStatement prog = db.compileStatement(query);
+ try {
+ return stringForQuery(prog, selectionArgs);
+ } finally {
+ prog.close();
+ }
+ }
+
+ /**
+ * Utility method to run the pre-compiled query and return the value in the
+ * first column of the first row.
+ */
+ public static String stringForQuery(SQLiteStatement prog, String[] selectionArgs) {
+ if (selectionArgs != null) {
+ int size = selectionArgs.length;
+ for (int i = 0; i < size; i++) {
+ // bind indices are 1-based
+ bindObjectToProgram(prog, i + 1, selectionArgs[i]);
+ }
+ }
+ String value = prog.simpleQueryForString();
+ return value;
+ }
+
+ /**
+ * Reads a String out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing to the ContentValues if the column's value is null.
+ * NOTE(review): despite the original wording ("if the column isn't present"),
+ * a missing column is not skipped — getColumnIndexOrThrow throws
+ * IllegalArgumentException.
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorStringToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getString(index));
+ }
+ }
+
+ /**
+ * Reads a Long out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing if the value is null; a missing column throws
+ * IllegalArgumentException (getColumnIndexOrThrow).
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorLongToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getLong(index));
+ }
+ }
+
+ /**
+ * Reads a Short out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing if the value is null; a missing column throws
+ * IllegalArgumentException (getColumnIndexOrThrow).
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorShortToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getShort(index));
+ }
+ }
+
+ /**
+ * Reads a Integer out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing if the value is null; a missing column throws
+ * IllegalArgumentException (getColumnIndexOrThrow).
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorIntToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getInt(index));
+ }
+ }
+
+ /**
+ * Reads a Float out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing if the value is null; a missing column throws
+ * IllegalArgumentException (getColumnIndexOrThrow).
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorFloatToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getFloat(index));
+ }
+ }
+
+ /**
+ * Reads a Double out of a column in a Cursor and writes it to a ContentValues.
+ * Adds nothing if the value is null; a missing column throws
+ * IllegalArgumentException (getColumnIndexOrThrow).
+ *
+ * @param cursor The cursor to read from
+ * @param column The column to read
+ * @param values The ContentValues to put the value into
+ */
+ public static void cursorDoubleToContentValuesIfPresent(Cursor cursor, ContentValues values,
+ String column) {
+ final int index = cursor.getColumnIndexOrThrow(column);
+ if (!cursor.isNull(index)) {
+ values.put(column, cursor.getDouble(index));
+ }
+ }
+
+ /**
+ * This class allows users to do multiple inserts into a table but
+ * compile the SQL insert statement only once, which may increase
+ * performance.
+ */
+ public static class InsertHelper {
+ private final SQLiteDatabase mDb;
+ private final String mTableName;
+ // Maps column name -> 1-based bind index. Generic type arguments restored:
+ // they were stripped (HTML-mangled) — with a raw HashMap, the typed
+ // "Integer index = mColumns.get(key)" in getColumnIndex() does not compile.
+ private HashMap<String, Integer> mColumns;
+ // Lazily built by buildSQL()/getStatement(); null until first use.
+ private String mInsertSQL = null;
+ private SQLiteStatement mInsertStatement = null;
+ private SQLiteStatement mReplaceStatement = null;
+ private SQLiteStatement mPreparedStatement = null;
+
+ /**
+ * {@hide}
+ *
+ * These are the columns returned by sqlite's "PRAGMA
+ * table_info(...)" command that we depend on.
+ */
+ public static final int TABLE_INFO_PRAGMA_COLUMNNAME_INDEX = 1;
+ public static final int TABLE_INFO_PRAGMA_DEFAULT_INDEX = 4;
+
+ /**
+ * @param db the SQLiteDatabase to insert into
+ * @param tableName the name of the table to insert into
+ */
+ public InsertHelper(SQLiteDatabase db, String tableName) {
+ // Construction is cheap: the INSERT statement is compiled lazily on first use.
+ mDb = db;
+ mTableName = tableName;
+ }
+
+ private void buildSQL() throws SQLException {
+ StringBuilder sb = new StringBuilder(128);
+ sb.append("INSERT INTO ");
+ sb.append(mTableName);
+ sb.append(" (");
+
+ StringBuilder sbv = new StringBuilder(128);
+ sbv.append("VALUES (");
+
+ int i = 1;
+ Cursor cur = null;
+ try {
+ cur = mDb.rawQuery("PRAGMA table_info(" + mTableName + ")", null);
+ mColumns = new HashMap(cur.getCount());
+ while (cur.moveToNext()) {
+ String columnName = cur.getString(TABLE_INFO_PRAGMA_COLUMNNAME_INDEX);
+ String defaultValue = cur.getString(TABLE_INFO_PRAGMA_DEFAULT_INDEX);
+
+ mColumns.put(columnName, i);
+ sb.append("'");
+ sb.append(columnName);
+ sb.append("'");
+
+ if (defaultValue == null) {
+ sbv.append("?");
+ } else {
+ sbv.append("COALESCE(?, ");
+ sbv.append(defaultValue);
+ sbv.append(")");
+ }
+
+ sb.append(i == cur.getCount() ? ") " : ", ");
+ sbv.append(i == cur.getCount() ? ");" : ", ");
+ ++i;
+ }
+ } finally {
+ if (cur != null) cur.close();
+ }
+
+ sb.append(sbv);
+
+ mInsertSQL = sb.toString();
+ if (LOCAL_LOGV) Log.v(TAG, "insert statement is " + mInsertSQL);
+ }
+
+ /**
+ * Returns the compiled statement for insert or insert-or-replace, building
+ * and caching it (and the underlying SQL) on first use.
+ * @param allowReplace true for "INSERT OR REPLACE", false for plain "INSERT"
+ */
+ private SQLiteStatement getStatement(boolean allowReplace) throws SQLException {
+ if (allowReplace) {
+ if (mReplaceStatement == null) {
+ if (mInsertSQL == null) buildSQL();
+ // chop "INSERT" off the front and prepend "INSERT OR REPLACE" instead.
+ String replaceSQL = "INSERT OR REPLACE" + mInsertSQL.substring(6);
+ mReplaceStatement = mDb.compileStatement(replaceSQL);
+ }
+ return mReplaceStatement;
+ } else {
+ if (mInsertStatement == null) {
+ if (mInsertSQL == null) buildSQL();
+ mInsertStatement = mDb.compileStatement(mInsertSQL);
+ }
+ return mInsertStatement;
+ }
+ }
+
+ /**
+ * Performs an insert, adding a new row with the given values.
+ *
+ * @param values the set of values with which to populate the
+ * new row
+ * @param allowReplace if true, the statement does "INSERT OR
+ * REPLACE" instead of "INSERT", silently deleting any
+ * previously existing rows that would cause a conflict
+ *
+ * @return the row ID of the newly inserted row, or -1 if an
+ * error occurred
+ */
+ private synchronized long insertInternal(ContentValues values, boolean allowReplace) {
+ try {
+ SQLiteStatement stmt = getStatement(allowReplace);
+ stmt.clearBindings();
+ if (LOCAL_LOGV) Log.v(TAG, "--- inserting in table " + mTableName);
+ // NOTE(review): restored the type arguments that were lost in transit;
+ // ContentValues.valueSet() yields Map.Entry<String, Object>, and the
+ // raw form would not compile against "final String key = e.getKey()".
+ for (Map.Entry<String, Object> e : values.valueSet()) {
+ final String key = e.getKey();
+ int i = getColumnIndex(key);
+ DatabaseUtils.bindObjectToProgram(stmt, i, e.getValue());
+ if (LOCAL_LOGV) {
+ Log.v(TAG, "binding " + e.getValue() + " to column " +
+ i + " (" + key + ")");
+ }
+ }
+ return stmt.executeInsert();
+ } catch (SQLException e) {
+ // Failure is reported as -1, matching the insert()/replace() contract.
+ Log.e(TAG, "Error inserting " + values + " into table " + mTableName, e);
+ return -1;
+ }
+ }
+
+ /**
+ * Returns the index of the specified column. This index is suitable for use
+ * in calls to bind().
+ * @param key the column name
+ * @return the index of the column
+ * @throws IllegalArgumentException if the column does not exist
+ */
+ public int getColumnIndex(String key) {
+ // Ensures buildSQL() has run so that mColumns is populated.
+ getStatement(false);
+ final Integer index = mColumns.get(key);
+ if (index == null) {
+ throw new IllegalArgumentException("column '" + key + "' is invalid");
+ }
+ return index;
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called; otherwise
+ * mPreparedStatement is null and this throws NullPointerException.
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind
+ */
+ public void bind(int index, double value) {
+ mPreparedStatement.bindDouble(index, value);
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * The float is widened and stored via bindDouble().
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind
+ */
+ public void bind(int index, float value) {
+ mPreparedStatement.bindDouble(index, value);
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind
+ */
+ public void bind(int index, long value) {
+ mPreparedStatement.bindLong(index, value);
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * The int is widened and stored via bindLong().
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind
+ */
+ public void bind(int index, int value) {
+ mPreparedStatement.bindLong(index, value);
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * Stored as an integer: 1 for true, 0 for false.
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind
+ */
+ public void bind(int index, boolean value) {
+ mPreparedStatement.bindLong(index, value ? 1 : 0);
+ }
+
+ /**
+ * Bind null to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * For columns with a declared DEFAULT this yields the default value,
+ * because buildSQL() wraps such slots in COALESCE(?, default).
+ * @param index the index of the slot to which to bind
+ */
+ public void bindNull(int index) {
+ mPreparedStatement.bindNull(index);
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * A null array is bound as SQL NULL rather than an empty blob.
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind, or null
+ */
+ public void bind(int index, byte[] value) {
+ if (value == null) {
+ mPreparedStatement.bindNull(index);
+ } else {
+ mPreparedStatement.bindBlob(index, value);
+ }
+ }
+
+ /**
+ * Bind the value to an index. A prepareForInsert() or prepareForReplace()
+ * without a matching execute() must already have been called.
+ * A null string is bound as SQL NULL.
+ * @param index the index of the slot to which to bind
+ * @param value the value to bind, or null
+ */
+ public void bind(int index, String value) {
+ if (value == null) {
+ mPreparedStatement.bindNull(index);
+ } else {
+ mPreparedStatement.bindString(index, value);
+ }
+ }
+
+ /**
+ * Performs an insert, adding a new row with the given values.
+ * If the table contains conflicting rows, an error is
+ * returned.
+ *
+ * <p>Thread-safe (delegates to the synchronized insertInternal()).
+ *
+ * @param values the set of values with which to populate the
+ * new row
+ *
+ * @return the row ID of the newly inserted row, or -1 if an
+ * error occurred
+ */
+ public long insert(ContentValues values) {
+ return insertInternal(values, false);
+ }
+
+ /**
+ * Execute the previously prepared insert or replace using the bound values
+ * since the last call to prepareForInsert or prepareForReplace.
+ *
+ * <p>Note that calling bind() and then execute() is not thread-safe. The only thread-safe
+ * way to use this class is to call insert() or replace().
+ *
+ * @return the row ID of the newly inserted row, or -1 if an
+ * error occurred
+ */
+ public long execute() {
+ if (mPreparedStatement == null) {
+ throw new IllegalStateException("you must prepare this inserter before calling "
+ + "execute");
+ }
+ try {
+ if (LOCAL_LOGV) Log.v(TAG, "--- doing insert or replace in table " + mTableName);
+ return mPreparedStatement.executeInsert();
+ } catch (SQLException e) {
+ Log.e(TAG, "Error executing InsertHelper with table " + mTableName, e);
+ return -1;
+ } finally {
+ // you can only call this once per prepare
+ mPreparedStatement = null;
+ }
+ }
+
+ /**
+ * Prepare the InsertHelper for an insert. The pattern for this is:
+ * <ul>
+ * <li>prepareForInsert()
+ * <li>bind(index, value);
+ * <li>bind(index, value);
+ * <li>...
+ * <li>bind(index, value);
+ * <li>execute();
+ * </ul>
+ */
+ public void prepareForInsert() {
+ mPreparedStatement = getStatement(false);
+ mPreparedStatement.clearBindings();
+ }
+
+ /**
+ * Prepare the InsertHelper for a replace. The pattern for this is:
+ * <ul>
+ * <li>prepareForReplace()
+ * <li>bind(index, value);
+ * <li>bind(index, value);
+ * <li>...
+ * <li>bind(index, value);
+ * <li>execute();
+ * </ul>
+ */
+ public void prepareForReplace() {
+ mPreparedStatement = getStatement(true);
+ mPreparedStatement.clearBindings();
+ }
+
+ /**
+ * Performs an insert, adding a new row with the given values.
+ * If the table contains conflicting rows, they are deleted
+ * and replaced with the new row.
+ *
+ * <p>Thread-safe (delegates to the synchronized insertInternal()).
+ *
+ * @param values the set of values with which to populate the
+ * new row
+ *
+ * @return the row ID of the newly inserted row, or -1 if an
+ * error occurred
+ */
+ public long replace(ContentValues values) {
+ return insertInternal(values, true);
+ }
+
+ /**
+ * Close this object and release any resources associated with
+ * it. The behavior of calling insert() after
+ * calling this method is undefined.
+ *
+ * <p>mPreparedStatement is not closed separately: it only ever aliases
+ * one of the two cached statements closed below (see getStatement()).
+ */
+ public void close() {
+ if (mInsertStatement != null) {
+ mInsertStatement.close();
+ mInsertStatement = null;
+ }
+ if (mReplaceStatement != null) {
+ mReplaceStatement.close();
+ mReplaceStatement = null;
+ }
+ mInsertSQL = null;
+ mColumns = null;
+ }
+ }
+
+ /**
+ * Copies cursor rows, starting at {@code position}, into {@code window}
+ * until the window refuses a new row or the cursor is exhausted. The
+ * cursor's position is restored before returning. Out-of-range positions
+ * are silently ignored.
+ *
+ * @param cursor the source of row data
+ * @param position the 0-based row at which to start filling
+ * @param window the destination window (cleared first)
+ */
+ public static void cursorFillWindow(final Cursor cursor,
+ int position, final android.database.CursorWindow window) {
+ if (position < 0 || position >= cursor.getCount()) {
+ return;
+ }
+ final int oldPos = cursor.getPosition();
+ final int numColumns = cursor.getColumnCount();
+ window.clear();
+ window.setStartPosition(position);
+ window.setNumColumns(numColumns);
+ if (cursor.moveToPosition(position)) {
+ do {
+ if (!window.allocRow()) {
+ // Window is full: stop without error.
+ break;
+ }
+ for (int i = 0; i < numColumns; i++) {
+ final int type = cursor.getType(i);
+ final boolean success;
+ switch (type) {
+ case Cursor.FIELD_TYPE_NULL:
+ success = window.putNull(position, i);
+ break;
+
+ case Cursor.FIELD_TYPE_INTEGER:
+ success = window.putLong(cursor.getLong(i), position, i);
+ break;
+
+ case Cursor.FIELD_TYPE_FLOAT:
+ success = window.putDouble(cursor.getDouble(i), position, i);
+ break;
+
+ case Cursor.FIELD_TYPE_BLOB: {
+ final byte[] value = cursor.getBlob(i);
+ success = value != null ? window.putBlob(value, position, i)
+ : window.putNull(position, i);
+ break;
+ }
+
+ default: // assume value is convertible to String
+ case Cursor.FIELD_TYPE_STRING: {
+ final String value = cursor.getString(i);
+ success = value != null ? window.putString(value, position, i)
+ : window.putNull(position, i);
+ break;
+ }
+ }
+ if (!success) {
+ // Partially-copied row would be corrupt; drop it and stop.
+ window.freeLastRow();
+ break;
+ }
+ }
+ position += 1;
+ } while (cursor.moveToNext());
+ }
+ cursor.moveToPosition(oldPos);
+ }
+
+
+ /**
+ * Creates a db and populates it with the sql statements in sqlStatements.
+ *
+ * @param context the context to use to create the db
+ * @param dbName the name of the db to create
+ * @param dbVersion the version to set on the db
+ * @param sqlStatements the statements to use to populate the db. This should be a single string
+ * of the form returned by sqlite3's .dump command (statements separated by
+ * semicolons)
+ */
+ /*
+ static public void createDbFromSqlStatements(
+ Context context, String dbName, int dbVersion, String sqlStatements) {
+
+ //TODO TODO TODO what needs to happen here
+ SQLiteDatabase db = context.openOrCreateDatabase(dbName, 0, null);
+
+ // TODO: this is not quite safe since it assumes that all semicolons at the end of a line
+ // terminate statements. It is possible that a text field contains ;\n. We will have to fix
+ // this if that turns out to be a problem.
+ String[] statements = TextUtils.split(sqlStatements, ";\n");
+ for (String statement : statements) {
+ if (TextUtils.isEmpty(statement)) continue;
+ db.execSQL(statement);
+ }
+ db.setVersion(dbVersion);
+ db.close();
+ }*/
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultCursorWindowAllocation.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultCursorWindowAllocation.java
new file mode 100644
index 0000000..47b5f54
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultCursorWindowAllocation.java
@@ -0,0 +1,21 @@
+package net.sqlcipher;
+
+import net.sqlcipher.CursorWindowAllocation;
+
+public class DefaultCursorWindowAllocation implements CursorWindowAllocation {
+
+ // Initial window size in bytes (1 MiB); also reused as the growth padding.
+ private long initialAllocationSize = 1024 * 1024;
+ // Returned from getMaxAllocationSize(); zero presumably means
+ // "unbounded", per the field name — TODO confirm against the consumer.
+ private long WindowAllocationUnbounded = 0;
+
+ public long getInitialAllocationSize() {
+ return initialAllocationSize;
+ }
+
+ public long getGrowthPaddingSize() {
+ return initialAllocationSize;
+ }
+
+ public long getMaxAllocationSize() {
+ return WindowAllocationUnbounded;
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultDatabaseErrorHandler.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultDatabaseErrorHandler.java
new file mode 100644
index 0000000..d9f4e0d
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/DefaultDatabaseErrorHandler.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import java.io.File;
+import java.util.List;
+
+import net.sqlcipher.database.SQLiteDatabase;
+import net.sqlcipher.database.SQLiteException;
+
+import android.util.Log;
+import android.util.Pair;
+
+/**
+ * Default class used to define the actions to take when the database corruption is reported
+ * by sqlite.
+ *
+ * If null is specified for DatabaseErrorHandler param in the above calls, then this class is used
+ * as the default {@link DatabaseErrorHandler}.
+ */
+public final class DefaultDatabaseErrorHandler implements DatabaseErrorHandler {
+
+ private final String TAG = getClass().getSimpleName();
+
+ /**
+ * defines the default method to be invoked when database corruption is detected.
+ * Logs, closes the database object if still open, then deletes the file.
+ * @param dbObj the {@link SQLiteDatabase} object representing the database on which corruption
+ * is detected.
+ */
+ public void onCorruption(SQLiteDatabase dbObj) {
+ // NOTE: Unlike the AOSP, this version does NOT attempt to delete any attached databases.
+ // TBD: Are we really certain that the attached databases would really be corrupt?
+ Log.e(TAG, "Corruption reported by sqlite on database, deleting: " + dbObj.getPath());
+
+ if (dbObj.isOpen()) {
+ Log.e(TAG, "Database object for corrupted database is already open, closing");
+
+ try {
+ dbObj.close();
+ } catch (Exception e) {
+ /* ignored */
+ Log.e(TAG, "Exception closing Database object for corrupted database, ignored", e);
+ }
+ }
+
+ deleteDatabaseFile(dbObj.getPath());
+ }
+
+ /**
+ * Deletes the database file, skipping in-memory databases and blank
+ * paths; deletion failures are logged and otherwise ignored.
+ * @param fileName path of the database file to delete
+ */
+ private void deleteDatabaseFile(String fileName) {
+ if (fileName.equalsIgnoreCase(":memory:") || fileName.trim().length() == 0) {
+ return;
+ }
+ Log.e(TAG, "deleting the database file: " + fileName);
+ try {
+ new File(fileName).delete();
+ } catch (Exception e) {
+ /* print warning and ignore exception */
+ Log.w(TAG, "delete failed: " + e.getMessage());
+ }
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/IBulkCursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/IBulkCursor.java
new file mode 100644
index 0000000..a0a8616
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/IBulkCursor.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import android.os.RemoteException;
+import android.os.IBinder;
+import android.os.IInterface;
+import android.os.Bundle;
+
+import java.util.Map;
+
+/**
+ * This interface provides a low-level way to pass bulk cursor data across
+ * both process and language boundaries. Application code should use the Cursor
+ * interface directly.
+ *
+ * {@hide}
+ */
+public interface IBulkCursor extends IInterface {
+ /**
+ * Returns a BulkCursorWindow, which either has a reference to a shared
+ * memory segment with the rows, or an array of JSON strings.
+ */
+ public CursorWindow getWindow(int startPos) throws RemoteException;
+
+ public void onMove(int position) throws RemoteException;
+
+ /**
+ * Returns the number of rows in the cursor.
+ *
+ * @return the number of rows in the cursor.
+ */
+ public int count() throws RemoteException;
+
+ /**
+ * Returns a string array holding the names of all of the columns in the
+ * cursor in the order in which they were listed in the result.
+ *
+ * @return the names of the columns returned in this query.
+ */
+ public String[] getColumnNames() throws RemoteException;
+
+ // NOTE(review): restored the wildcard type arguments, which were lost in
+ // transit ("Map extends Long, ..." is not valid Java).
+ public boolean updateRows(Map<? extends Long, ? extends Map<String, ?>> values) throws RemoteException;
+
+ public boolean deleteRow(int position) throws RemoteException;
+
+ public void deactivate() throws RemoteException;
+
+ public void close() throws RemoteException;
+
+ public int requery(IContentObserver observer, CursorWindow window) throws RemoteException;
+
+ boolean getWantsAllOnMoveCalls() throws RemoteException;
+
+ Bundle getExtras() throws RemoteException;
+
+ Bundle respond(Bundle extras) throws RemoteException;
+
+ /* IPC constants */
+ static final String descriptor = "android.content.IBulkCursor";
+
+ static final int GET_CURSOR_WINDOW_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION;
+ static final int COUNT_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 1;
+ static final int GET_COLUMN_NAMES_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 2;
+ static final int UPDATE_ROWS_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 3;
+ static final int DELETE_ROW_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 4;
+ static final int DEACTIVATE_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 5;
+ static final int REQUERY_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 6;
+ static final int ON_MOVE_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 7;
+ static final int WANTS_ON_MOVE_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 8;
+ static final int GET_EXTRAS_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 9;
+ static final int RESPOND_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 10;
+ static final int CLOSE_TRANSACTION = IBinder.FIRST_CALL_TRANSACTION + 11;
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/InvalidRowColumnException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/InvalidRowColumnException.java
new file mode 100644
index 0000000..275b28d
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/InvalidRowColumnException.java
@@ -0,0 +1,14 @@
+package net.sqlcipher;
+
+/**
+ * An exception that indicates there was an error accessing a specific row/column.
+ */
+public class InvalidRowColumnException extends RuntimeException
+{
+ /** Constructs the exception with no detail message. */
+ public InvalidRowColumnException() {}
+
+ /** Constructs the exception with the given detail message. */
+ public InvalidRowColumnException(String error)
+ {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/MatrixCursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/MatrixCursor.java
new file mode 100644
index 0000000..6ca0798
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/MatrixCursor.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+import java.util.ArrayList;
+
+import android.database.CharArrayBuffer;
+
+
+/**
+ * A mutable cursor implementation backed by an array of {@code Object}s. Use
+ * {@link #newRow()} to add rows. Automatically expands internal capacity
+ * as needed.
+ */
+public class MatrixCursor extends AbstractCursor {
+
+ private final String[] columnNames;
+ // Row-major storage: row r, column c lives at data[r * columnCount + c].
+ private Object[] data;
+ private int rowCount = 0;
+ private final int columnCount;
+
+ /**
+ * Constructs a new cursor with the given initial capacity.
+ *
+ * @param columnNames names of the columns, the ordering of which
+ * determines column ordering elsewhere in this cursor
+ * @param initialCapacity in rows
+ */
+ public MatrixCursor(String[] columnNames, int initialCapacity) {
+ this.columnNames = columnNames;
+ this.columnCount = columnNames.length;
+
+ if (initialCapacity < 1) {
+ initialCapacity = 1;
+ }
+
+ this.data = new Object[columnCount * initialCapacity];
+ }
+
+ /**
+ * Constructs a new cursor.
+ *
+ * @param columnNames names of the columns, the ordering of which
+ * determines column ordering elsewhere in this cursor
+ */
+ public MatrixCursor(String[] columnNames) {
+ this(columnNames, 16);
+ }
+
+ /**
+ * Gets value at the given column for the current row.
+ *
+ * @throws CursorIndexOutOfBoundsException if the column or the current
+ * position (mPos, inherited from AbstractCursor) is out of range
+ */
+ private Object get(int column) {
+ if (column < 0 || column >= columnCount) {
+ throw new CursorIndexOutOfBoundsException("Requested column: "
+ + column + ", # of columns: " + columnCount);
+ }
+ if (mPos < 0) {
+ throw new CursorIndexOutOfBoundsException("Before first row.");
+ }
+ if (mPos >= rowCount) {
+ throw new CursorIndexOutOfBoundsException("After last row.");
+ }
+ return data[mPos * columnCount + column];
+ }
+
+ /**
+ * Adds a new row to the end and returns a builder for that row. Not safe
+ * for concurrent use.
+ *
+ * @return builder which can be used to set the column values for the new
+ * row
+ */
+ public RowBuilder newRow() {
+ rowCount++;
+ int endIndex = rowCount * columnCount;
+ ensureCapacity(endIndex);
+ int start = endIndex - columnCount;
+ return new RowBuilder(start, endIndex);
+ }
+
+ /**
+ * Adds a new row to the end with the given column values. Not safe
+ * for concurrent use.
+ *
+ * @throws IllegalArgumentException if {@code columnValues.length !=
+ * columnNames.length}
+ * @param columnValues in the same order as the column names specified
+ * at cursor construction time
+ */
+ public void addRow(Object[] columnValues) {
+ if (columnValues.length != columnCount) {
+ throw new IllegalArgumentException("columnNames.length = "
+ + columnCount + ", columnValues.length = "
+ + columnValues.length);
+ }
+
+ int start = rowCount++ * columnCount;
+ ensureCapacity(start + columnCount);
+ System.arraycopy(columnValues, 0, data, start, columnCount);
+ }
+
+ /**
+ * Adds a new row to the end with the given column values. Not safe
+ * for concurrent use.
+ *
+ * NOTE(review): restored the wildcard type arguments throughout this
+ * method and its ArrayList overload; "Iterable>" / "ArrayList>" (the
+ * form found here) is not valid Java — the "<?" was lost in transit.
+ *
+ * @throws IllegalArgumentException if {@code columnValues.size() !=
+ * columnNames.length}
+ * @param columnValues in the same order as the column names specified
+ * at cursor construction time
+ */
+ public void addRow(Iterable<?> columnValues) {
+ int start = rowCount * columnCount;
+ int end = start + columnCount;
+ ensureCapacity(end);
+
+ if (columnValues instanceof ArrayList<?>) {
+ addRow((ArrayList<?>) columnValues, start);
+ return;
+ }
+
+ int current = start;
+ Object[] localData = data;
+ for (Object columnValue : columnValues) {
+ if (current == end) {
+ // TODO: null out row?
+ throw new IllegalArgumentException(
+ "columnValues.size() > columnNames.length");
+ }
+ localData[current++] = columnValue;
+ }
+
+ if (current != end) {
+ // TODO: null out row?
+ throw new IllegalArgumentException(
+ "columnValues.size() < columnNames.length");
+ }
+
+ // Increase row count here in case we encounter an exception.
+ rowCount++;
+ }
+
+ /** Optimization for {@link ArrayList}. */
+ private void addRow(ArrayList<?> columnValues, int start) {
+ int size = columnValues.size();
+ if (size != columnCount) {
+ throw new IllegalArgumentException("columnNames.length = "
+ + columnCount + ", columnValues.size() = " + size);
+ }
+
+ rowCount++;
+ Object[] localData = data;
+ for (int i = 0; i < size; i++) {
+ localData[start + i] = columnValues.get(i);
+ }
+ }
+
+ /** Ensures that this cursor has enough capacity, doubling (at least) on growth. */
+ private void ensureCapacity(int size) {
+ if (size > data.length) {
+ Object[] oldData = this.data;
+ int newSize = data.length * 2;
+ if (newSize < size) {
+ newSize = size;
+ }
+ this.data = new Object[newSize];
+ System.arraycopy(oldData, 0, this.data, 0, oldData.length);
+ }
+ }
+
+ /**
+ * Builds a row, starting from the left-most column and adding one column
+ * value at a time. Follows the same ordering as the column names specified
+ * at cursor construction time.
+ */
+ public class RowBuilder {
+
+ private int index;
+ private final int endIndex;
+
+ RowBuilder(int index, int endIndex) {
+ this.index = index;
+ this.endIndex = endIndex;
+ }
+
+ /**
+ * Sets the next column value in this row.
+ *
+ * @throws CursorIndexOutOfBoundsException if you try to add too many
+ * values
+ * @return this builder to support chaining
+ */
+ public RowBuilder add(Object columnValue) {
+ if (index == endIndex) {
+ throw new CursorIndexOutOfBoundsException(
+ "No more columns left.");
+ }
+
+ data[index++] = columnValue;
+ return this;
+ }
+ }
+
+ // AbstractCursor implementation.
+
+ @Override
+ public int getCount() {
+ return rowCount;
+ }
+
+ @Override
+ public String[] getColumnNames() {
+ return columnNames;
+ }
+
+ @Override
+ public String getString(int column) {
+ Object value = get(column);
+ if (value == null) return null;
+ return value.toString();
+ }
+
+ @Override
+ public short getShort(int column) {
+ Object value = get(column);
+ if (value == null) return 0;
+ if (value instanceof Number) return ((Number) value).shortValue();
+ return Short.parseShort(value.toString());
+ }
+
+ @Override
+ public int getInt(int column) {
+ Object value = get(column);
+ if (value == null) return 0;
+ if (value instanceof Number) return ((Number) value).intValue();
+ return Integer.parseInt(value.toString());
+ }
+
+ @Override
+ public long getLong(int column) {
+ Object value = get(column);
+ if (value == null) return 0;
+ if (value instanceof Number) return ((Number) value).longValue();
+ return Long.parseLong(value.toString());
+ }
+
+ @Override
+ public float getFloat(int column) {
+ Object value = get(column);
+ if (value == null) return 0.0f;
+ if (value instanceof Number) return ((Number) value).floatValue();
+ return Float.parseFloat(value.toString());
+ }
+
+ @Override
+ public double getDouble(int column) {
+ Object value = get(column);
+ if (value == null) return 0.0d;
+ if (value instanceof Number) return ((Number) value).doubleValue();
+ return Double.parseDouble(value.toString());
+ }
+
+ @Override
+ public int getType(int column) {
+ return DatabaseUtils.getTypeOfObject(get(column));
+ }
+
+ @Override
+ public boolean isNull(int column) {
+ return get(column) == null;
+ }
+
+
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/RowAllocationException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/RowAllocationException.java
new file mode 100644
index 0000000..a468056
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/RowAllocationException.java
@@ -0,0 +1,15 @@
+package net.sqlcipher;
+
+/**
+ * An exception that indicates there was an error attempting to allocate a row
+ * for the CursorWindow.
+ */
+public class RowAllocationException extends RuntimeException
+{
+ /** Constructs the exception with no detail message. */
+ public RowAllocationException() {}
+
+ /** Constructs the exception with the given detail message. */
+ public RowAllocationException(String error)
+ {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/SQLException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/SQLException.java
new file mode 100644
index 0000000..8c8c037
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/SQLException.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+/**
+ * An exception that indicates there was an error with SQL parsing or execution.
+ */
+public class SQLException extends RuntimeException
+{
+ /** Constructs the exception with no detail message. */
+ public SQLException() {}
+
+ /** Constructs the exception with the given detail message. */
+ public SQLException(String error)
+ {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/StaleDataException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/StaleDataException.java
new file mode 100644
index 0000000..17209bc
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/StaleDataException.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher;
+
+/**
+ * This exception is thrown when a Cursor contains stale data and must be
+ * requeried before being used again.
+ */
+public class StaleDataException extends java.lang.RuntimeException
+{
+ /** Constructs the exception with no detail message. */
+ public StaleDataException()
+ {
+ super();
+ }
+
+ /** Constructs the exception with the given description. */
+ public StaleDataException(String description)
+ {
+ super(description);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/UnknownTypeException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/UnknownTypeException.java
new file mode 100644
index 0000000..4da359f
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/UnknownTypeException.java
@@ -0,0 +1,14 @@
+package net.sqlcipher;
+
+/**
+ * An exception that indicates an unknown type was returned.
+ */
+public class UnknownTypeException extends RuntimeException
+{
+ /** Constructs the exception with no detail message. */
+ public UnknownTypeException() {}
+
+ /** Constructs the exception with the given detail message. */
+ public UnknownTypeException(String error)
+ {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/BindingsRecorder.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/BindingsRecorder.java
new file mode 100644
index 0000000..7c1d775
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/BindingsRecorder.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2019 Mark L. Murphy
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import android.util.SparseArray;
+import androidx.sqlite.db.SupportSQLiteProgram;
+
+/*
+ This class is a concrete implementation of SupportSQLiteProgram,
+ for use in supporting bind arguments for the SQLiteDatabase query()
+ methods that take a SupportSQLiteQuery parameter.
+
+ In Google's FrameworkSQLiteDatabase implementation of those query()
+ methods, it uses FrameworkSQLiteProgram, which has the same basic implementation
+ as does BindingsRecorder.
+ */
+class BindingsRecorder implements SupportSQLiteProgram {
+ private SparseArray
+ *
+ * @param sql the SQL string to compile
+ * @param forceCompilation forces the SQL to be recompiled in the event that there is an
+ * existing compiled SQL program already around
+ */
+ private void compile(String sql, boolean forceCompilation) {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ // Only compile if we don't have a valid statement already or the caller has
+ // explicitly requested a recompile.
+ if (forceCompilation) {
+ mDatabase.lock();
+ try {
+ // Note that the native_compile() takes care of destroying any previously
+ // existing programs before it compiles.
+ native_compile(sql);
+ } finally {
+ mDatabase.unlock();
+ }
+ }
+ }
+
+ /* package */ void releaseSqlStatement() {
+ // Note that native_finalize() checks to make sure that nStatement is
+ // non-null before destroying it.
+ if (nStatement != 0) {
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "closed and deallocated DbObj (id#" + nStatement +")");
+ }
+ try {
+ mDatabase.lock();
+ native_finalize();
+ nStatement = 0;
+ } finally {
+ mDatabase.unlock();
+ }
+ }
+ }
+
+ /**
+ * returns true if acquire() succeeds. false otherwise.
+ */
+ /* package */ synchronized boolean acquire() {
+ if (mInUse) {
+ // someone already has acquired it.
+ return false;
+ }
+ mInUse = true;
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "Acquired DbObj (id#" + nStatement + ") from DB cache");
+ }
+ return true;
+ }
+
+ /* package */ synchronized void release() {
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "Released DbObj (id#" + nStatement + ") back to DB cache");
+ }
+ mInUse = false;
+ }
+
+ /**
+ * Make sure that the native resource is cleaned up.
+ */
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ if (nStatement == 0) return;
+ // finalizer should NEVER get called
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "** warning ** Finalized DbObj (id#" + nStatement + ")");
+ }
+ int len = mSqlStmt.length();
+ Log.w(TAG, "Releasing statement in a finalizer. Please ensure " +
+ "that you explicitly call close() on your cursor: " +
+ mSqlStmt.substring(0, (len > 100) ? 100 : len), mStackTrace);
+ releaseSqlStatement();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * Compiles SQL into a SQLite program.
+ *
+ *
+ * <p>The database lock must be held when calling this method.
+ * @param sql The SQL to compile.
+ */
+ private final native void native_compile(String sql);
+ private final native void native_finalize();
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteConstraintException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteConstraintException.java
new file mode 100644
index 0000000..d9d548f
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteConstraintException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that an integrity constraint was violated.
+ */
+public class SQLiteConstraintException extends SQLiteException {
+ public SQLiteConstraintException() {}
+
+ public SQLiteConstraintException(String error) {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteContentHelper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteContentHelper.java
new file mode 100644
index 0000000..3300b61
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteContentHelper.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+import net.sqlcipher.*;
+
+import android.content.res.AssetFileDescriptor;
+import android.os.MemoryFile;
+
+import android.database.Cursor;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * Some helper functions for using SQLite database to implement content providers.
+ *
+ * @hide
+ */
+public class SQLiteContentHelper {
+
+ /**
+ * Runs an SQLite query and returns an AssetFileDescriptor for the
+ * blob in column 0 of the first row. If the first column does
+ * not contain a blob, an unspecified exception is thrown.
+ *
+ * @param db Handle to a readable database.
+ * @param sql SQL query, possibly with query arguments.
+ * @param selectionArgs Query argument values, or {@code null} for no argument.
+ * @return If no exception is thrown, a non-null AssetFileDescriptor is returned.
+ * @throws FileNotFoundException If the query returns no results or the
+ * value of column 0 is NULL, or if there is an error creating the
+ * asset file descriptor.
+ */
+ public static AssetFileDescriptor getBlobColumnAsAssetFile(SQLiteDatabase db, String sql,
+ String[] selectionArgs) throws FileNotFoundException {
+ android.os.ParcelFileDescriptor fd = null;
+
+ try {
+ MemoryFile file = simpleQueryForBlobMemoryFile(db, sql, selectionArgs);
+ if (file == null) {
+ throw new FileNotFoundException("No results.");
+ }
+ Class c = file.getClass();
+ try {
+ java.lang.reflect.Method m = c.getDeclaredMethod("getParcelFileDescriptor");
+ m.setAccessible(true);
+ fd = (android.os.ParcelFileDescriptor)m.invoke(file);
+ } catch (Exception e) {
+ android.util.Log.i("SQLiteContentHelper", "SQLiteCursor.java: " + e);
+ }
+ AssetFileDescriptor afd = new AssetFileDescriptor(fd, 0, file.length());
+ return afd;
+ } catch (IOException ex) {
+ throw new FileNotFoundException(ex.toString());
+ }
+ }
+
+ /**
+ * Runs an SQLite query and returns a MemoryFile for the
+ * blob in column 0 of the first row. If the first column does
+ * not contain a blob, an unspecified exception is thrown.
+ *
+ * @return A memory file, or {@code null} if the query returns no results
+ * or the value column 0 is NULL.
+ * @throws IOException If there is an error creating the memory file.
+ */
+ // TODO: make this native and use the SQLite blob API to reduce copying
+ private static MemoryFile simpleQueryForBlobMemoryFile(SQLiteDatabase db, String sql,
+ String[] selectionArgs) throws IOException {
+ Cursor cursor = db.rawQuery(sql, selectionArgs);
+ if (cursor == null) {
+ return null;
+ }
+ try {
+ if (!cursor.moveToFirst()) {
+ return null;
+ }
+ byte[] bytes = cursor.getBlob(0);
+ if (bytes == null) {
+ return null;
+ }
+ MemoryFile file = new MemoryFile(null, bytes.length);
+ file.writeBytes(bytes, 0, 0, bytes.length);
+
+ // file.deactivate();
+ return file;
+ } finally {
+ cursor.close();
+ }
+ }
+
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursor.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursor.java
new file mode 100644
index 0000000..258699b
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursor.java
@@ -0,0 +1,700 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.AbstractWindowedCursor;
+import net.sqlcipher.BuildConfig;
+import net.sqlcipher.CursorWindow;
+import net.sqlcipher.SQLException;
+
+import java.lang.ref.WeakReference;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import android.database.CharArrayBuffer;
+import android.database.DataSetObserver;
+import android.os.Handler;
+import android.os.Message;
+import android.os.Process;
+import android.text.TextUtils;
+import android.util.Config;
+import android.util.Log;
+
+/**
+ * A Cursor implementation that exposes results from a query on a
+ * {@link SQLiteDatabase}.
+ *
+ * SQLiteCursor is not internally synchronized so code using a SQLiteCursor from multiple
+ * threads should perform its own synchronization when using the SQLiteCursor.
+ */
+public class SQLiteCursor extends AbstractWindowedCursor {
+ static final String TAG = "Cursor";
+ static final int NO_COUNT = -1;
+
+ /** The name of the table to edit */
+ private String mEditTable;
+
+ /** The names of the columns in the rows */
+ private String[] mColumns;
+
+ /** The query object for the cursor */
+ private SQLiteQuery mQuery;
+
+ /** The database the cursor was created from */
+ private SQLiteDatabase mDatabase;
+
+ /** The compiled query this cursor came from */
+ private SQLiteCursorDriver mDriver;
+
+ /** The number of rows in the cursor */
+ private int mCount = NO_COUNT;
+
+ private int mCursorWindowCapacity = 0;
+
+ private boolean fillWindowForwardOnly = false;
+
+ /** A mapping of column names to column indices, to speed up lookups */
+ private Map<String, Integer> mColumnNameMap;
+
+ /** Used to find out where a cursor was allocated in case it never got released. */
+ private Throwable mStackTrace;
+
+ /**
+ * mMaxRead is the max items that each cursor window reads
+ * default to a very high value
+ */
+ private int mMaxRead = Integer.MAX_VALUE;
+ private int mInitialRead = Integer.MAX_VALUE;
+ private int mCursorState = 0;
+ private ReentrantLock mLock = null;
+ private boolean mPendingData = false;
+
+ public void setFillWindowForwardOnly(boolean value) {
+ fillWindowForwardOnly = value;
+ }
+
+ /**
+ * support for a cursor variant that doesn't always read all results
+ * initialRead is the initial number of items that cursor window reads
+ * if query contains more than this number of items, a thread will be
+ * created and handle the left over items so that caller can show
+ * results as soon as possible
+ * @param initialRead initial number of items that cursor read
+ * @param maxRead leftover items read at maxRead items per time
+ * @hide
+ */
+ public void setLoadStyle(int initialRead, int maxRead) {
+ mMaxRead = maxRead;
+ mInitialRead = initialRead;
+ mLock = new ReentrantLock(true);
+ }
+
+ private void queryThreadLock() {
+ if (mLock != null) {
+ mLock.lock();
+ }
+ }
+
+ private void queryThreadUnlock() {
+ if (mLock != null) {
+ mLock.unlock();
+ }
+ }
+
+
+ /**
+ * @hide
+ */
+ final private class QueryThread implements Runnable {
+ private final int mThreadState;
+ QueryThread(int version) {
+ mThreadState = version;
+ }
+ private void sendMessage() {
+ if (mNotificationHandler != null) {
+ mNotificationHandler.sendEmptyMessage(1);
+ mPendingData = false;
+ } else {
+ mPendingData = true;
+ }
+
+ }
+ public void run() {
+ // use cached mWindow, to avoid get null mWindow
+ CursorWindow cw = mWindow;
+ Process.setThreadPriority(Process.myTid(), Process.THREAD_PRIORITY_BACKGROUND);
+ // the cursor's state doesn't change
+ while (true) {
+ if(mLock == null){
+ mLock = new ReentrantLock(true);
+ }
+ mLock.lock();
+ if (mCursorState != mThreadState) {
+ mLock.unlock();
+ break;
+ }
+ try {
+ int count = mQuery.fillWindow(cw, mMaxRead, mCount);
+ // return -1 means not finished
+ if (count != 0) {
+ if (count == NO_COUNT){
+ mCount += mMaxRead;
+ sendMessage();
+ } else {
+ mCount = count;
+ sendMessage();
+ break;
+ }
+ } else {
+ break;
+ }
+ } catch (Exception e) {
+ // end the thread when the cursor is closed
+ break;
+ } finally {
+ mLock.unlock();
+ }
+ }
+ }
+ }
+
+
+ /**
+ * @hide
+ */
+ protected static class MainThreadNotificationHandler extends Handler {
+
+ private final WeakReference<SQLiteCursor> wrappedCursor;
+
+ MainThreadNotificationHandler(SQLiteCursor cursor) {
+ wrappedCursor = new WeakReference<SQLiteCursor>(cursor);
+ }
+
+ public void handleMessage(Message msg) {
+ SQLiteCursor cursor = wrappedCursor.get();
+ if(cursor != null){
+ cursor.notifyDataSetChange();
+ }
+ }
+ }
+
+ /**
+ * @hide
+ */
+ protected MainThreadNotificationHandler mNotificationHandler;
+
+ public void registerDataSetObserver(DataSetObserver observer) {
+ super.registerDataSetObserver(observer);
+ if ((Integer.MAX_VALUE != mMaxRead || Integer.MAX_VALUE != mInitialRead) &&
+ mNotificationHandler == null) {
+ queryThreadLock();
+ try {
+ mNotificationHandler = new MainThreadNotificationHandler(this);
+ if (mPendingData) {
+ notifyDataSetChange();
+ mPendingData = false;
+ }
+ } finally {
+ queryThreadUnlock();
+ }
+ }
+
+ }
+
+ /**
+ * Execute a query and provide access to its result set through a Cursor
+ * interface. For a query such as: {@code SELECT name, birth, phone FROM
+ * myTable WHERE ... LIMIT 1,20 ORDER BY...} the column names (name, birth,
+ * phone) would be in the projection argument and everything from
+ * {@code FROM} onward would be in the params argument. This constructor
+ * has package scope.
+ *
+ * @param db a reference to a Database object that is already constructed
+ * and opened
+ * @param editTable the name of the table used for this query
+ * @param query the rest of the query terms
+ * cursor is finalized
+ */
+ public SQLiteCursor(SQLiteDatabase db, SQLiteCursorDriver driver,
+ String editTable, SQLiteQuery query) {
+ // The AbstractCursor constructor needs to do some setup.
+ super();
+ mStackTrace = new DatabaseObjectNotClosedException().fillInStackTrace();
+ mDatabase = db;
+ mDriver = driver;
+ mEditTable = editTable;
+ mColumnNameMap = null;
+ mQuery = query;
+
+ try {
+ db.lock();
+
+ // Setup the list of columns
+ int columnCount = mQuery.columnCountLocked();
+ mColumns = new String[columnCount];
+
+ // Read in all column names
+ for (int i = 0; i < columnCount; i++) {
+ String columnName = mQuery.columnNameLocked(i);
+ mColumns[i] = columnName;
+ if(BuildConfig.DEBUG){
+ Log.v("DatabaseWindow", "mColumns[" + i + "] is "
+ + mColumns[i]);
+ }
+
+ // Make note of the row ID column index for quick access to it
+ if ("_id".equals(columnName)) {
+ mRowIdColumnIndex = i;
+ }
+ }
+ } finally {
+ db.unlock();
+ }
+ }
+
+ /**
+ * @return the SQLiteDatabase that this cursor is associated with.
+ */
+ public SQLiteDatabase getDatabase() {
+ return mDatabase;
+ }
+
+ @Override
+ public boolean onMove(int oldPosition, int newPosition) {
+ // Make sure the row at newPosition is present in the window
+ if (mWindow == null || newPosition < mWindow.getStartPosition() ||
+ newPosition >= (mWindow.getStartPosition() + mWindow.getNumRows())) {
+ fillWindow(newPosition);
+ }
+
+ return true;
+ }
+
+ @Override
+ public int getCount() {
+ if (mCount == NO_COUNT) {
+ fillWindow(0);
+ }
+ return mCount;
+ }
+
+ private void fillWindow (int requiredPos) {
+ int startPos = 0;
+ if (mWindow == null) {
+ // If there isn't a window set already it will only be accessed locally
+ mWindow = new CursorWindow(true /* the window is local only */);
+ } else {
+ mCursorState++;
+ queryThreadLock();
+ try {
+ mWindow.clear();
+ } finally {
+ queryThreadUnlock();
+ }
+ }
+ if(fillWindowForwardOnly) {
+ startPos = requiredPos;
+ } else {
+ startPos = mCount == NO_COUNT
+ ? cursorPickFillWindowStartPosition(requiredPos, 0)
+ : cursorPickFillWindowStartPosition(requiredPos, mCursorWindowCapacity);
+ }
+ mWindow.setStartPosition(startPos);
+ mWindow.setRequiredPosition(requiredPos);
+ if(BuildConfig.DEBUG){
+ Log.v(TAG, String.format("Filling cursor window with start position:%d required position:%d",
+ startPos, requiredPos));
+ }
+ mCount = mQuery.fillWindow(mWindow, mInitialRead, 0);
+ if(mCursorWindowCapacity == 0) {
+ mCursorWindowCapacity = mWindow.getNumRows();
+ }
+ // return -1 means not finished
+ if (mCount == NO_COUNT){
+ mCount = startPos + mInitialRead;
+ Thread t = new Thread(new QueryThread(mCursorState), "query thread");
+ t.start();
+ }
+ }
+
+ @Override
+ public int getColumnIndex(String columnName) {
+ // Create mColumnNameMap on demand
+ if (mColumnNameMap == null) {
+ String[] columns = mColumns;
+ int columnCount = columns.length;
+ HashMap<String, Integer> map = new HashMap<String, Integer>(columnCount, 1);
+ for (int i = 0; i < columnCount; i++) {
+ map.put(columns[i], i);
+ }
+ mColumnNameMap = map;
+ }
+
+ // Hack according to bug 903852
+ final int periodIndex = columnName.lastIndexOf('.');
+ if (periodIndex != -1) {
+ Exception e = new Exception();
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "requesting column name with table name -- " + columnName, e);
+ columnName = columnName.substring(periodIndex + 1);
+ }
+ }
+
+ Integer i = mColumnNameMap.get(columnName);
+ if (i != null) {
+ return i.intValue();
+ } else {
+ return -1;
+ }
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ // @Override
+ public boolean deleteRow() {
+ checkPosition();
+
+ // Only allow deletes if there is an ID column, and the ID has been read from it
+ if (mRowIdColumnIndex == -1 || mCurrentRowID == null) {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG,
+ "Could not delete row because either the row ID column is not available or it" +
+ "has not been read.");
+ }
+ return false;
+ }
+
+ boolean success;
+
+ /*
+ * Ensure we don't change the state of the database when another
+ * thread is holding the database lock. requery() and moveTo() are also
+ * synchronized here to make sure they get the state of the database
+ * immediately following the DELETE.
+ */
+ mDatabase.lock();
+ try {
+ try {
+ mDatabase.delete(mEditTable, mColumns[mRowIdColumnIndex] + "=?",
+ new String[] {mCurrentRowID.toString()});
+ success = true;
+ } catch (SQLException e) {
+ success = false;
+ }
+
+ int pos = mPos;
+ requery();
+
+ /*
+ * Ensure proper cursor state. Note that mCurrentRowID changes
+ * in this call.
+ */
+ moveToPosition(pos);
+ } finally {
+ mDatabase.unlock();
+ }
+
+ if (success) {
+ onChange(true);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public String[] getColumnNames() {
+ return mColumns;
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ // @Override
+ public boolean supportsUpdates() {
+ // return super.supportsUpdates() && !TextUtils.isEmpty(mEditTable);
+ return !TextUtils.isEmpty(mEditTable);
+ }
+
+ /**
+ * @hide
+ * @deprecated
+ */
+ // @Override
+ public boolean commitUpdates(Map<? extends Long,
+ ? extends Map<String, Object>> additionalValues) {
+ if (!supportsUpdates()) {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "commitUpdates not supported on this cursor, did you "
+ + "include the _id column?");
+ }
+ return false;
+ }
+
+ /*
+ * Prevent other threads from changing the updated rows while they're
+ * being processed here.
+ */
+ synchronized (mUpdatedRows) {
+ if (additionalValues != null) {
+ mUpdatedRows.putAll(additionalValues);
+ }
+
+ if (mUpdatedRows.size() == 0) {
+ return true;
+ }
+
+ /*
+ * Prevent other threads from changing the database state while
+ * we process the updated rows, and prevents us from changing the
+ * database behind the back of another thread.
+ */
+ mDatabase.beginTransaction();
+ try {
+ StringBuilder sql = new StringBuilder(128);
+
+ // For each row that has been updated
+ for (Map.Entry<Long, Map<String, Object>> rowEntry :
+ mUpdatedRows.entrySet()) {
+ Map<String, Object> values = rowEntry.getValue();
+ Long rowIdObj = rowEntry.getKey();
+
+ if (rowIdObj == null || values == null) {
+ throw new IllegalStateException("null rowId or values found! rowId = "
+ + rowIdObj + ", values = " + values);
+ }
+
+ if (values.size() == 0) {
+ continue;
+ }
+
+ long rowId = rowIdObj.longValue();
+
+ Iterator<Map.Entry<String, Object>> valuesIter =
+ values.entrySet().iterator();
+
+ sql.setLength(0);
+ sql.append("UPDATE " + mEditTable + " SET ");
+
+ // For each column value that has been updated
+ Object[] bindings = new Object[values.size()];
+ int i = 0;
+ while (valuesIter.hasNext()) {
+ Map.Entry<String, Object> entry = valuesIter.next();
+ sql.append(entry.getKey());
+ sql.append("=?");
+ bindings[i] = entry.getValue();
+ if (valuesIter.hasNext()) {
+ sql.append(", ");
+ }
+ i++;
+ }
+
+ sql.append(" WHERE " + mColumns[mRowIdColumnIndex]
+ + '=' + rowId);
+ sql.append(';');
+ mDatabase.execSQL(sql.toString(), bindings);
+ mDatabase.rowUpdated(mEditTable, rowId);
+ }
+ mDatabase.setTransactionSuccessful();
+ } finally {
+ mDatabase.endTransaction();
+ }
+
+ mUpdatedRows.clear();
+ }
+
+ // Let any change observers know about the update
+ onChange(true);
+
+ return true;
+ }
+
+ private void deactivateCommon() {
+ if(BuildConfig.DEBUG) Log.v(TAG, "<<< Releasing cursor " + this);
+ mCursorState = 0;
+ if (mWindow != null) {
+ mWindow.close();
+ mWindow = null;
+ }
+ if(BuildConfig.DEBUG) Log.v("DatabaseWindow", "closing window in release()");
+ }
+
+ @Override
+ public void deactivate() {
+ super.deactivate();
+ deactivateCommon();
+ mDriver.cursorDeactivated();
+ }
+
+ @Override
+ public void close() {
+ super.close();
+ deactivateCommon();
+ mQuery.close();
+ mDriver.cursorClosed();
+ }
+
+ @Override
+ public boolean requery() {
+ if (isClosed()) {
+ return false;
+ }
+ long timeStart = 0;
+ if (Config.LOGV) {
+ timeStart = System.currentTimeMillis();
+ }
+ /*
+ * Synchronize on the database lock to ensure that mCount matches the
+ * results of mQuery.requery().
+ */
+ mDatabase.lock();
+ try {
+ if (mWindow != null) {
+ mWindow.clear();
+ }
+ mPos = -1;
+ // This one will recreate the temp table, and get its count
+ mDriver.cursorRequeried(this);
+ mCount = NO_COUNT;
+ mCursorState++;
+ queryThreadLock();
+ try {
+ mQuery.requery();
+ } finally {
+ queryThreadUnlock();
+ }
+ } finally {
+ mDatabase.unlock();
+ }
+
+ if(BuildConfig.DEBUG){
+ Log.v("DatabaseWindow", "closing window in requery()");
+ Log.v(TAG, "--- Requery()ed cursor " + this + ": " + mQuery);
+ }
+
+ boolean result = super.requery();
+ if(BuildConfig.DEBUG){
+ long timeEnd = System.currentTimeMillis();
+ Log.v(TAG, "requery (" + (timeEnd - timeStart) + " ms): " + mDriver.toString());
+ }
+ return result;
+ }
+
+ @Override
+ public void setWindow(CursorWindow window) {
+ if (mWindow != null) {
+ mCursorState++;
+ queryThreadLock();
+ try {
+ mWindow.close();
+ } finally {
+ queryThreadUnlock();
+ }
+ mCount = NO_COUNT;
+ }
+ mWindow = window;
+ }
+
+ /**
+ * Changes the selection arguments. The new values take effect after a call to requery().
+ */
+ public void setSelectionArguments(String[] selectionArgs) {
+ mDriver.setBindArguments(selectionArgs);
+ }
+
+ /**
+ * Release the native resources, if they haven't been released yet.
+ */
+ @Override
+ protected void finalize() {
+ try {
+ // if the cursor hasn't been closed yet, close it first
+ if (mWindow != null) {
+ int len = mQuery.mSql.length();
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Finalizing a Cursor that has not been deactivated or closed. " +
+ "database = " + mDatabase.getPath() + ", table = " + mEditTable +
+ ", query = " + mQuery.mSql.substring(0, (len > 100) ? 100 : len),
+ mStackTrace);
+ }
+ close();
+ SQLiteDebug.notifyActiveCursorFinalized();
+ } else {
+ if(BuildConfig.DEBUG) {
+ Log.v(TAG, "Finalizing cursor on database = " + mDatabase.getPath() +
+ ", table = " + mEditTable + ", query = " + mQuery.mSql);
+ }
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+
+
+ @Override
+ public void fillWindow(int requiredPos, android.database.CursorWindow window) {
+ int startPos = 0;
+ if (mWindow == null) {
+ // If there isn't a window set already it will only be accessed locally
+ mWindow = new CursorWindow(true /* the window is local only */);
+ } else {
+ mCursorState++;
+ queryThreadLock();
+ try {
+ mWindow.clear();
+ } finally {
+ queryThreadUnlock();
+ }
+ }
+ if(fillWindowForwardOnly) {
+ startPos = requiredPos;
+ } else {
+ startPos = mCount == NO_COUNT
+ ? cursorPickFillWindowStartPosition(requiredPos, 0)
+ : cursorPickFillWindowStartPosition(requiredPos, mCursorWindowCapacity);
+ }
+ mWindow.setStartPosition(startPos);
+ mWindow.setRequiredPosition(requiredPos);
+ if(BuildConfig.DEBUG) {
+ Log.v(TAG, String.format("Filling cursor window with start position:%d required position:%d",
+ startPos, requiredPos));
+ }
+ mCount = mQuery.fillWindow(mWindow, mInitialRead, 0);
+ if(mCursorWindowCapacity == 0) {
+ mCursorWindowCapacity = mWindow.getNumRows();
+ }
+ // return -1 means not finished
+ if (mCount == NO_COUNT){
+ mCount = startPos + mInitialRead;
+ Thread t = new Thread(new QueryThread(mCursorState), "query thread");
+ t.start();
+ }
+ }
+
+ public int cursorPickFillWindowStartPosition(
+ int cursorPosition, int cursorWindowCapacity) {
+ return Math.max(cursorPosition - cursorWindowCapacity / 3, 0);
+ }
+
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursorDriver.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursorDriver.java
new file mode 100644
index 0000000..1ea66ab
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteCursorDriver.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.database.SQLiteDatabase.CursorFactory;
+import net.sqlcipher.*;
+
+/**
+ * A driver for SQLiteCursors that is used to create them and gets notified
+ * by the cursors it creates on significant events in their lifetimes.
+ */
+public interface SQLiteCursorDriver {
+ /**
+ * Executes the query returning a Cursor over the result set.
+ *
+ * @param factory The CursorFactory to use when creating the Cursors, or
+ * null if standard SQLiteCursors should be returned.
+ * @return a Cursor over the result set
+ */
+ Cursor query(CursorFactory factory, String[] bindArgs);
+
+ /**
+ * Called by a SQLiteCursor when it is released.
+ */
+ void cursorDeactivated();
+
+ /**
+ * Called by a SQLiteCursor when it is requeryed.
+ *
+ * @return The new count value.
+ */
+ void cursorRequeried(android.database.Cursor cursor);
+
+ /**
+ * Called by a SQLiteCursor when it it closed to destroy this object as well.
+ */
+ void cursorClosed();
+
+ /**
+ * Set new bind arguments. These will take effect in cursorRequeried().
+ * @param bindArgs the new arguments
+ */
+ public void setBindArguments(String[] bindArgs);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabase.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabase.java
new file mode 100644
index 0000000..69a7392
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabase.java
@@ -0,0 +1,3269 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.BuildConfig;
+import net.sqlcipher.Cursor;
+import net.sqlcipher.CrossProcessCursorWrapper;
+import net.sqlcipher.DatabaseUtils;
+import net.sqlcipher.DatabaseErrorHandler;
+import net.sqlcipher.DefaultDatabaseErrorHandler;
+import net.sqlcipher.SQLException;
+import net.sqlcipher.database.SQLiteDebug.DbStats;
+import net.sqlcipher.database.SQLiteDatabaseHook;
+import net.sqlcipher.database.SQLiteQueryStats;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.Charset;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.WeakHashMap;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.regex.Pattern;
+import java.util.zip.ZipInputStream;
+
+import android.content.ContentValues;
+
+import android.content.Context;
+
+import android.os.CancellationSignal;
+import android.os.Debug;
+import android.os.SystemClock;
+import android.text.TextUtils;
+import android.util.Config;
+import android.util.Log;
+import android.util.Pair;
+
+import java.io.UnsupportedEncodingException;
+import androidx.sqlite.db.SupportSQLiteDatabase;
+import androidx.sqlite.db.SupportSQLiteQuery;
+
+/**
+ * Exposes methods to manage a SQLCipher database.
+ *
+ * <p>SQLiteDatabase has methods to create, delete, execute SQL commands, and
+ * perform other common database management tasks.
+ *
+ * <p>A call to loadLibs(…) should occur before attempting to
+ * create or open a database connection.
+ *
+ * <p>Database names must be unique within an application, not across all
+ * applications.
+ *
+ */
+public class SQLiteDatabase extends SQLiteClosable implements
+ SupportSQLiteDatabase {
+ // Log tag used by every Log call in this class.
+ private static final String TAG = "Database";
+ // EventLog tag codes for DB operation / corruption records.
+ private static final int EVENT_DB_OPERATION = 52000;
+ private static final int EVENT_DB_CORRUPT = 75004;
+ // Charset name for converting password characters to raw key bytes —
+ // presumably consumed by getBytes(); TODO confirm against that helper.
+ private static final String KEY_ENCODING = "UTF-8";
+
+ // Mirrors SQLite's BEGIN DEFERRED / IMMEDIATE / EXCLUSIVE transaction modes.
+ private enum SQLiteDatabaseTransactionType {
+ Deferred,
+ Immediate,
+ Exclusive,
+ }
+
+ /**
+ * The version number of the SQLCipher for Android Java client library.
+ */
+ public static final String SQLCIPHER_ANDROID_VERSION = BuildConfig.VERSION_NAME;
+
+ // Stores reference to all databases opened in the current process.
+ // (The referent Object is not used at this time.)
+ // INVARIANT: Guarded by sActiveDatabases.
+ // NOTE(review): generic type parameters appear to have been stripped in this
+ // copy (likely WeakHashMap<SQLiteDatabase, Object>); raw type left as-is.
+ private static WeakHashMap sActiveDatabases =
+ new WeakHashMap();
+
+ /**
+ * Reads a native status counter.
+ *
+ * @param operation status operation code — semantics defined by the native layer
+ * @param reset presumably resets the counter/highwater mark after reading —
+ * TODO confirm against native_status()
+ * @return the value reported by the native layer
+ */
+ public int status(int operation, boolean reset){
+ return native_status(operation, reset);
+ }
+
+ /**
+ * Change the password of the open database using sqlite3_rekey().
+ *
+ * @param password new database password; a null password is a no-op
+ *
+ * @throws SQLiteException if there is an issue changing the password internally
+ * OR if the database is not open
+ *
+ * FUTURE @todo throw IllegalStateException if the database is not open and
+ * update the test suite
+ */
+ public void changePassword(String password) throws SQLiteException {
+ /* safeguard: */
+ if (!isOpen()) {
+ throw new SQLiteException("database not open");
+ }
+ if (password != null) {
+ byte[] keyMaterial = getBytes(password.toCharArray());
+ try {
+ rekey(keyMaterial);
+ } finally {
+ // Fix: zero the derived key bytes even when rekey() throws, so key
+ // material does not linger on the heap after a failed rekey.
+ Arrays.fill(keyMaterial, (byte) 0);
+ }
+ }
+ }
+
+ /**
+ * Change the password of the open database using sqlite3_rekey().
+ *
+ * @param password new database password (char array); null is a no-op
+ *
+ * @throws SQLiteException if there is an issue changing the password internally
+ * OR if the database is not open
+ *
+ * FUTURE @todo throw IllegalStateException if the database is not open and
+ * update the test suite
+ */
+ public void changePassword(char[] password) throws SQLiteException {
+ // Guard: rekeying requires an open native handle.
+ if (!isOpen()) {
+ throw new SQLiteException("database not open");
+ }
+ if (password == null) {
+ return;
+ }
+ byte[] key = getBytes(password);
+ rekey(key);
+ // Zero the derived key bytes so they do not linger on the heap.
+ Arrays.fill(key, (byte) 0);
+ }
+
+ /**
+ * Extracts the bundled ICU data file (icudt46l.dat) from the app's assets
+ * zip into workingDir/icu, if it is not already present on disk.
+ *
+ * @param context used to access the application's assets
+ * @param workingDir directory under which the icu/ subdirectory is created
+ * @throws RuntimeException wrapping any extraction or stream-close failure
+ */
+ private static void loadICUData(Context context, File workingDir) {
+ OutputStream out = null;
+ ZipInputStream in = null;
+ File icuDir = new File(workingDir, "icu");
+ File icuDataFile = new File(icuDir, "icudt46l.dat");
+ try {
+ if(!icuDir.exists()) icuDir.mkdirs();
+ if(!icuDataFile.exists()) {
+ in = new ZipInputStream(context.getAssets().open("icudt46l.zip"));
+ // The zip is read as a stream; position on its first (only) entry.
+ in.getNextEntry();
+ out = new FileOutputStream(icuDataFile);
+ byte[] buf = new byte[1024];
+ int len;
+ while ((len = in.read(buf)) > 0) {
+ out.write(buf, 0, len);
+ }
+ }
+ }
+ catch (Exception ex) {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Error copying icu dat file", ex);
+ }
+ // Delete a partially-written data file before propagating the failure.
+ if(icuDataFile.exists()){
+ icuDataFile.delete();
+ }
+ throw new RuntimeException(ex);
+ }
+ finally {
+ try {
+ if(in != null){
+ in.close();
+ }
+ if(out != null){
+ out.flush();
+ out.close();
+ }
+ } catch (IOException ioe){
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Error in closing streams IO streams after expanding ICU dat file", ioe);
+ }
+ throw new RuntimeException(ioe);
+ }
+ }
+ }
+
+ /**
+ * Implement this interface to provide custom strategy for loading jni libraries.
+ */
+ public interface LibraryLoader {
+ /**
+ * Load jni libraries by given names.
+ * Straightforward implementation will be calling {@link System#loadLibrary(String name)}
+ * for every provided library name.
+ *
+ * @param libNames library names that sqlcipher needs to load
+ */
+ void loadLibraries(String... libNames);
+ }
+
+ /**
+ * Loads the native SQLCipher library into the application process,
+ * using the application's files directory as the working directory.
+ *
+ * @param context used to resolve the default working directory
+ */
+ public static synchronized void loadLibs (Context context) {
+ loadLibs(context, context.getFilesDir());
+ }
+
+ /**
+ * Loads the native SQLCipher library into the application process with a
+ * default loader backed by {@link System#loadLibrary(String)}.
+ *
+ * @param workingDir working directory forwarded to the full overload
+ */
+ public static synchronized void loadLibs (Context context, File workingDir) {
+ loadLibs(context, workingDir, new LibraryLoader() {
+ @Override
+ public void loadLibraries(String... libNames) {
+ for (String libName : libNames) {
+ System.loadLibrary(libName);
+ }
+ }
+ });
+ }
+
+ /**
+ * Loads the native SQLCipher library into the application process with a
+ * caller-supplied {@link LibraryLoader}.
+ */
+ public static synchronized void loadLibs(Context context, LibraryLoader libraryLoader) {
+ loadLibs(context, context.getFilesDir(), libraryLoader);
+ }
+
+ /**
+ * Loads the native "sqlcipher" library into the application process via the
+ * supplied loader. The commented-out code below is the legacy multi-library
+ * and ICU-bootstrap path, retained for reference only.
+ */
+ public static synchronized void loadLibs (Context context, File workingDir, LibraryLoader libraryLoader) {
+ libraryLoader.loadLibraries("sqlcipher");
+
+ // System.loadLibrary("stlport_shared");
+ // System.loadLibrary("sqlcipher_android");
+ // System.loadLibrary("database_sqlcipher");
+
+ // boolean systemICUFileExists = new File("/system/usr/icu/icudt46l.dat").exists();
+
+ // String icuRootPath = systemICUFileExists ? "/system/usr" : workingDir.getAbsolutePath();
+ // setICURoot(icuRootPath);
+ // if(!systemICUFileExists){
+ // loadICUData(context, workingDir);
+ // }
+ }
+
+ /**
+ * Algorithms used in ON CONFLICT clause
+ * http://www.sqlite.org/lang_conflict.html
+ */
+ /**
+ * When a constraint violation occurs, an immediate ROLLBACK occurs,
+ * thus ending the current transaction, and the command aborts with a
+ * return code of SQLITE_CONSTRAINT. If no transaction is active
+ * (other than the implied transaction that is created on every command)
+ * then this algorithm works the same as ABORT.
+ */
+ public static final int CONFLICT_ROLLBACK = 1;
+
+ /**
+ * When a constraint violation occurs, no ROLLBACK is executed
+ * so changes from prior commands within the same transaction
+ * are preserved. This is the default behavior.
+ */
+ public static final int CONFLICT_ABORT = 2;
+
+ /**
+ * When a constraint violation occurs, the command aborts with a return
+ * code SQLITE_CONSTRAINT. But any changes to the database that
+ * the command made prior to encountering the constraint violation
+ * are preserved and are not backed out.
+ */
+ public static final int CONFLICT_FAIL = 3;
+
+ /**
+ * When a constraint violation occurs, the one row that contains
+ * the constraint violation is not inserted or changed.
+ * But the command continues executing normally. Other rows before and
+ * after the row that contained the constraint violation continue to be
+ * inserted or updated normally. No error is returned.
+ */
+ public static final int CONFLICT_IGNORE = 4;
+
+ /**
+ * When a UNIQUE constraint violation occurs, the pre-existing rows that
+ * are causing the constraint violation are removed prior to inserting
+ * or updating the current row. Thus the insert or update always occurs.
+ * The command continues executing normally. No error is returned.
+ * If a NOT NULL constraint violation occurs, the NULL value is replaced
+ * by the default value for that column. If the column has no default
+ * value, then the ABORT algorithm is used. If a CHECK constraint
+ * violation occurs then the IGNORE algorithm is used. When this conflict
+ * resolution strategy deletes rows in order to satisfy a constraint,
+ * it does not invoke delete triggers on those rows.
+ * This behavior might change in a future release.
+ */
+ public static final int CONFLICT_REPLACE = 5;
+
+ /**
+ * use the following when no conflict action is specified.
+ */
+ public static final int CONFLICT_NONE = 0;
+ // SQL fragments indexed by the CONFLICT_* constants above (NONE..REPLACE).
+ private static final String[] CONFLICT_VALUES = new String[]
+ {"", " OR ROLLBACK ", " OR ABORT ", " OR FAIL ", " OR IGNORE ", " OR REPLACE "};
+
+ /**
+ * Maximum Length Of A LIKE Or GLOB Pattern
+ * The pattern matching algorithm used in the default LIKE and GLOB implementation
+ * of SQLite can exhibit O(N^2) performance (where N is the number of characters in
+ * the pattern) for certain pathological cases. To avoid denial-of-service attacks
+ * the length of the LIKE or GLOB pattern is limited to SQLITE_MAX_LIKE_PATTERN_LENGTH bytes.
+ * The default value of this limit is 50000. A modern workstation can evaluate
+ * even a pathological LIKE or GLOB pattern of 50000 bytes relatively quickly.
+ * The denial of service problem only comes into play when the pattern length gets
+ * into millions of bytes. Nevertheless, since most useful LIKE or GLOB patterns
+ * are at most a few dozen bytes in length, paranoid application developers may
+ * want to reduce this parameter to something in the range of a few hundred
+ * if they know that external users are able to generate arbitrary patterns.
+ */
+ public static final int SQLITE_MAX_LIKE_PATTERN_LENGTH = 50000;
+
+ /**
+ * Flag for {@link #openDatabase} to open the database for reading and writing.
+ * If the disk is full, this may fail even before you actually write anything.
+ *
+ * {@more} Note that the value of this flag is 0, so it is the default.
+ */
+ public static final int OPEN_READWRITE = 0x00000000; // update native code if changing
+
+ /**
+ * Flag for {@link #openDatabase} to open the database for reading only.
+ * This is the only reliable way to open a database if the disk may be full.
+ */
+ public static final int OPEN_READONLY = 0x00000001; // update native code if changing
+
+ // Mask used to extract the read-only bit from mFlags.
+ private static final int OPEN_READ_MASK = 0x00000001; // update native code if changing
+
+ /**
+ * Flag for {@link #openDatabase} to open the database without support for localized collators.
+ *
+ * {@more} This causes the collator LOCALIZED not to be created.
+ * You must be consistent when using this flag to use the setting the database was
+ * created with. If this is set, {@link #setLocale} will do nothing.
+ */
+ public static final int NO_LOCALIZED_COLLATORS = 0x00000010; // update native code if changing
+
+ /**
+ * Flag for {@link #openDatabase} to create the database file if it does not already exist.
+ */
+ public static final int CREATE_IF_NECESSARY = 0x10000000; // update native code if changing
+
+ /**
+ * SQLite memory database name
+ */
+ public static final String MEMORY = ":memory:";
+
+ /**
+ * Indicates whether the most-recently started transaction has been marked as successful.
+ */
+ private boolean mInnerTransactionIsSuccessful;
+
+ /**
+ * Valid during the life of a transaction, and indicates whether the entire transaction (the
+ * outer one and all of the inner ones) so far has been successful.
+ */
+ private boolean mTransactionIsSuccessful;
+
+ /**
+ * Valid during the life of a transaction.
+ */
+ private SQLiteTransactionListener mTransactionListener;
+
+ /** Synchronize on this when accessing the database */
+ private final ReentrantLock mLock = new ReentrantLock(true);
+
+ // Timestamps captured when the outermost lock hold is acquired; used by
+ // checkLockHoldTime() to warn about long-held locks.
+ private long mLockAcquiredWallTime = 0L;
+ private long mLockAcquiredThreadTime = 0L;
+
+ // limit the frequency of complaints about each database to one within 20 sec
+ // unless run command adb shell setprop log.tag.Database VERBOSE
+ private static final int LOCK_WARNING_WINDOW_IN_MS = 20000;
+ /** If the lock is held this long then a warning will be printed when it is released. */
+ private static final int LOCK_ACQUIRED_WARNING_TIME_IN_MS = 300;
+ private static final int LOCK_ACQUIRED_WARNING_THREAD_TIME_IN_MS = 100;
+ private static final int LOCK_ACQUIRED_WARNING_TIME_IN_MS_ALWAYS_PRINT = 2000;
+
+ // Sleep quantum (ms) used by yieldIfContendedHelper's post-yield wait loop.
+ private static final int SLEEP_AFTER_YIELD_QUANTUM = 1000;
+
+ // The pattern we remove from database filenames before
+ // potentially logging them.
+ private static final Pattern EMAIL_IN_DB_PATTERN = Pattern.compile("[\\w\\.\\-]+@[\\w\\.\\-]+");
+
+ private long mLastLockMessageTime = 0L;
+
+ // Things related to query logging/sampling for debugging
+ // slow/frequent queries during development. Always log queries
+ // which take (by default) 500ms+; shorter queries are sampled
+ // accordingly. Commit statements, which are typically slow, are
+ // logged together with the most recently executed SQL statement,
+ // for disambiguation. The 500ms value is configurable via a
+ // SystemProperty, but developers actively debugging database I/O
+ // should probably use the regular log tunable,
+ // LOG_SLOW_QUERIES_PROPERTY, defined below.
+ private static int sQueryLogTimeInMillis = 0; // lazily initialized
+ private static final int QUERY_LOG_SQL_LENGTH = 64;
+ private static final String COMMIT_SQL = "COMMIT;";
+ private String mLastSqlStatement = null;
+
+ // String prefix for slow database query EventLog records that show
+ // lock acquistions of the database.
+ /* package */ static final String GET_LOCK_LOG_PREFIX = "GETLOCK:";
+
+ /** Used by native code, do not rename */
+ /* package */ long mNativeHandle = 0;
+
+ /** Used to make temp table names unique */
+ /* package */ int mTempTableSequence = 0;
+
+ /** The path for the database file */
+ private String mPath;
+
+ /** The anonymized path for the database file for logging purposes */
+ private String mPathForLogs = null; // lazily populated
+
+ /** The flags passed to open/create */
+ private int mFlags;
+
+ /** The optional factory to use when creating new Cursors */
+ private CursorFactory mFactory;
+
+ // NOTE(review): generic type parameters appear stripped in this copy
+ // (likely WeakHashMap<SQLiteClosable, Object>); raw type left as-is.
+ private WeakHashMap mPrograms;
+
+ /**
+ * for each instance of this class, a cache is maintained to store
+ * the compiled query statement ids returned by sqlite database.
+ * key = sql statement with "?" for bind args
+ * value = {@link SQLiteCompiledSql}
+ * If an application opens the database and keeps it open during its entire life, then
+ * there will not be an overhead of compilation of sql statements by sqlite.
+ *
+ * why is this cache NOT static? because sqlite attaches compiledsql statements to the
+ * struct created when {@link SQLiteDatabase#openDatabase(String, CursorFactory, int)} is
+ * invoked.
+ *
+ * this cache has an upper limit of mMaxSqlCacheSize (settable by calling the method
+ * {@link setMaxCacheSize(int)}). its default is 0 - i.e., no caching by default because
+ * most of the apps don't use "?" syntax in their sql, caching is not useful for them.
+ */
+ /* package */ Map mCompiledQueries = new HashMap();
+ /**
+ * @hide
+ */
+ public static final int MAX_SQL_CACHE_SIZE = 250;
+ private int mMaxSqlCacheSize = MAX_SQL_CACHE_SIZE; // max cache size per Database instance
+ private int mCacheFullWarnings;
+ private static final int MAX_WARNINGS_ON_CACHESIZE_CONDITION = 1;
+
+ /** {@link DatabaseErrorHandler} to be used when SQLite returns any of the following errors
+ * Corruption
+ * */
+ private final DatabaseErrorHandler mErrorHandler;
+
+ /** maintain stats about number of cache hits and misses */
+ private int mNumCacheHits;
+ private int mNumCacheMisses;
+
+ /** the following 2 members maintain the time when a database is opened and closed */
+ private String mTimeOpened = null;
+ private String mTimeClosed = null;
+
+ /** Used to find out where this object was created in case it never got closed. */
+ private Throwable mStackTrace = null;
+
+ // System property that enables logging of slow queries. Specify the threshold in ms.
+ private static final String LOG_SLOW_QUERIES_PROPERTY = "db.log.slow_query_threshold";
+ private final int mSlowQueryThreshold;
+ /**
+ * Registers a closable (e.g. a compiled program) with this database so it is
+ * tracked for the database's lifetime. Access is guarded by the db lock.
+ *
+ * @param closable the SQLiteClosable to track
+ */
+ void addSQLiteClosable(SQLiteClosable closable) {
+ lock();
+ try {
+ mPrograms.put(closable, null);
+ } finally {
+ unlock();
+ }
+ }
+
+ /**
+ * Stops tracking a previously registered closable.
+ *
+ * @param closable the SQLiteClosable to remove from the tracking map
+ */
+ void removeSQLiteClosable(SQLiteClosable closable) {
+ lock();
+ try {
+ mPrograms.remove(closable);
+ } finally {
+ unlock();
+ }
+ }
+
+ /**
+ * Invoked when the last reference to this database is released; closes the
+ * native handle and drops this instance from the active-database registry.
+ */
+ @Override
+ protected void onAllReferencesReleased() {
+ if (isOpen()) {
+ if (SQLiteDebug.DEBUG_SQL_CACHE) {
+ // Record close time only when SQL-cache debugging is enabled.
+ mTimeClosed = getTime();
+ }
+ dbclose();
+
+ synchronized (sActiveDatabases) {
+ sActiveDatabases.remove(this);
+ }
+ }
+ }
+
+ /**
+ * Attempts to release memory that SQLite holds but does not require to
+ * operate properly. Typically this memory will come from the page cache.
+ *
+ * @return the number of bytes actually released
+ */
+ // Implemented in native code — presumably backed by sqlite3_release_memory();
+ // confirm in the JNI layer.
+ static public native int releaseMemory();
+
+ /**
+ * Control whether or not the SQLiteDatabase is made thread-safe by using locks
+ * around critical sections. This is pretty expensive, so if you know that your
+ * DB will only be used by a single thread then you should set this to false.
+ * The default is true.
+ * NOTE(review): the flag itself is read without synchronization by lock()/unlock();
+ * set it before sharing the database across threads.
+ * @param lockingEnabled set to true to enable locks, false otherwise
+ */
+ public void setLockingEnabled(boolean lockingEnabled) {
+ mLockingEnabled = lockingEnabled;
+ }
+
+ /**
+ * If set then the SQLiteDatabase is made thread-safe by using locks
+ * around critical sections. Defaults to true; toggled via setLockingEnabled().
+ */
+ private boolean mLockingEnabled = true;
+
+ /* package */
+ /**
+ * Invokes the configured {@link DatabaseErrorHandler} after corruption has
+ * been detected in this database.
+ */
+ void onCorruption() {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Calling error handler for corrupt database (detected) " + mPath);
+ }
+
+ // NOTE: DefaultDatabaseErrorHandler deletes the corrupt file, EXCEPT for memory database
+ mErrorHandler.onCorruption(this);
+ }
+
+ /**
+ * Locks the database for exclusive access. The database lock must be held when
+ * touching the native sqlite3* object since it is single threaded and uses
+ * a polling lock contention algorithm. The lock is recursive, and may be acquired
+ * multiple times by the same thread. This is a no-op if mLockingEnabled is false.
+ *
+ * @see #unlock()
+ */
+ /* package */ void lock() {
+ if (!mLockingEnabled) return;
+ mLock.lock();
+ if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
+ // Only the outermost acquisition (hold count 1) captures timing.
+ if (mLock.getHoldCount() == 1) {
+ // Use elapsed real-time since the CPU may sleep when waiting for IO
+ mLockAcquiredWallTime = SystemClock.elapsedRealtime();
+ mLockAcquiredThreadTime = Debug.threadCpuTimeNanos();
+ }
+ }
+ }
+
+ /**
+ * Locks the database for exclusive access. The database lock must be held when
+ * touching the native sqlite3* object since it is single threaded and uses
+ * a polling lock contention algorithm. The lock is recursive, and may be acquired
+ * multiple times by the same thread. Unlike {@link #lock()}, this ignores
+ * mLockingEnabled and always locks.
+ *
+ * @see #unlockForced()
+ */
+ private void lockForced() {
+ mLock.lock();
+ if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
+ if (mLock.getHoldCount() == 1) {
+ // Use elapsed real-time since the CPU may sleep when waiting for IO
+ mLockAcquiredWallTime = SystemClock.elapsedRealtime();
+ mLockAcquiredThreadTime = Debug.threadCpuTimeNanos();
+ }
+ }
+ }
+
+ /**
+ * Releases the database lock. This is a no-op if mLockingEnabled is false.
+ *
+ * @see #lock()
+ */
+ /* package */ void unlock() {
+ if (!mLockingEnabled) return;
+ if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
+ // About to release the outermost hold; report if it was held too long.
+ if (mLock.getHoldCount() == 1) {
+ checkLockHoldTime();
+ }
+ }
+ mLock.unlock();
+ }
+
+ /**
+ * Releases the database lock. Counterpart of {@link #lockForced()}: always
+ * unlocks, regardless of mLockingEnabled.
+ *
+ * @see #lockForced()
+ */
+ private void unlockForced() {
+ if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
+ if (mLock.getHoldCount() == 1) {
+ checkLockHoldTime();
+ }
+ }
+ mLock.unlock();
+ }
+
+ /**
+ * Logs a warning if the outermost lock hold exceeded the warning thresholds.
+ * Rate-limited to one message per LOCK_WARNING_WINDOW_IN_MS unless verbose
+ * logging is enabled or the always-print threshold is crossed.
+ */
+ private void checkLockHoldTime() {
+ // Use elapsed real-time since the CPU may sleep when waiting for IO
+ long elapsedTime = SystemClock.elapsedRealtime();
+ long lockedTime = elapsedTime - mLockAcquiredWallTime;
+ // Cheap early-out: below the always-print threshold, not verbose, and
+ // still inside the rate-limit window.
+ if (lockedTime < LOCK_ACQUIRED_WARNING_TIME_IN_MS_ALWAYS_PRINT &&
+ !Log.isLoggable(TAG, Log.VERBOSE) &&
+ (elapsedTime - mLastLockMessageTime) < LOCK_WARNING_WINDOW_IN_MS) {
+ return;
+ }
+ if (lockedTime > LOCK_ACQUIRED_WARNING_TIME_IN_MS) {
+ int threadTime = (int)
+ ((Debug.threadCpuTimeNanos() - mLockAcquiredThreadTime) / 1000000);
+ if (threadTime > LOCK_ACQUIRED_WARNING_THREAD_TIME_IN_MS ||
+ lockedTime > LOCK_ACQUIRED_WARNING_TIME_IN_MS_ALWAYS_PRINT) {
+ mLastLockMessageTime = elapsedTime;
+ String msg = "lock held on " + mPath + " for " + lockedTime + "ms. Thread time was "
+ + threadTime + "ms";
+ if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING_STACK_TRACE) {
+ if(BuildConfig.DEBUG){
+ Log.d(TAG, msg, new Exception());
+ }
+ } else {
+ if(BuildConfig.DEBUG){
+ Log.d(TAG, msg);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Performs a PRAGMA integrity_check; command against the database.
+ *
+ * @return true if the integrity check reports "ok"; false if the check found
+ * problems or the pragma could not be executed
+ */
+ public boolean isDatabaseIntegrityOk() {
+ // Fix: restored generic type parameters that were stripped to a raw Pair
+ // (first = pragma executed, second = first result row).
+ Pair<Boolean, String> result = getResultFromPragma("PRAGMA integrity_check;");
+ return result.first ? result.second.equals("ok") : result.first;
+ }
+
+ /**
+ * Returns a list of attached databases including the main database
+ * by executing PRAGMA database_list
+ * @return a list of pairs of database name and filename
+ */
+ public List> getAttachedDbs() {
+ return getAttachedDbs(this);
+ }
+
+ /**
+ * Sets the journal mode of the database to WAL (write-ahead logging).
+ * WAL cannot be enabled when additional databases are attached, for
+ * read-only databases, or for in-memory databases.
+ *
+ * @return true if the WAL pragma was issued, false if WAL cannot be enabled
+ * @throws IllegalStateException if called while a transaction is open
+ */
+ public boolean enableWriteAheadLogging() {
+ if(inTransaction()) {
+ String message = "Write Ahead Logging cannot be enabled while in a transaction";
+ throw new IllegalStateException(message);
+ }
+ // Fix: restored generic type parameters that were stripped to the invalid "List>".
+ List<Pair<String, String>> attachedDbs = getAttachedDbs(this);
+ if(attachedDbs != null && attachedDbs.size() > 1) return false;
+ if(isReadOnly() || getPath().equals(MEMORY)) return false;
+ String command = "PRAGMA journal_mode = WAL;";
+ rawExecSQL(command);
+ return true;
+ }
+
+ /**
+ * Switches the journal mode back to DELETE (SQLite's default journal mode).
+ *
+ * @throws IllegalStateException if called while a transaction is open
+ */
+ public void disableWriteAheadLogging() {
+ // Journal mode cannot change while a transaction is active.
+ if (inTransaction()) {
+ throw new IllegalStateException("Write Ahead Logging cannot be disabled while in a transaction");
+ }
+ rawExecSQL("PRAGMA journal_mode = DELETE;");
+ }
+
+ /**
+ * Reports whether the journal mode is currently WAL.
+ *
+ * @return true if PRAGMA journal_mode reports "wal", otherwise false
+ */
+ public boolean isWriteAheadLoggingEnabled() {
+ // Fix: restored generic type parameters that were stripped to a raw Pair.
+ Pair<Boolean, String> result = getResultFromPragma("PRAGMA journal_mode;");
+ return result.first ? result.second.equals("wal") : result.first;
+ }
+
+ /**
+ * Turns SQLite foreign key constraint enforcement on or off.
+ *
+ * @param enable true to enforce foreign key constraints, false to disable them
+ * @throws IllegalStateException if called while a transaction is open
+ */
+ public void setForeignKeyConstraintsEnabled(boolean enable) {
+ // Foreign key enforcement cannot change while a transaction is active.
+ if (inTransaction()) {
+ throw new IllegalStateException("Foreign key constraints may not be changed while in a transaction");
+ }
+ String state = enable ? "ON" : "OFF";
+ execSQL(String.format("PRAGMA foreign_keys = %s;", state));
+ }
+
+ /**
+ * Begins a transaction. Transactions can be nested. When the outer transaction is ended all of
+ * the work done in that transaction and all of the nested transactions will be committed or
+ * rolled back. The changes will be rolled back if any transaction is ended without being
+ * marked as clean (by calling setTransactionSuccessful). Otherwise they will be committed.
+ *
+ * <p>Here is the standard idiom for transactions:
+ *
+ * <pre>
+ *   db.beginTransaction();
+ *   try {
+ *     ...
+ *     db.setTransactionSuccessful();
+ *   } finally {
+ *     db.endTransaction();
+ *   }
+ * </pre>
+ *
+ * @throws IllegalStateException if the database is not open
+ */
+ public void beginTransaction() {
+ // Delegates to the listener variant (Exclusive mode) with no listener.
+ beginTransactionWithListener((SQLiteTransactionListener)null /* transactionStatusCallback */);
+ }
+
+ /**
+ * Begins a transaction in Exclusive mode. Transactions can be nested. When
+ * the outer transaction is ended all of the work done in that transaction
+ * and all of the nested transactions will be committed or rolled back. The
+ * changes will be rolled back if any transaction is ended without being
+ * marked as clean (by calling setTransactionSuccessful). Otherwise they
+ * will be committed.
+ *
+ * <p>Here is the standard idiom for transactions:
+ *
+ * <pre>
+ *   db.beginTransactionWithListener(listener);
+ *   try {
+ *     ...
+ *     db.setTransactionSuccessful();
+ *   } finally {
+ *     db.endTransaction();
+ *   }
+ * </pre>
+ *
+ * @param transactionListener listener that should be notified when the transaction begins,
+ * commits, or is rolled back, either explicitly or by a call to
+ * {@link #yieldIfContendedSafely}.
+ *
+ * @throws IllegalStateException if the database is not open
+ */
+ public void beginTransactionWithListener(SQLiteTransactionListener transactionListener) {
+ beginTransactionWithListenerInternal(transactionListener,
+ SQLiteDatabaseTransactionType.Exclusive);
+ }
+
+ /**
+ * Begins a transaction in Immediate mode (BEGIN IMMEDIATE) with no
+ * transaction listener.
+ */
+ public void beginTransactionNonExclusive() {
+ beginTransactionWithListenerInternal(null,
+ SQLiteDatabaseTransactionType.Immediate);
+ }
+
+ /**
+ * Begins a transaction in Immediate mode (BEGIN IMMEDIATE).
+ * @param transactionListener is the listener used to report transaction events
+ */
+ public void beginTransactionWithListenerNonExclusive(SQLiteTransactionListener transactionListener) {
+ beginTransactionWithListenerInternal(transactionListener,
+ SQLiteDatabaseTransactionType.Immediate);
+ }
+
+ /**
+ * End a transaction. See beginTransaction for notes about how to use this and when transactions
+ * are committed and rolled back.
+ *
+ * @throws IllegalStateException if the database is not open or is not locked by the current thread
+ */
+ public void endTransaction() {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ if (!mLock.isHeldByCurrentThread()) {
+ throw new IllegalStateException("no transaction pending");
+ }
+ try {
+ // Fold this nesting level's success flag into the overall outcome:
+ // an inner level that was never marked successful poisons the whole
+ // transaction.
+ if (mInnerTransactionIsSuccessful) {
+ mInnerTransactionIsSuccessful = false;
+ } else {
+ mTransactionIsSuccessful = false;
+ }
+ // Only the outermost endTransaction (hold count 1) commits or rolls back.
+ if (mLock.getHoldCount() != 1) {
+ return;
+ }
+ RuntimeException savedException = null;
+ if (mTransactionListener != null) {
+ try {
+ if (mTransactionIsSuccessful) {
+ mTransactionListener.onCommit();
+ } else {
+ mTransactionListener.onRollback();
+ }
+ } catch (RuntimeException e) {
+ // A throwing listener forces a rollback; the exception is
+ // re-raised after the ROLLBACK statement below.
+ savedException = e;
+ mTransactionIsSuccessful = false;
+ }
+ }
+ if (mTransactionIsSuccessful) {
+ execSQL(COMMIT_SQL);
+ } else {
+ try {
+ execSQL("ROLLBACK;");
+ if (savedException != null) {
+ throw savedException;
+ }
+ } catch (SQLException e) {
+ if(BuildConfig.DEBUG){
+ Log.d(TAG, "exception during rollback, maybe the DB previously "
+ + "performed an auto-rollback");
+ }
+ }
+ }
+ } finally {
+ // Always clear the listener and release the outermost lock hold.
+ mTransactionListener = null;
+ unlockForced();
+ if(BuildConfig.DEBUG){
+ Log.v(TAG, "unlocked " + Thread.currentThread()
+ + ", holdCount is " + mLock.getHoldCount());
+ }
+ }
+ }
+
+ /**
+ * Marks the current transaction as successful. Do not do any more database work between
+ * calling this and calling endTransaction. Do as little non-database work as possible in that
+ * situation too. If any errors are encountered between this and endTransaction the transaction
+ * will still be committed.
+ *
+ * @throws IllegalStateException if the database is not open, the current thread is not in a transaction,
+ * or the transaction is already marked as successful.
+ */
+ public void setTransactionSuccessful() {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ if (!mLock.isHeldByCurrentThread()) {
+ throw new IllegalStateException("no transaction pending");
+ }
+ if (mInnerTransactionIsSuccessful) {
+ throw new IllegalStateException(
+ "setTransactionSuccessful may only be called once per call to beginTransaction");
+ }
+ // Consumed (and reset) by endTransaction for this nesting level.
+ mInnerTransactionIsSuccessful = true;
+ }
+
+ /**
+ * return true if there is a transaction pending
+ */
+ public boolean inTransaction() {
+ // ReentrantLock.getHoldCount() counts holds by the *current* thread only,
+ // so this reports whether this thread has an open transaction/lock.
+ return mLock.getHoldCount() > 0;
+ }
+
+ /**
+ * Checks if the database lock is held by this thread.
+ *
+ * @return true, if this thread is holding the database lock.
+ */
+ public boolean isDbLockedByCurrentThread() {
+ // Delegates to the fair ReentrantLock that guards native access.
+ return mLock.isHeldByCurrentThread();
+ }
+
+ /**
+ * Checks if the database is locked by another thread. This is
+ * just an estimate, since this status can change at any time,
+ * including after the call is made but before the result has
+ * been acted upon.
+ *
+ * @return true if the database is currently locked by a thread other than
+ * the calling thread (best-effort snapshot)
+ */
+ public boolean isDbLockedByOtherThreads() {
+ return !mLock.isHeldByCurrentThread() && mLock.isLocked();
+ }
+
+ /**
+ * Temporarily end the transaction to let other threads run. The transaction is assumed to be
+ * successful so far. Do not call setTransactionSuccessful before calling this. When this
+ * returns a new transaction will have been created but not marked as successful.
+ *
+ * @return true if the transaction was yielded
+ *
+ * @deprecated if the db is locked more than once (because of nested transactions) then the lock
+ * will not be yielded. Use yieldIfContendedSafely instead.
+ */
+ @Deprecated
+ public boolean yieldIfContended() {
+ /* safeguard: */
+ if (!isOpen()) return false;
+
+ return yieldIfContendedHelper(false /* do not check yielding */,
+ -1 /* sleepAfterYieldDelay */);
+ }
+
+ /**
+ * Temporarily end the transaction to let other threads run. The transaction is assumed to be
+ * successful so far. Do not call setTransactionSuccessful before calling this. When this
+ * returns a new transaction will have been created but not marked as successful. This assumes
+ * that there are no nested transactions (beginTransaction has only been called once) and will
+ * throw an exception if that is not the case.
+ *
+ * @return true if the transaction was yielded, false if queue was empty or database was not open
+ */
+ public boolean yieldIfContendedSafely() {
+ /* safeguard: */
+ if (!isOpen()) return false;
+
+ // -1: do not sleep after yielding.
+ return yieldIfContendedHelper(true /* check yielding */, -1 /* sleepAfterYieldDelay*/);
+ }
+
+ /**
+ * Temporarily end the transaction to let other threads run. The transaction is assumed to be
+ * successful so far. Do not call setTransactionSuccessful before calling this. When this
+ * returns a new transaction will have been created but not marked as successful. This assumes
+ * that there are no nested transactions (beginTransaction has only been called once) and will
+ * throw an exception if that is not the case.
+ *
+ * @param sleepAfterYieldDelay if > 0, sleep this long before starting a new transaction if
+ * the lock was actually yielded. This will allow other background threads to make some
+ * more progress than they would if we started the transaction immediately.
+ *
+ * @return true if the transaction was yielded, false if queue was empty or database was not open
+ *
+ * @throws IllegalStateException if the database is locked more than once by the current thread
+ * @throws InterruptedException if the thread was interrupted while sleeping
+ */
+    public boolean yieldIfContendedSafely(long sleepAfterYieldDelay) {
+        // Nothing to yield if the database is already closed.
+        if (!isOpen()) {
+            return false;
+        }
+        // Verify the lock is fully released, then optionally sleep before
+        // restarting the transaction.
+        return yieldIfContendedHelper(true /* check yielding */, sleepAfterYieldDelay);
+    }
+
+    /**
+     * Core implementation of the yieldIfContended* methods: if another thread is
+     * queued on the database lock, commit the current transaction, let the waiter
+     * run (optionally sleeping), then start a new transaction with the same
+     * listener.
+     *
+     * @param checkFullyYielded if true, throw IllegalStateException when the lock
+     *        is still held by this thread after ending the transaction (i.e. a
+     *        nested transaction prevented a real yield)
+     * @param sleepAfterYieldDelay if &gt; 0, sleep up to this many milliseconds in
+     *        SLEEP_AFTER_YIELD_QUANTUM slices, waking early once no other thread
+     *        is waiting on the lock
+     * @return true if the lock was yielded, false if no thread was waiting
+     */
+    private boolean yieldIfContendedHelper(boolean checkFullyYielded, long sleepAfterYieldDelay) {
+        if (mLock.getQueueLength() == 0) {
+            // Reset the lock acquire time since we know that the thread was willing to yield
+            // the lock at this time.
+            mLockAcquiredWallTime = SystemClock.elapsedRealtime();
+            mLockAcquiredThreadTime = Debug.threadCpuTimeNanos();
+            return false;
+        }
+        setTransactionSuccessful();
+        SQLiteTransactionListener transactionListener = mTransactionListener;
+        endTransaction();
+        if (checkFullyYielded) {
+            if (this.isDbLockedByCurrentThread()) {
+                // FIX: corrected typo in the exception message ("yielfIfContended").
+                throw new IllegalStateException(
+                        "Db locked more than once. yieldIfContended cannot yield");
+            }
+        }
+        if (sleepAfterYieldDelay > 0) {
+            // Sleep for up to sleepAfterYieldDelay milliseconds, waking up periodically to
+            // check if anyone is using the database. If the database is not contended,
+            // retake the lock and return.
+            long remainingDelay = sleepAfterYieldDelay;
+            while (remainingDelay > 0) {
+                try {
+                    Thread.sleep(remainingDelay < SLEEP_AFTER_YIELD_QUANTUM ?
+                            remainingDelay : SLEEP_AFTER_YIELD_QUANTUM);
+                } catch (InterruptedException e) {
+                    // Clear the interrupt status and keep waiting; yielding is best-effort.
+                    Thread.interrupted();
+                }
+                remainingDelay -= SLEEP_AFTER_YIELD_QUANTUM;
+                if (mLock.getQueueLength() == 0) {
+                    break;
+                }
+            }
+        }
+        beginTransactionWithListener(transactionListener);
+        return true;
+    }
+
+    /** Maps table names to info about which _sync_time column to set
+     * to NULL on an update. This is used to support syncing. */
+    // FIX: restore the generic type parameters that were stripped during
+    // extraction; the raw Map made the typed iteration in getSyncedTables()
+    // and the typed get() in rowUpdated() fail to compile.
+    private final Map<String, SyncUpdateInfo> mSyncUpdateInfo =
+            new HashMap<String, SyncUpdateInfo>();
+
+    /**
+     * Returns the tables registered via markTableSyncable(table, deletedTable),
+     * mapping each syncable table to its corresponding deleted-rows table.
+     * Tables registered without a deleted table are omitted.
+     *
+     * @return map of table name to deleted-table name
+     */
+    // FIX: restore the generic type parameters stripped during extraction;
+    // with a raw Map, "for (String table : keySet())" and the typed get()
+    // below do not compile.
+    public Map<String, String> getSyncedTables() {
+        synchronized(mSyncUpdateInfo) {
+            HashMap<String, String> tables = new HashMap<String, String>();
+            for (String table : mSyncUpdateInfo.keySet()) {
+                SyncUpdateInfo info = mSyncUpdateInfo.get(table);
+                if (info.deletedTable != null) {
+                    tables.put(table, info.deletedTable);
+                }
+            }
+            return tables;
+        }
+    }
+
+ /**
+ * Internal class used to keep track what needs to be marked as changed
+ * when an update occurs. This is used for syncing, so the sync engine
+ * knows what data has been updated locally.
+ */
+    static private class SyncUpdateInfo {
+        /** The table containing the _sync_time column */
+        String masterTable;
+
+        /** The deleted table that corresponds to the master table */
+        String deletedTable;
+
+        /** The key in the local table the row in table. It may be _id, if table
+         * is the local table. */
+        String foreignKey;
+
+        /**
+         * Builds the sync bookkeeping record.
+         *
+         * @param master table whose _sync_time is set to NULL
+         * @param deleted deleted-rows table paired with the master table
+         * @param key column referring to the primary key in the master table
+         */
+        SyncUpdateInfo(String master, String deleted, String key) {
+            masterTable = master;
+            deletedTable = deleted;
+            foreignKey = key;
+        }
+    }
+
+ /**
+ * Used to allow returning sub-classes of {@link Cursor} when calling query.
+ */
+    public interface CursorFactory {
+        /**
+         * Builds the cursor returned from a query; see
+         * {@link SQLiteCursor#SQLiteCursor(SQLiteDatabase, SQLiteCursorDriver,
+         * String, SQLiteQuery)} for the parameter meanings.
+         */
+        public Cursor newCursor(SQLiteDatabase db,
+                SQLiteCursorDriver masterQuery, String editTable,
+                SQLiteQuery query);
+    }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+    public static SQLiteDatabase openDatabase(String path, String password, CursorFactory factory, int flags) {
+        // No pre/post key-event hook requested.
+        final SQLiteDatabaseHook hook = null;
+        return openDatabase(path, password, factory, flags, hook);
+    }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file (char array)
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+ public static SQLiteDatabase openDatabase(String path, char[] password, CursorFactory factory, int flags) {
+ return openDatabase(path, password, factory, flags, null, null);
+ }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}
+ * with optional hook to run on pre/post key events.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ * @param hook to run on pre/post key events
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+    public static SQLiteDatabase openDatabase(String path, String password, CursorFactory factory, int flags, SQLiteDatabaseHook hook) {
+        // Use the default corruption handler.
+        final DatabaseErrorHandler handler = null;
+        return openDatabase(path, password, factory, flags, hook, handler);
+    }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}
+ * with optional hook to run on pre/post key events.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file (char array)
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ * @param hook to run on pre/post key events (may be null)
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+    public static SQLiteDatabase openDatabase(String path, char[] password, CursorFactory factory, int flags, SQLiteDatabaseHook hook) {
+        // Use the default corruption handler.
+        final DatabaseErrorHandler handler = null;
+        return openDatabase(path, password, factory, flags, hook, handler);
+    }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}
+ * with optional hook to run on pre/post key events.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ * @param hook to run on pre/post key events
+ * @param errorHandler The {@link DatabaseErrorHandler} to be used when sqlite reports database
+ * corruption (or null for default).
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+ public static SQLiteDatabase openDatabase(String path, String password, CursorFactory factory, int flags,
+ SQLiteDatabaseHook hook, DatabaseErrorHandler errorHandler) {
+ return openDatabase(path, password == null ? null : password.toCharArray(), factory, flags, hook, errorHandler);
+ }
+
+/**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}
+ * with optional hook to run on pre/post key events.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file (char array)
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ * @param hook to run on pre/post key events (may be null)
+ * @param errorHandler The {@link DatabaseErrorHandler} to be used when sqlite reports database
+ * corruption (or null for default).
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+    public static SQLiteDatabase openDatabase(String path, char[] password, CursorFactory factory, int flags,
+            SQLiteDatabaseHook hook, DatabaseErrorHandler errorHandler) {
+        // Derive the raw key bytes from the char[] passphrase, then open.
+        return openDatabase(path, getBytes(password), factory, flags, hook, errorHandler);
+    }
+
+ /**
+ * Open the database according to the flags {@link #OPEN_READWRITE}
+ * {@link #OPEN_READONLY} {@link #CREATE_IF_NECESSARY} and/or {@link #NO_LOCALIZED_COLLATORS}
+ * with optional hook to run on pre/post key events.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param path to database file to open and/or create
+ * @param password to use to open and/or create database file (byte array)
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called, or null for default
+ * @param flags to control database access mode and other options
+ * @param hook to run on pre/post key events (may be null)
+ * @param errorHandler The {@link DatabaseErrorHandler} to be used when sqlite reports database
+ * corruption (or null for default).
+ *
+ * @return the newly opened database
+ *
+ * @throws SQLiteException if the database cannot be opened
+ * @throws IllegalArgumentException if the database path is null
+ */
+    public static SQLiteDatabase openDatabase(String path, byte[] password, CursorFactory factory, int flags,
+            SQLiteDatabaseHook hook, DatabaseErrorHandler errorHandler) {
+        // Fall back to the default handler (which deletes corrupt files, except
+        // for in-memory databases) when the caller did not supply one.
+        DatabaseErrorHandler handler =
+                (errorHandler == null) ? new DefaultDatabaseErrorHandler() : errorHandler;
+        SQLiteDatabase db = null;
+
+        try {
+            db = new SQLiteDatabase(path, factory, flags, handler);
+            db.openDatabaseInternal(password, hook);
+        } catch (SQLiteDatabaseCorruptException e) {
+            // Give the handler one chance to repair/remove the database, then
+            // retry the open exactly once.
+            // FUTURE TBD: should we consider this for other open failures?
+            if(BuildConfig.DEBUG){
+                Log.e(TAG, "Calling error handler for corrupt database " + path, e);
+            }
+            // NOTE: if handler.onCorruption() throws, the exception _should_
+            // bubble back to the original caller.
+            handler.onCorruption(db);
+            db = new SQLiteDatabase(path, factory, flags, handler);
+            db.openDatabaseInternal(password, hook);
+        }
+
+        if (SQLiteDebug.DEBUG_SQL_STATEMENTS) {
+            db.enableSqlTracing(path);
+        }
+        if (SQLiteDebug.DEBUG_SQL_TIME) {
+            db.enableSqlProfiling(path);
+        }
+
+        // Register the instance so static helpers can enumerate open databases.
+        synchronized (sActiveDatabases) {
+            sActiveDatabases.put(db, null);
+        }
+
+        return db;
+    }
+
+ /**
+ * Equivalent to openDatabase(file.getPath(), password, factory, CREATE_IF_NECESSARY, databaseHook).
+ */
+    public static SQLiteDatabase openOrCreateDatabase(File file, String password, CursorFactory factory, SQLiteDatabaseHook databaseHook) {
+        // Use the default corruption handler.
+        final DatabaseErrorHandler handler = null;
+        return openOrCreateDatabase(file, password, factory, databaseHook, handler);
+    }
+
+ /**
+ * Equivalent to openDatabase(path, password, factory, CREATE_IF_NECESSARY, databaseHook).
+ */
+ public static SQLiteDatabase openOrCreateDatabase(File file, String password, CursorFactory factory, SQLiteDatabaseHook databaseHook,
+ DatabaseErrorHandler errorHandler) {
+ return openOrCreateDatabase(file == null ? null : file.getPath(), password, factory, databaseHook, errorHandler);
+ }
+
+ /**
+ * Equivalent to openDatabase(path, password, factory, CREATE_IF_NECESSARY, databaseHook).
+ */
+    public static SQLiteDatabase openOrCreateDatabase(String path, String password, CursorFactory factory, SQLiteDatabaseHook databaseHook) {
+        // Create the database file if it does not exist yet.
+        final int openFlags = CREATE_IF_NECESSARY;
+        return openDatabase(path, password, factory, openFlags, databaseHook);
+    }
+
+ public static SQLiteDatabase openOrCreateDatabase(String path, String password, CursorFactory factory, SQLiteDatabaseHook databaseHook,
+ DatabaseErrorHandler errorHandler) {
+ return openDatabase(path, password == null ? null : password.toCharArray(), factory, CREATE_IF_NECESSARY, databaseHook, errorHandler);
+ }
+
+    public static SQLiteDatabase openOrCreateDatabase(String path, char[] password, CursorFactory factory, SQLiteDatabaseHook databaseHook) {
+        // Create the database file if it does not exist yet.
+        final int openFlags = CREATE_IF_NECESSARY;
+        return openDatabase(path, password, factory, openFlags, databaseHook);
+    }
+
+    public static SQLiteDatabase openOrCreateDatabase(String path, char[] password, CursorFactory factory, SQLiteDatabaseHook databaseHook,
+            DatabaseErrorHandler errorHandler) {
+        // Create the database file if it does not exist yet.
+        final int openFlags = CREATE_IF_NECESSARY;
+        return openDatabase(path, password, factory, openFlags, databaseHook, errorHandler);
+    }
+
+    public static SQLiteDatabase openOrCreateDatabase(String path, byte[] password, CursorFactory factory, SQLiteDatabaseHook databaseHook) {
+        // Use the default corruption handler.
+        final DatabaseErrorHandler handler = null;
+        return openDatabase(path, password, factory, CREATE_IF_NECESSARY, databaseHook, handler);
+    }
+
+    public static SQLiteDatabase openOrCreateDatabase(String path, byte[] password, CursorFactory factory, SQLiteDatabaseHook databaseHook,
+            DatabaseErrorHandler errorHandler) {
+        // Create the database file if it does not exist yet.
+        final int openFlags = CREATE_IF_NECESSARY;
+        return openDatabase(path, password, factory, openFlags, databaseHook, errorHandler);
+    }
+
+ /**
+ * Equivalent to openDatabase(file.getPath(), password, factory, CREATE_IF_NECESSARY).
+ */
+    public static SQLiteDatabase openOrCreateDatabase(File file, String password, CursorFactory factory) {
+        // No pre/post key-event hook requested.
+        final SQLiteDatabaseHook hook = null;
+        return openOrCreateDatabase(file, password, factory, hook);
+    }
+
+ /**
+ * Equivalent to openDatabase(path, password, factory, CREATE_IF_NECESSARY).
+ */
+    public static SQLiteDatabase openOrCreateDatabase(String path, String password, CursorFactory factory) {
+        // No pre/post key-event hook requested.
+        final SQLiteDatabaseHook hook = null;
+        return openDatabase(path, password, factory, CREATE_IF_NECESSARY, hook);
+    }
+
+ /**
+ * Equivalent to openDatabase(path, password, factory, CREATE_IF_NECESSARY).
+ */
+    public static SQLiteDatabase openOrCreateDatabase(String path, char[] password, CursorFactory factory) {
+        // No pre/post key-event hook requested.
+        final SQLiteDatabaseHook hook = null;
+        return openDatabase(path, password, factory, CREATE_IF_NECESSARY, hook);
+    }
+
+ /**
+ * Equivalent to openDatabase(path, password, factory, CREATE_IF_NECESSARY).
+ */
+ public static SQLiteDatabase openOrCreateDatabase(String path, byte[] password, CursorFactory factory) {
+ return openDatabase(path, password, factory, CREATE_IF_NECESSARY, null, null);
+ }
+
+ /**
+ * Create a memory backed SQLite database. Its contents will be destroyed
+ * when the database is closed.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called
+ * @param password to use to open and/or create database file
+ *
+ * @return a SQLiteDatabase object, or null if the database can't be created
+ *
+ * @throws SQLiteException if the database cannot be opened
+ */
+ public static SQLiteDatabase create(CursorFactory factory, String password) {
+ // This is a magic string with special meaning for SQLite.
+ return openDatabase(MEMORY, password == null ? null : password.toCharArray(), factory, CREATE_IF_NECESSARY);
+ }
+
+ /**
+ * Create a memory backed SQLite database. Its contents will be destroyed
+ * when the database is closed.
+ *
+ *
+     * <p>Sets the locale of the database to the system's current locale.
+ * Call {@link #setLocale} if you would like something else.
+ *
+ * @param factory an optional factory class that is called to instantiate a
+ * cursor when query is called
+ * @param password to use to open and/or create database file (char array)
+ *
+ * @return a SQLiteDatabase object, or null if the database can't be created
+ *
+ * @throws SQLiteException if the database cannot be opened
+ */
+    public static SQLiteDatabase create(CursorFactory factory, char[] password) {
+        // Backed by SQLite's in-memory database; contents vanish on close.
+        final int openFlags = CREATE_IF_NECESSARY;
+        return openDatabase(MEMORY, password, factory, openFlags);
+    }
+
+
+ /**
+ * Close the database.
+ */
+    public void close() {
+        // Closing an already-closed database is a harmless no-op.
+        if (!isOpen()) {
+            return;
+        }
+        lock();
+        try {
+            // Release compiled statements and outstanding closables first, then
+            // shut the instance down regardless of its reference count.
+            closeClosable();
+            onAllReferencesReleased();
+        } finally {
+            unlock();
+        }
+    }
+
+    /**
+     * Releases everything this database still holds open: the compiled-SQL
+     * statement cache, then every outstanding {@link SQLiteClosable} tracked in
+     * mPrograms.
+     */
+    private void closeClosable() {
+        /* deallocate all compiled sql statement objects from mCompiledQueries cache.
+         * this should be done before de-referencing all {@link SQLiteClosable} objects
+         * from this database object because calling
+         * {@link SQLiteClosable#onAllReferencesReleasedFromContainer()} could cause the database
+         * to be closed. sqlite doesn't let a database close if there are
+         * any unfinalized statements - such as the compiled-sql objects in mCompiledQueries.
+         */
+        deallocCachedSqlStatements();
+
+        // FIX: restore the generic type parameters stripped during extraction;
+        // "Iterator>" was not valid Java and the raw Map.Entry lost type safety.
+        Iterator<Map.Entry<SQLiteClosable, Object>> iter = mPrograms.entrySet().iterator();
+        while (iter.hasNext()) {
+            Map.Entry<SQLiteClosable, Object> entry = iter.next();
+            SQLiteClosable program = entry.getKey();
+            if (program != null) {
+                program.onAllReferencesReleasedFromContainer();
+            }
+        }
+    }
+
+    /**
+     * Native call to close the database handle held by this instance.
+     * Called from close() via onAllReferencesReleased(); implemented natively.
+     */
+    private native void dbclose();
+
+ /**
+ * Gets the database version.
+ *
+ * @return the database version
+ *
+ * @throws IllegalStateException if the database is not open
+ */
+    public int getVersion() {
+        SQLiteStatement stmt = null;
+        lock();
+        try {
+            if (!isOpen()) {
+                throw new IllegalStateException("database not open");
+            }
+            // PRAGMA user_version holds the schema version set via setVersion().
+            stmt = new SQLiteStatement(this, "PRAGMA user_version;");
+            return (int) stmt.simpleQueryForLong();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+            unlock();
+        }
+    }
+
+ /**
+ * Sets the database version.
+ *
+ * @param version the new database version
+ *
+ * @throws SQLiteException if there is an issue executing the sql internally
+ * @throws IllegalStateException if the database is not open
+ */
+    public void setVersion(int version) {
+        // PRAGMA statements do not take bound parameters, so build the SQL text.
+        final String sql = "PRAGMA user_version = " + version;
+        execSQL(sql);
+    }
+
+ /**
+ * Returns the maximum size the database may grow to.
+ *
+ * @return the new maximum database size
+ */
+    public long getMaximumSize() {
+        SQLiteStatement stmt = null;
+        lock();
+        try {
+            if (!isOpen()) {
+                throw new IllegalStateException("database not open");
+            }
+            stmt = new SQLiteStatement(this, "PRAGMA max_page_count;");
+            // The byte limit is the page-count limit times the page size.
+            long maxPages = stmt.simpleQueryForLong();
+            return maxPages * getPageSize();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+            unlock();
+        }
+    }
+
+ /**
+ * Sets the maximum size the database will grow to. The maximum size cannot
+ * be set below the current size.
+ *
+ * @param numBytes the maximum database size, in bytes
+ * @return the new maximum database size
+ */
+    public long setMaximumSize(long numBytes) {
+        SQLiteStatement stmt = null;
+        lock();
+        try {
+            if (!isOpen()) {
+                throw new IllegalStateException("database not open");
+            }
+            long pageSize = getPageSize();
+            // Round the requested size up to a whole number of pages.
+            long numPages = numBytes / pageSize;
+            if ((numBytes % pageSize) != 0) {
+                numPages += 1;
+            }
+            stmt = new SQLiteStatement(this, "PRAGMA max_page_count = " + numPages);
+            // SQLite may clamp the value; report the size actually in effect.
+            long newPageCount = stmt.simpleQueryForLong();
+            return newPageCount * pageSize;
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+            unlock();
+        }
+    }
+
+ /**
+ * Returns the current database page size, in bytes.
+ *
+ * @return the database page size, in bytes
+ */
+    public long getPageSize() {
+        SQLiteStatement stmt = null;
+        lock();
+        try {
+            if (!isOpen()) {
+                throw new IllegalStateException("database not open");
+            }
+            // Ask SQLite directly for the current page size in bytes.
+            stmt = new SQLiteStatement(this, "PRAGMA page_size;");
+            return stmt.simpleQueryForLong();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+            unlock();
+        }
+    }
+
+ /**
+ * Sets the database page size. The page size must be a power of two. This
+ * method does not work if any data has been written to the database file,
+ * and must be called right after the database has been created.
+ *
+ * @param numBytes the database page size, in bytes
+ */
+    public void setPageSize(long numBytes) {
+        // Only effective on a freshly created database, before any data is written.
+        final String sql = "PRAGMA page_size = " + numBytes;
+        execSQL(sql);
+    }
+
+ /**
+ * Mark this table as syncable. When an update occurs in this table the
+     * _sync_dirty field will be set to ensure proper syncing operation.
+ *
+ * @param table the table to mark as syncable
+ * @param deletedTable The deleted table that corresponds to the
+ * syncable table
+ *
+ * @throws SQLiteException if there is an issue executing the sql to mark the table as syncable
+ * OR if the database is not open
+ *
+ * FUTURE @todo throw IllegalStateException if the database is not open and
+ * update the test suite
+ *
+ * NOTE: This method was deprecated by the AOSP in Android API 11.
+ */
+    public void markTableSyncable(String table, String deletedTable) {
+        // Fail fast on a closed database.
+        if (!isOpen()) {
+            throw new SQLiteException("database not open");
+        }
+        // The table is its own master table, keyed by _id.
+        markTableSyncable(table, "_id", table, deletedTable);
+    }
+
+ /**
+ * Mark this table as syncable, with the _sync_dirty residing in another
+ * table. When an update occurs in this table the _sync_dirty field of the
+ * row in updateTable with the _id in foreignKey will be set to
+ * ensure proper syncing operation.
+ *
+ * @param table an update on this table will trigger a sync time removal
+ * @param foreignKey this is the column in table whose value is an _id in
+ * updateTable
+ * @param updateTable this is the table that will have its _sync_dirty
+ *
+ * @throws SQLiteException if there is an issue executing the sql to mark the table as syncable
+ *
+ * FUTURE @todo throw IllegalStateException if the database is not open and
+ * update the test suite
+ *
+ * NOTE: This method was deprecated by the AOSP in Android API 11.
+ */
+    public void markTableSyncable(String table, String foreignKey,
+            String updateTable) {
+        // Fail fast on a closed database.
+        if (!isOpen()) {
+            throw new SQLiteException("database not open");
+        }
+        // No corresponding deleted table for this registration.
+        markTableSyncable(table, foreignKey, updateTable, null);
+    }
+
+ /**
+ * Mark this table as syncable, with the _sync_dirty residing in another
+ * table. When an update occurs in this table the _sync_dirty field of the
+ * row in updateTable with the _id in foreignKey will be set to
+ * ensure proper syncing operation.
+ *
+ * @param table an update on this table will trigger a sync time removal
+ * @param foreignKey this is the column in table whose value is an _id in
+ * updateTable
+ * @param updateTable this is the table that will have its _sync_dirty
+ * @param deletedTable The deleted table that corresponds to the
+ * updateTable
+ *
+ * @throws SQLiteException if there is an issue executing the sql
+ */
+    private void markTableSyncable(String table, String foreignKey,
+            String updateTable, String deletedTable) {
+        // Probe both tables up front so a bad schema fails here rather than at
+        // sync time: updateTable must have _sync_dirty, table must have the key.
+        lock();
+        try {
+            native_execSQL("SELECT _sync_dirty FROM " + updateTable
+                    + " LIMIT 0");
+            native_execSQL("SELECT " + foreignKey + " FROM " + table
+                    + " LIMIT 0");
+        } finally {
+            unlock();
+        }
+
+        synchronized (mSyncUpdateInfo) {
+            mSyncUpdateInfo.put(table,
+                    new SyncUpdateInfo(updateTable, deletedTable, foreignKey));
+        }
+    }
+
+ /**
+ * Call for each row that is updated in a cursor.
+ *
+ * @param table the table the row is in
+ * @param rowId the row ID of the updated row
+ */
+    /* package */ void rowUpdated(String table, long rowId) {
+        SyncUpdateInfo info;
+        synchronized (mSyncUpdateInfo) {
+            info = mSyncUpdateInfo.get(table);
+        }
+        // Only tables registered via markTableSyncable() get dirtied.
+        if (info == null) {
+            return;
+        }
+        execSQL("UPDATE " + info.masterTable
+                + " SET _sync_dirty=1 WHERE _id=(SELECT " + info.foreignKey
+                + " FROM " + table + " WHERE _id=" + rowId + ")");
+    }
+
+ /**
+ * Finds the name of the first table, which is editable.
+ *
+ * @param tables a list of tables
+ * @return the first table listed
+ */
+    public static String findEditTable(String tables) {
+        if (TextUtils.isEmpty(tables)) {
+            throw new IllegalStateException("Invalid tables");
+        }
+        // The editable table is the first identifier, ending at whichever of
+        // ' ' or ',' occurs first (if either occurs at all).
+        int spacepos = tables.indexOf(' ');
+        int commapos = tables.indexOf(',');
+
+        if (spacepos > 0 && (commapos < 0 || spacepos < commapos)) {
+            return tables.substring(0, spacepos);
+        }
+        if (commapos > 0 && (spacepos < 0 || commapos < spacepos)) {
+            return tables.substring(0, commapos);
+        }
+        return tables;
+    }
+
+ /**
+ * Compiles an SQL statement into a reusable pre-compiled statement object.
+ * The parameters are identical to {@link #execSQL(String)}. You may put ?s in the
+ * statement and fill in those values with {@link SQLiteProgram#bindString}
+ * and {@link SQLiteProgram#bindLong} each time you want to run the
+ * statement. Statements may not return result sets larger than 1x1.
+ *
+ * @param sql The raw SQL statement, may contain ? for unknown values to be
+ * bound later.
+ *
+ * @return A pre-compiled {@link SQLiteStatement} object. Note that
+ * {@link SQLiteStatement}s are not synchronized, see the documentation for more details.
+ *
+ * @throws SQLException If the SQL string is invalid for some reason
+ * @throws IllegalStateException if the database is not open
+ */
+ public SQLiteStatement compileStatement(String sql) throws SQLException {
+ lock();
+ try {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ return new SQLiteStatement(this, sql);
+ } finally {
+ unlock();
+ }
+ }
+
+ /**
+ * Query the given URL, returning a {@link Cursor} over the result set.
+ *
+ * @param distinct true if you want each row to be unique, false otherwise.
+ * @param table The table name to compile the query against.
+ * @param columns A list of which columns to return. Passing null will
+ * return all columns, which is discouraged to prevent reading
+ * data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return, formatted as an
+ * SQL WHERE clause (excluding the WHERE itself). Passing null
+ * will return all rows for the given table.
+ * @param selectionArgs You may include ?s in selection, which will be
+ * replaced by the values from selectionArgs, in order that they
+ * appear in the selection. The values will be bound as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted as an SQL
+ * GROUP BY clause (excluding the GROUP BY itself). Passing null
+ * will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in the cursor,
+ * if row grouping is being used, formatted as an SQL HAVING
+ * clause (excluding the HAVING itself). Passing null will cause
+ * all row groups to be included, and is required when row
+ * grouping is not being used.
+ * @param orderBy How to order the rows, formatted as an SQL ORDER BY clause
+ * (excluding the ORDER BY itself). Passing null will use the
+ * default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ *
+ * @return A {@link Cursor} object, which is positioned before the first entry. Note that
+ * {@link Cursor}s are not synchronized, see the documentation for more details.
+ *
+ * @throws SQLiteException if there is an issue executing the sql or the SQL string is invalid
+ * @throws IllegalStateException if the database is not open
+ *
+ * @see Cursor
+ */
+    public Cursor query(boolean distinct, String table, String[] columns,
+            String selection, String[] selectionArgs, String groupBy,
+            String having, String orderBy, String limit) {
+        // Same as queryWithFactory() using the default cursor factory.
+        return queryWithFactory(null /* default factory */, distinct, table,
+                columns, selection, selectionArgs, groupBy, having, orderBy,
+                limit);
+    }
+
+ /**
+ * Query the given URL, returning a {@link Cursor} over the result set.
+ *
+ * @param cursorFactory the cursor factory to use, or null for the default factory
+ * @param distinct true if you want each row to be unique, false otherwise.
+ * @param table The table name to compile the query against.
+ * @param columns A list of which columns to return. Passing null will
+ * return all columns, which is discouraged to prevent reading
+ * data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return, formatted as an
+ * SQL WHERE clause (excluding the WHERE itself). Passing null
+ * will return all rows for the given table.
+ * @param selectionArgs You may include ?s in selection, which will be
+ * replaced by the values from selectionArgs, in order that they
+ * appear in the selection. The values will be bound as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted as an SQL
+ * GROUP BY clause (excluding the GROUP BY itself). Passing null
+ * will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in the cursor,
+ * if row grouping is being used, formatted as an SQL HAVING
+ * clause (excluding the HAVING itself). Passing null will cause
+ * all row groups to be included, and is required when row
+ * grouping is not being used.
+ * @param orderBy How to order the rows, formatted as an SQL ORDER BY clause
+ * (excluding the ORDER BY itself). Passing null will use the
+ * default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ *
+ * @return A {@link Cursor} object, which is positioned before the first entry. Note that
+ * {@link Cursor}s are not synchronized, see the documentation for more details.
+ *
+ * @see Cursor
+ */
+    public Cursor queryWithFactory(CursorFactory cursorFactory,
+            boolean distinct, String table, String[] columns,
+            String selection, String[] selectionArgs, String groupBy,
+            String having, String orderBy, String limit) {
+        if (!isOpen()) {
+            throw new IllegalStateException("database not open");
+        }
+        // Build the SELECT text, then hand off to the raw-query path. The
+        // "edit table" is the first table named in the FROM clause.
+        String sql = SQLiteQueryBuilder.buildQueryString(
+                distinct, table, columns, selection, groupBy, having, orderBy, limit);
+        String editTable = findEditTable(table);
+        return rawQueryWithFactory(cursorFactory, sql, selectionArgs, editTable);
+    }
+
+ /**
+ * Query the given table, returning a {@link Cursor} over the result set.
+ *
+ * @param table The table name to compile the query against.
+ * @param columns A list of which columns to return. Passing null will
+ * return all columns, which is discouraged to prevent reading
+ * data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return, formatted as an
+ * SQL WHERE clause (excluding the WHERE itself). Passing null
+ * will return all rows for the given table.
+ * @param selectionArgs You may include ?s in selection, which will be
+ * replaced by the values from selectionArgs, in order that they
+ * appear in the selection. The values will be bound as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted as an SQL
+ * GROUP BY clause (excluding the GROUP BY itself). Passing null
+ * will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in the cursor,
+ * if row grouping is being used, formatted as an SQL HAVING
+ * clause (excluding the HAVING itself). Passing null will cause
+ * all row groups to be included, and is required when row
+ * grouping is not being used.
+ * @param orderBy How to order the rows, formatted as an SQL ORDER BY clause
+ * (excluding the ORDER BY itself). Passing null will use the
+ * default sort order, which may be unordered.
+ *
+ * @return A {@link Cursor} object, which is positioned before the first entry. Note that
+ * {@link Cursor}s are not synchronized, see the documentation for more details.
+ *
+ * @throws SQLiteException if there is an issue executing the sql or the SQL string is invalid
+ * @throws IllegalStateException if the database is not open
+ *
+ * @see Cursor
+ */
+ public Cursor query(String table, String[] columns, String selection,
+ String[] selectionArgs, String groupBy, String having,
+ String orderBy) {
+
+ return query(false, table, columns, selection, selectionArgs, groupBy,
+ having, orderBy, null /* limit */);
+ }
+
+ /**
+ * Query the given table, returning a {@link Cursor} over the result set.
+ *
+ * @param table The table name to compile the query against.
+ * @param columns A list of which columns to return. Passing null will
+ * return all columns, which is discouraged to prevent reading
+ * data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return, formatted as an
+ * SQL WHERE clause (excluding the WHERE itself). Passing null
+ * will return all rows for the given table.
+ * @param selectionArgs You may include ?s in selection, which will be
+ * replaced by the values from selectionArgs, in order that they
+ * appear in the selection. The values will be bound as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted as an SQL
+ * GROUP BY clause (excluding the GROUP BY itself). Passing null
+ * will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in the cursor,
+ * if row grouping is being used, formatted as an SQL HAVING
+ * clause (excluding the HAVING itself). Passing null will cause
+ * all row groups to be included, and is required when row
+ * grouping is not being used.
+ * @param orderBy How to order the rows, formatted as an SQL ORDER BY clause
+ * (excluding the ORDER BY itself). Passing null will use the
+ * default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ *
+ * @return A {@link Cursor} object, which is positioned before the first entry. Note that
+ * {@link Cursor}s are not synchronized, see the documentation for more details.
+ *
+ * @throws SQLiteException if there is an issue executing the sql or the SQL string is invalid
+ * @throws IllegalStateException if the database is not open
+ *
+ * @see Cursor
+ */
+ public Cursor query(String table, String[] columns, String selection,
+ String[] selectionArgs, String groupBy, String having,
+ String orderBy, String limit) {
+
+ return query(false, table, columns, selection, selectionArgs, groupBy,
+ having, orderBy, limit);
+ }
+
+ /**
+ * Runs the provided SQL and returns a {@link Cursor} over the result set.
+ *
+ * @param sql the SQL query. The SQL string must not be ; terminated
+ * @param selectionArgs You may include ?s in where clause in the query,
+ * which will be replaced by the values from selectionArgs. The
+ * values will be bound as Strings.
+ *
+ * @return A {@link Cursor} object, which is positioned before the first entry. Note that
+ * {@link Cursor}s are not synchronized, see the documentation for more details.
+ *
+ * @throws SQLiteException if there is an issue executing the sql or the SQL string is invalid
+ * @throws IllegalStateException if the database is not open
+ */
+ public Cursor rawQuery(String sql, String[] selectionArgs) {
+ return rawQueryWithFactory(null, sql, selectionArgs, null);
+ }
+
+ /**
+ * Determines the total size in bytes of the query results, and the largest
+ * single row in bytes for the query.
+ *
+ * @param sql the SQL query. The SQL string must a SELECT statement
+ * @param args the argments to bind to the query
+ *
+ * @return A {@link SQLiteQueryStats} based the provided SQL query.
+ */
+ public SQLiteQueryStats getQueryStats(String sql, Object[] args){
+ long totalPayload = 0L;
+ long largestIndividualPayload = 0L;
+ try {
+ String query = String.format("CREATE TABLE tempstat AS %s", sql);
+ execSQL(query, args);
+ Cursor cursor = rawQuery("SELECT sum(payload) FROM dbstat WHERE name = 'tempstat';", new Object[]{});
+ if(cursor == null) return new SQLiteQueryStats(totalPayload, largestIndividualPayload);
+ cursor.moveToFirst();
+ totalPayload = cursor.getLong(0);
+ cursor.close();
+ cursor = rawQuery("SELECT max(mx_payload) FROM dbstat WHERE name = 'tempstat';", new Object[]{});
+ if(cursor == null) return new SQLiteQueryStats(totalPayload, largestIndividualPayload);
+ cursor.moveToFirst();
+ largestIndividualPayload = cursor.getLong(0);
+ cursor.close();
+ execSQL("DROP TABLE tempstat;");
+ } catch(SQLiteException ex) {
+ execSQL("DROP TABLE IF EXISTS tempstat;");
+ throw ex;
+ }
+ return new SQLiteQueryStats(totalPayload, largestIndividualPayload);
+ }
+
    /**
     * Runs the provided SQL and returns a {@link Cursor} over the result set.
     *
     * @param sql the SQL query; must not be terminated with a semicolon
     * @param args values bound, by their runtime type, to the ?s in the
     *        query's where clause
     * @return a {@link Cursor} over the result set; note that cursors are not
     *         synchronized
     * @throws SQLiteException if the SQL is invalid or fails to execute
     * @throws IllegalStateException if the database is not open
     */
    public Cursor rawQuery(String sql, Object[] args) {
        if (!isOpen()) {
            throw new IllegalStateException("database not open");
        }
        // Only pay for a timestamp when verbose logging or slow-query
        // reporting is enabled.
        long timeStart = 0;
        if (Config.LOGV || mSlowQueryThreshold != -1) {
            timeStart = System.currentTimeMillis();
        }
        SQLiteDirectCursorDriver driver = new SQLiteDirectCursorDriver(this, sql, null);
        Cursor cursor = null;
        try {
            cursor = driver.query(mFactory, args);
        } finally {
            if (Config.LOGV || mSlowQueryThreshold != -1) {
                // Force query execution
                int count = -1;
                if (cursor != null) {
                    count = cursor.getCount();
                }

                long duration = System.currentTimeMillis() - timeStart;

                if (BuildConfig.DEBUG || duration >= mSlowQueryThreshold) {
                    // NOTE(review): "args are , count is" looks like the bind
                    // arguments were redacted or dropped from this message —
                    // confirm intent before changing the log text.
                    Log.v(TAG,
                            "query (" + duration + " ms): " + driver.toString() +
                            ", args are , count is " + count);
                }
            }
        }
        // Wrap so the cursor can cross process boundaries.
        return new CrossProcessCursorWrapper(cursor);
    }
+
    /**
     * Runs the provided SQL and returns a cursor over the result set.
     *
     * @param cursorFactory the cursor factory to use, or null for the
     *        database's default factory
     * @param sql the SQL query; must not be terminated with a semicolon
     * @param selectionArgs values bound, as Strings, to the ?s in the query's
     *        where clause
     * @param editTable the name of the first table, which is editable
     * @return a {@link Cursor} over the result set; note that cursors are not
     *         synchronized
     * @throws SQLiteException if the SQL is invalid or fails to execute
     * @throws IllegalStateException if the database is not open
     */
    public Cursor rawQueryWithFactory(
            CursorFactory cursorFactory, String sql, String[] selectionArgs,
            String editTable) {
        if (!isOpen()) {
            throw new IllegalStateException("database not open");
        }
        // Only pay for a timestamp when verbose logging or slow-query
        // reporting is enabled.
        long timeStart = 0;

        if (Config.LOGV || mSlowQueryThreshold != -1) {
            timeStart = System.currentTimeMillis();
        }

        SQLiteCursorDriver driver = new SQLiteDirectCursorDriver(this, sql, editTable);

        Cursor cursor = null;
        try {
            // Fall back to the database's default factory when none is given.
            cursor = driver.query(
                    cursorFactory != null ? cursorFactory : mFactory,
                    selectionArgs);
        } finally {
            if (Config.LOGV || mSlowQueryThreshold != -1) {

                // Force query execution
                int count = -1;
                if (cursor != null) {
                    count = cursor.getCount();
                }

                long duration = System.currentTimeMillis() - timeStart;

                if (BuildConfig.DEBUG || duration >= mSlowQueryThreshold) {
                    Log.v(TAG,
                            "query (" + duration + " ms): " + driver.toString() +
                            ", args are , count is " + count);
                }
            }
        }
        // Wrap so the cursor can cross process boundaries.
        return new CrossProcessCursorWrapper(cursor);
    }
+
    /**
     * Runs the provided SQL and returns a cursor over the result set.
     * The cursor will read an initial set of rows and then return to the
     * caller. It will continue to read in batches and send data changed
     * notifications when the later batches are ready.
     *
     * @param sql the SQL query; must not be terminated with a semicolon
     * @param selectionArgs values bound, as Strings, to the ?s in the query's
     *        where clause
     * @param initialRead the count of items to read from the cursor initially
     * @param maxRead the count of items to read on each subsequent iteration
     * @return a {@link Cursor} over the result set; note that cursors are not
     *         synchronized
     *
     * This work is incomplete and not fully tested or reviewed, so currently
     * hidden.
     * @hide
     */
    public Cursor rawQuery(String sql, String[] selectionArgs,
            int initialRead, int maxRead) {
        // The wrapper returned by rawQueryWithFactory is unwrapped here so the
        // batching load style can be configured on the underlying SQLiteCursor.
        net.sqlcipher.CursorWrapper cursorWrapper = (net.sqlcipher.CursorWrapper)rawQueryWithFactory(null, sql, selectionArgs, null);
        ((SQLiteCursor)cursorWrapper.getWrappedCursor()).setLoadStyle(initialRead, maxRead);
        return cursorWrapper;
    }
+
+ /**
+ * Convenience method for inserting a row into the database.
+ *
+ * @param table the table to insert the row into
+ * @param nullColumnHack SQL doesn't allow inserting a completely empty row,
+ * so if initialValues is empty this column will explicitly be
+ * assigned a NULL value
+ * @param values this map contains the initial column values for the
+ * row. The keys should be the column names and the values the
+ * column values
+ * @return the row ID of the newly inserted row, or -1 if an error occurred
+ */
+ public long insert(String table, String nullColumnHack, ContentValues values) {
+ try {
+ return insertWithOnConflict(table, nullColumnHack, values, CONFLICT_NONE);
+ } catch (SQLException e) {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Error inserting into " + table, e);
+ }
+ return -1;
+ }
+ }
+
+ /**
+ * Convenience method for inserting a row into the database.
+ *
+ * @param table the table to insert the row into
+ * @param nullColumnHack SQL doesn't allow inserting a completely empty row,
+ * so if initialValues is empty this column will explicitly be
+ * assigned a NULL value
+ * @param values this map contains the initial column values for the
+ * row. The keys should be the column names and the values the
+ * column values
+ * @throws SQLException
+ * @return the row ID of the newly inserted row, or -1 if an error occurred
+ */
+ public long insertOrThrow(String table, String nullColumnHack, ContentValues values)
+ throws SQLException {
+ return insertWithOnConflict(table, nullColumnHack, values, CONFLICT_NONE);
+ }
+
+ /**
+ * Convenience method for replacing a row in the database.
+ *
+ * @param table the table in which to replace the row
+ * @param nullColumnHack SQL doesn't allow inserting a completely empty row,
+ * so if initialValues is empty this row will explicitly be
+ * assigned a NULL value
+ * @param initialValues this map contains the initial column values for
+ * the row. The key
+ * @return the row ID of the newly inserted row, or -1 if an error occurred
+ */
+ public long replace(String table, String nullColumnHack, ContentValues initialValues) {
+ try {
+ return insertWithOnConflict(table, nullColumnHack, initialValues,
+ CONFLICT_REPLACE);
+ } catch (SQLException e) {
+ if(BuildConfig.DEBUG){
+ Log.e(TAG, "Error inserting into " + table, e);
+ }
+ return -1;
+ }
+ }
+
+ /**
+ * Convenience method for replacing a row in the database.
+ *
+ * @param table the table in which to replace the row
+ * @param nullColumnHack SQL doesn't allow inserting a completely empty row,
+ * so if initialValues is empty this row will explicitly be
+ * assigned a NULL value
+ * @param initialValues this map contains the initial column values for
+ * the row. The key
+ * @throws SQLException
+ * @return the row ID of the newly inserted row, or -1 if an error occurred
+ */
+ public long replaceOrThrow(String table, String nullColumnHack,
+ ContentValues initialValues) throws SQLException {
+ return insertWithOnConflict(table, nullColumnHack, initialValues,
+ CONFLICT_REPLACE);
+ }
+
    /**
     * General method for inserting a row into the database.
     *
     * @param table the table to insert the row into
     * @param nullColumnHack SQL doesn't allow inserting a completely empty
     *        row, so if initialValues is empty this column will explicitly be
     *        assigned a NULL value
     * @param initialValues initial column values for the row, keyed by
     *        column name
     * @param conflictAlgorithm the insert conflict resolution strategy
     *        (one of the CONFLICT_* constants)
     * @return the row ID of the newly inserted row,
     *         OR the primary key of the existing row if
     *         conflictAlgorithm == {@link #CONFLICT_IGNORE},
     *         OR -1 if any error occurred
     * @throws SQLException if the SQL string is invalid for some reason
     * @throws IllegalStateException if the database is not open
     */
    public long insertWithOnConflict(String table, String nullColumnHack,
            ContentValues initialValues, int conflictAlgorithm) {
        if (!isOpen()) {
            throw new IllegalStateException("database not open");
        }

        // Measurements show most sql lengths <= 152
        StringBuilder sql = new StringBuilder(152);
        sql.append("INSERT");
        // Conflict clause (e.g. " OR REPLACE ") selected by the algorithm.
        sql.append(CONFLICT_VALUES[conflictAlgorithm]);
        sql.append(" INTO ");
        sql.append(table);
        // Measurements show most values lengths < 40
        StringBuilder values = new StringBuilder(40);

        // NOTE(review): generic type parameters appear stripped from this
        // source; presumably Set<Map.Entry<String, Object>> — confirm.
        Set> entrySet = null;
        if (initialValues != null && initialValues.size() > 0) {
            entrySet = initialValues.valueSet();
            Iterator> entriesIter = entrySet.iterator();
            sql.append('(');

            // First pass: build the column list and a matching list of '?'
            // placeholders; values are bound later in the same iteration order.
            boolean needSeparator = false;
            while (entriesIter.hasNext()) {
                if (needSeparator) {
                    sql.append(", ");
                    values.append(", ");
                }
                needSeparator = true;
                Map.Entry entry = entriesIter.next();
                sql.append(entry.getKey());
                values.append('?');
            }

            sql.append(')');
        } else {
            // Empty row: insert an explicit NULL into the hack column.
            sql.append("(" + nullColumnHack + ") ");
            values.append("NULL");
        }

        sql.append(" VALUES(");
        sql.append(values);
        sql.append(");");

        lock();
        SQLiteStatement statement = null;
        try {
            statement = compileStatement(sql.toString());

            // Bind the values
            if (entrySet != null) {
                int size = entrySet.size();
                // Re-iterate the same set: iteration order matches the
                // placeholder order built above.
                Iterator> entriesIter = entrySet.iterator();
                for (int i = 0; i < size; i++) {
                    Map.Entry entry = entriesIter.next();
                    DatabaseUtils.bindObjectToProgram(statement, i + 1, entry.getValue());

                }
            }

            // Run the program and then cleanup
            statement.execute();

            // A change count of zero means nothing was inserted.
            long insertedRowId = lastChangeCount() > 0 ? lastInsertRow() : -1;
            if (insertedRowId == -1) {
                if(BuildConfig.DEBUG){
                    Log.e(TAG, "Error inserting using into " + table);
                }
            } else {
                if (BuildConfig.DEBUG && Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Inserting row " + insertedRowId +
                            " from using into " + table);
                }
            }
            return insertedRowId;
        } catch (SQLiteDatabaseCorruptException e) {
            onCorruption();
            throw e;
        } finally {
            if (statement != null) {
                statement.close();
            }
            unlock();
        }
    }
+
+ /**
+ * Convenience method for deleting rows in the database.
+ *
+ * @param table the table to delete from
+ * @param whereClause the optional WHERE clause to apply when deleting.
+ * Passing null will delete all rows.
+ *
+ * @return the number of rows affected if a whereClause is passed in, 0
+ * otherwise. To remove all rows and get a count pass "1" as the
+ * whereClause.
+ *
+ * @throws SQLException If the SQL string is invalid for some reason
+ * @throws IllegalStateException if the database is not open
+ */
+ public int delete(String table, String whereClause, String[] whereArgs) {
+ Object[] args = new Object[whereArgs.length];
+
+ System.arraycopy(whereArgs, 0, args, 0, whereArgs.length);
+
+ return delete(table, whereClause, args);
+ }
+
    /**
     * Convenience method for deleting rows in the database.
     *
     * @param table the table to delete from
     * @param whereClause the optional WHERE clause to apply when deleting;
     *        passing null deletes all rows
     * @param whereArgs values bound to the ?s in whereClause; may be null
     * @return the number of rows affected if a whereClause is passed in, 0
     *         otherwise. To remove all rows and get a count pass "1" as the
     *         whereClause.
     * @throws SQLException if the SQL string is invalid for some reason
     * @throws IllegalStateException if the database is not open
     */
    public int delete(String table, String whereClause, Object[] whereArgs) {
        SQLiteStatement statement = null;
        lock();
        try {
            if (!isOpen()) {
                throw new IllegalStateException("database not open");
            }
            // Omit the WHERE clause entirely when none was supplied.
            statement = compileStatement("DELETE FROM " + table
                    + (!TextUtils.isEmpty(whereClause)
                    ? " WHERE " + whereClause : ""));
            if (whereArgs != null) {
                int numArgs = whereArgs.length;
                for (int i = 0; i < numArgs; i++) {
                    // Bind parameters are 1-indexed in SQLite.
                    DatabaseUtils.bindObjectToProgram(statement, i + 1, whereArgs[i]);
                }
            }
            statement.execute();
            return lastChangeCount();
        } catch (SQLiteDatabaseCorruptException e) {
            onCorruption();
            throw e;
        } finally {
            if (statement != null) {
                statement.close();
            }
            unlock();
        }
    }
+
+ /**
+ * Convenience method for updating rows in the database.
+ *
+ * @param table the table to update in
+ * @param values a map from column names to new column values. null is a
+ * valid value that will be translated to NULL.
+ * @param whereClause the optional WHERE clause to apply when updating.
+ * Passing null will update all rows.
+ *
+ * @return the number of rows affected
+ *
+ * @throws SQLException If the SQL string is invalid for some reason
+ * @throws IllegalStateException if the database is not open
+ */
+ public int update(String table, ContentValues values, String whereClause, String[] whereArgs) {
+ return updateWithOnConflict(table, values, whereClause, whereArgs, CONFLICT_NONE);
+ }
+
    /**
     * Convenience method for updating rows in the database.
     *
     * @param table the table to update in
     * @param values a map from column names to new column values; null is a
     *        valid value that is translated to NULL
     * @param whereClause the optional WHERE clause to apply when updating;
     *        passing null updates all rows
     * @param whereArgs values bound, as Strings, to the ?s in whereClause
     * @param conflictAlgorithm the update conflict resolution strategy
     *        (one of the CONFLICT_* constants)
     * @return the number of rows affected
     * @throws SQLException if the SQL string is invalid for some reason
     * @throws IllegalStateException if the database is not open
     * @throws IllegalArgumentException if values is null or empty
     */
    public int updateWithOnConflict(String table, ContentValues values,
            String whereClause, String[] whereArgs, int conflictAlgorithm) {
        if (values == null || values.size() == 0) {
            throw new IllegalArgumentException("Empty values");
        }

        StringBuilder sql = new StringBuilder(120);
        sql.append("UPDATE ");
        // Conflict clause (e.g. "OR REPLACE ") selected by the algorithm.
        sql.append(CONFLICT_VALUES[conflictAlgorithm]);
        sql.append(table);
        sql.append(" SET ");

        // NOTE(review): generic type parameters appear stripped from this
        // source; presumably Set<Map.Entry<String, Object>> — confirm.
        Set> entrySet = values.valueSet();
        Iterator> entriesIter = entrySet.iterator();

        // Build "col1=?, col2=?, ..." — values are bound later in the same
        // iteration order.
        while (entriesIter.hasNext()) {
            Map.Entry entry = entriesIter.next();
            sql.append(entry.getKey());
            sql.append("=?");
            if (entriesIter.hasNext()) {
                sql.append(", ");
            }
        }

        if (!TextUtils.isEmpty(whereClause)) {
            sql.append(" WHERE ");
            sql.append(whereClause);
        }
        SQLiteStatement statement = null;
        lock();
        try {
            if (!isOpen()) {
                throw new IllegalStateException("database not open");
            }
            statement = compileStatement(sql.toString());

            // Bind the values
            int size = entrySet.size();
            entriesIter = entrySet.iterator();
            // Bind parameters are 1-indexed; SET values first, then the
            // WHERE arguments continue from the same counter.
            int bindArg = 1;
            for (int i = 0; i < size; i++) {
                Map.Entry entry = entriesIter.next();
                DatabaseUtils.bindObjectToProgram(statement, bindArg, entry.getValue());
                bindArg++;
            }

            if (whereArgs != null) {
                size = whereArgs.length;
                for (int i = 0; i < size; i++) {
                    statement.bindString(bindArg, whereArgs[i]);
                    bindArg++;
                }
            }

            // Run the program and then cleanup
            statement.execute();
            int numChangedRows = lastChangeCount();
            if (BuildConfig.DEBUG && Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "Updated " + numChangedRows +
                        " rows using and for " + table);
            }
            return numChangedRows;
        } catch (SQLiteDatabaseCorruptException e) {
            onCorruption();
            throw e;
        } catch (SQLException e) {
            if(BuildConfig.DEBUG){
                Log.e(TAG, "Error updating using for " + table);
            }
            throw e;
        } finally {
            if (statement != null) {
                statement.close();
            }
            unlock();
        }
    }
+
+ /**
+ * Execute a single SQL statement that is not a query. For example, CREATE
+ * TABLE, DELETE, INSERT, etc. Multiple statements separated by ;s are not
+ * supported. it takes a write lock
+ *
+ * @throws SQLException If the SQL string is invalid for some reason
+ * @throws IllegalStateException if the database is not open
+ */
+ public void execSQL(String sql) throws SQLException {
+ long timeStart = SystemClock.uptimeMillis();
+ lock();
+ try {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ native_execSQL(sql);
+ } catch (SQLiteDatabaseCorruptException e) {
+ onCorruption();
+ throw e;
+ } finally {
+ unlock();
+ }
+ }
+
+ public void rawExecSQL(String sql){
+ long timeStart = SystemClock.uptimeMillis();
+ lock();
+ try {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ native_rawExecSQL(sql);
+ } catch (SQLiteDatabaseCorruptException e) {
+ onCorruption();
+ throw e;
+ } finally {
+ unlock();
+ }
+ }
+
+ /**
+ * Execute a single SQL statement that is not a query. For example, CREATE
+ * TABLE, DELETE, INSERT, etc. Multiple statements separated by ;s are not
+ * supported. it takes a write lock,
+ *
+ * @param sql
+ * @param bindArgs only byte[], String, Long and Double are supported in bindArgs.
+ *
+ * @throws SQLException If the SQL string is invalid for some reason
+ * @throws IllegalStateException if the database is not open
+ */
+ public void execSQL(String sql, Object[] bindArgs) throws SQLException {
+ SQLiteStatement statement = null;
+ if (bindArgs == null) {
+ throw new IllegalArgumentException("Empty bindArgs");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ lock();
+ try {
+ if (!isOpen()) {
+ throw new IllegalStateException("database not open");
+ }
+ statement = compileStatement(sql);
+ if (bindArgs != null) {
+ int numArgs = bindArgs.length;
+ for (int i = 0; i < numArgs; i++) {
+ DatabaseUtils.bindObjectToProgram(statement, i + 1, bindArgs[i]);
+ }
+ }
+ statement.execute();
+ } catch (SQLiteDatabaseCorruptException e) {
+ onCorruption();
+ throw e;
+ } finally {
+ if (statement != null) {
+ statement.close();
+ }
+ unlock();
+ }
+ }
+
    /**
     * Last-resort cleanup: if the database was never explicitly closed, log
     * the allocation stack trace (captured at construction) and release the
     * native resources.
     */
    @Override
    protected void finalize() {
        if (isOpen()) {
            if(BuildConfig.DEBUG){
                // mStackTrace points at where this instance was created,
                // which is where the missing close() call belongs.
                Log.e(TAG, "close() was never explicitly called on database '" +
                        mPath + "' ", mStackTrace);
            }
            closeClosable();
            onAllReferencesReleased();
        }
    }
+
    /**
     * Public constructor which attempts to open the database. See {@link #create} and {@link #openDatabase}.
     *
     * <p>Sets the locale of the database to the system's current locale.
     * Call {@link #setLocale} if you would like something else.
     *
     * @param path The full path to the database
     * @param password to use to open and/or create a database file (char array)
     * @param factory The factory to use when creating cursors, may be NULL.
     * @param flags 0 or {@link #NO_LOCALIZED_COLLATORS}. If the database file already
     *        exists, mFlags will be updated appropriately.
     *
     * @throws SQLiteException if the database cannot be opened
     * @throws IllegalArgumentException if the database path is null
     */
    public SQLiteDatabase(String path, char[] password, CursorFactory factory, int flags) {
        // Initialize fields first, then open/key the connection.
        this(path, factory, flags, null);
        this.openDatabaseInternal(password, null);
    }
+
    /**
     * Public constructor which attempts to open the database. See {@link #create} and {@link #openDatabase}.
     *
     * <p>Sets the locale of the database to the system's current locale.
     * Call {@link #setLocale} if you would like something else.
     *
     * @param path The full path to the database
     * @param password to use to open and/or create a database file (char array)
     * @param factory The factory to use when creating cursors, may be NULL.
     * @param flags 0 or {@link #NO_LOCALIZED_COLLATORS}. If the database file already
     *        exists, mFlags will be updated appropriately.
     * @param databaseHook to run on pre/post key events
     *
     * @throws SQLiteException if the database cannot be opened
     * @throws IllegalArgumentException if the database path is null
     */
    public SQLiteDatabase(String path, char[] password, CursorFactory factory, int flags, SQLiteDatabaseHook databaseHook) {
        // Initialize fields first, then open/key the connection.
        this(path, factory, flags, null);
        this.openDatabaseInternal(password, databaseHook);
    }
+
    /**
     * Public constructor which attempts to open the database using a raw
     * byte-array key. See the char[] overloads for parameter semantics.
     *
     * @param path The full path to the database
     * @param password raw key material used to open and/or create the database
     * @param factory The factory to use when creating cursors, may be NULL.
     * @param flags 0 or {@link #NO_LOCALIZED_COLLATORS}
     * @param databaseHook to run on pre/post key events
     *
     * @throws SQLiteException if the database cannot be opened
     * @throws IllegalArgumentException if the database path is null
     */
    public SQLiteDatabase(String path, byte[] password, CursorFactory factory, int flags, SQLiteDatabaseHook databaseHook) {
        this(path, factory, flags, null);
        this.openDatabaseInternal(password, databaseHook);
    }
+
    /**
     * Private constructor (without database password) which DOES NOT attempt to open the database.
     *
     * @param path The full path to the database
     * @param factory The factory to use when creating cursors, may be NULL.
     * @param flags to control database access mode and other options
     * @param errorHandler The {@link DatabaseErrorHandler} to be used when sqlite reports database
     *        corruption (or null for default).
     *
     * @throws IllegalArgumentException if the database path is null
     */
    private SQLiteDatabase(String path, CursorFactory factory, int flags, DatabaseErrorHandler errorHandler) {
        if (path == null) {
            throw new IllegalArgumentException("path should not be null");
        }

        mFlags = flags;
        mPath = path;

        // Slow-query logging disabled; the SystemProperties lookup used on
        // stock Android is not available here.
        mSlowQueryThreshold = -1;//SystemProperties.getInt(LOG_SLOW_QUERIES_PROPERTY, -1);
        // Capture the creation stack now so finalize() can report where a
        // leaked (never-closed) database came from.
        mStackTrace = new DatabaseObjectNotClosedException().fillInStackTrace();
        mFactory = factory;
        // NOTE(review): raw type — generic parameters appear stripped from
        // this source; confirm the declared type of mPrograms.
        mPrograms = new WeakHashMap();

        mErrorHandler = errorHandler;
    }
+
    /**
     * Converts a char[] passphrase to key bytes and delegates to the byte[]
     * open path.
     *
     * NOTE(review): keyMaterial is not zeroed here after use — presumably
     * intentional since the byte[] overload may retain it; confirm.
     */
    private void openDatabaseInternal(final char[] password, SQLiteDatabaseHook hook) {
        final byte[] keyMaterial = getBytes(password);
        openDatabaseInternal(keyMaterial, hook);
    }
+
    /**
     * Opens the native connection and keys it with the given material.
     *
     * If keying fails and the key material contains embedded NUL characters,
     * retries using the legacy modified-UTF-8 key derivation (key_mutf8) and
     * then rekeys the database to the standard encoding, so subsequent opens
     * succeed on the normal path. On any unrecovered failure the native
     * connection is closed before the exception propagates.
     */
    private void openDatabaseInternal(final byte[] password, SQLiteDatabaseHook hook) {
        // Assume failure until a keying strategy succeeds; the finally block
        // closes the native handle if nothing succeeded.
        boolean shouldCloseConnection = true;
        dbopen(mPath, mFlags);
        try {
            keyDatabase(hook, new Runnable() {
                public void run() {
                    // Empty/absent password means an unencrypted database.
                    if(password != null && password.length > 0) {
                        key(password);
                    }
                }
            });
            shouldCloseConnection = false;

        } catch(RuntimeException ex) {

            // Legacy fallback: keys containing NUL chars were historically
            // derived via modified UTF-8.
            final char[] keyMaterial = getChars(password);
            if(containsNull(keyMaterial)) {
                keyDatabase(hook, new Runnable() {
                    public void run() {
                        if(password != null) {
                            key_mutf8(keyMaterial);
                        }
                    }
                });
                // Migrate to the standard key encoding for future opens.
                if(password != null && password.length > 0) {
                    rekey(password);
                }
                shouldCloseConnection = false;
            } else {
                throw ex;
            }
            // Scrub the derived key material from memory.
            // NOTE(review): skipped when the non-NUL branch rethrows above —
            // confirm whether that path should scrub too.
            if(keyMaterial != null && keyMaterial.length > 0) {
                Arrays.fill(keyMaterial, (char)0);
            }

        } finally {
            if(shouldCloseConnection) {
                dbclose();
                if (SQLiteDebug.DEBUG_SQL_CACHE) {
                    mTimeClosed = getTime();
                }
            }
        }

    }
+
+ private boolean containsNull(char[] data) {
+ char defaultValue = '\u0000';
+ boolean status = false;
+ if(data != null && data.length > 0) {
+ for(char datum : data) {
+ if(datum == defaultValue) {
+ status = true;
+ break;
+ }
+ }
+ }
+ return status;
+ }
+
    /**
     * Runs the supplied keying operation bracketed by the hook's preKey and
     * postKey callbacks, then verifies the key by issuing a trivial query
     * against sqlite_master (which fails with a wrong key).
     *
     * @param databaseHook optional pre/post key callbacks; may be null
     * @param keyOperation the actual key() call to perform; may be null
     * @throws RuntimeException if the verification query fails (e.g. wrong key)
     */
    private void keyDatabase(SQLiteDatabaseHook databaseHook, Runnable keyOperation) {
        if(databaseHook != null) {
            databaseHook.preKey(this);
        }
        if(keyOperation != null){
            keyOperation.run();
        }
        if(databaseHook != null){
            databaseHook.postKey(this);
        }
        if (SQLiteDebug.DEBUG_SQL_CACHE) {
            mTimeOpened = getTime();
        }
        try {
            // Touch the schema to force key validation now rather than on the
            // caller's first query.
            Cursor cursor = rawQuery("select count(*) from sqlite_master;", new String[]{});
            if(cursor != null){
                cursor.moveToFirst();
                int count = cursor.getInt(0);
                cursor.close();
            }
        } catch (RuntimeException e) {
            if(BuildConfig.DEBUG){
                Log.e(TAG, e.getMessage(), e);
            }
            throw e;
        }
    }
+
+ private String getTime() {
+ return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS ", Locale.US).format(System.currentTimeMillis());
+ }
+
    /**
     * return whether the DB is opened as read only.
     * @return true if DB is opened as read only
     */
    public boolean isReadOnly() {
        // Mask off the access-mode bits of the open flags.
        return (mFlags & OPEN_READ_MASK) == OPEN_READONLY;
    }
+
    /**
     * @return true if the DB is currently open (has not been closed)
     */
    public boolean isOpen() {
        // A zero native handle means the connection has been closed/released.
        return mNativeHandle != 0;
    }
+
    /**
     * Returns true if the database schema version is older than newVersion.
     *
     * @param newVersion the target schema version
     * @return true if an upgrade is required
     */
    public boolean needUpgrade(int newVersion) {
        /* NOTE: getVersion() will throw if database is not open. */
        return newVersion > getVersion();
    }
+
    /**
     * Getter for the path to the database file.
     *
     * @return the path to our database file.
     */
    public final String getPath() {
        return mPath;
    }
+
+ /**
+ * Removes email addresses from database filenames before they're
+ * logged to the EventLog where otherwise apps could potentially
+ * read them.
+ */
+ private String getPathForLogs() {
+ if (mPathForLogs != null) {
+ return mPathForLogs;
+ }
+ if (mPath == null) {
+ return null;
+ }
+ if (mPath.indexOf('@') == -1) {
+ mPathForLogs = mPath;
+ } else {
+ mPathForLogs = EMAIL_IN_DB_PATTERN.matcher(mPath).replaceAll("XX@YY");
+ }
+ return mPathForLogs;
+ }
+
    /**
     * Sets the locale for this database. Does nothing if this database has
     * the NO_LOCALIZED_COLLATORS flag set or was opened read only.
     *
     * @param locale the locale whose collators should be registered
     * @throws SQLException if the locale could not be set. The most common reason
     *         for this is that there is no collator available for the locale you requested.
     *         In this case the database remains unchanged.
     */
    public void setLocale(Locale locale) {
        lock();
        try {
            // The native layer checks mFlags for NO_LOCALIZED_COLLATORS /
            // read-only and no-ops accordingly.
            native_setLocale(locale.toString(), mFlags);
        } finally {
            unlock();
        }
    }
+
+ /*
+ * ============================================================================
+ *
+ * The following methods deal with compiled-sql cache
+ * ============================================================================
+ */
+    /**
+     * Adds the given sql and its compiled-statement-id-returned-by-sqlite to
+     * the cache of compiledQueries attached to 'this'.
+     *
+     * If there is already a {@link SQLiteCompiledSql} in compiledQueries for
+     * the given sql, the new {@link SQLiteCompiledSql} object is NOT inserted
+     * (the current mapping is kept).  When the cache is full the entry is
+     * simply dropped, with a rate-limited warning.
+     */
+    /* package */ void addToCompiledQueries(String sql, SQLiteCompiledSql compiledStatement) {
+        if (mMaxSqlCacheSize == 0) {
+            // for this database, there is no cache of compiled sql.
+            if (SQLiteDebug.DEBUG_SQL_CACHE && BuildConfig.DEBUG) {
+                Log.v(TAG, "|NOT adding_sql_to_cache|" + getPath() + "|" + sql);
+            }
+            return;
+        }
+
+        SQLiteCompiledSql compiledSql = null;
+        synchronized(mCompiledQueries) {
+            // don't insert the new mapping if a mapping already exists
+            compiledSql = mCompiledQueries.get(sql);
+            if (compiledSql != null) {
+                return;
+            }
+            // add this to the cache
+            if (mCompiledQueries.size() == mMaxSqlCacheSize) {
+                /*
+                 * Cache size of {@link #mMaxSqlCacheSize} is not enough for
+                 * this app: log a warning, at most
+                 * MAX_WARNINGS_ON_CACHESIZE_CONDITION times.  Chances are the
+                 * app is NOT using '?' for bindargs - so caching is useless.
+                 * TODO: either let the callers set max cache size for their
+                 * app, or intelligently figure out what should be cached for
+                 * a given app.
+                 */
+                if (++mCacheFullWarnings == MAX_WARNINGS_ON_CACHESIZE_CONDITION && BuildConfig.DEBUG) {
+                    Log.w(TAG, "Reached MAX size for compiled-sql statement cache for database " +
+                            getPath() + "; i.e., NO space for this sql statement in cache: " +
+                            sql + ". Please change your sql statements to use '?' for " +
+                            "bindargs, instead of using actual values");
+                }
+                // don't add this entry to cache
+            } else {
+                // cache is NOT full. add this to cache.
+                mCompiledQueries.put(sql, compiledStatement);
+                if (SQLiteDebug.DEBUG_SQL_CACHE && BuildConfig.DEBUG) {
+                    Log.v(TAG, "|adding_sql_to_cache|" + getPath() + "|" +
+                            mCompiledQueries.size() + "|" + sql);
+                }
+            }
+        }
+        return;
+    }
+
+
+    /** Releases every cached compiled statement, then empties the cache. */
+    private void deallocCachedSqlStatements() {
+        synchronized (mCompiledQueries) {
+            for (SQLiteCompiledSql cached : mCompiledQueries.values()) {
+                cached.releaseSqlStatement();
+            }
+            mCompiledQueries.clear();
+        }
+    }
+
+    /**
+     * From the compiledQueries cache, returns the compiled statement for the
+     * given sql.  Returns null if caching is disabled for this database or
+     * the sql is not in the cache.
+     */
+    /* package */ SQLiteCompiledSql getCompiledStatementForSql(String sql) {
+        SQLiteCompiledSql compiledStatement = null;
+        boolean cacheHit;
+        synchronized(mCompiledQueries) {
+            if (mMaxSqlCacheSize == 0) {
+                // for this database, there is no cache of compiled sql.
+                if (SQLiteDebug.DEBUG_SQL_CACHE && BuildConfig.DEBUG) {
+                    Log.v(TAG, "|cache NOT found|" + getPath());
+                }
+                return null;
+            }
+            cacheHit = (compiledStatement = mCompiledQueries.get(sql)) != null;
+        }
+        // NOTE(review): the hit/miss counters are updated outside the monitor
+        // above, so concurrent lookups may under-count.  Presumably acceptable
+        // for debug-only statistics — confirm.
+        if (cacheHit) {
+            mNumCacheHits++;
+        } else {
+            mNumCacheMisses++;
+        }
+
+        if (SQLiteDebug.DEBUG_SQL_CACHE && BuildConfig.DEBUG) {
+            Log.v(TAG, "|cache_stats|" +
+                    getPath() + "|" + mCompiledQueries.size() +
+                    "|" + mNumCacheHits + "|" + mNumCacheMisses +
+                    "|" + cacheHit + "|" + mTimeOpened + "|" + mTimeClosed + "|" + sql);
+        }
+        return compiledStatement;
+    }
+
+    /**
+     * Reports whether a compiled statement for the given sql is currently in
+     * the compiled-sql cache.
+     * @hide
+     */
+    public boolean isInCompiledSqlCache(String sql) {
+        synchronized (mCompiledQueries) {
+            return mCompiledQueries.containsKey(sql);
+        }
+    }
+
+    /**
+     * Removes the entry for the given sql from the compiled-sql cache, if
+     * present.
+     * @hide
+     */
+    public void purgeFromCompiledSqlCache(String sql) {
+        synchronized (mCompiledQueries) {
+            mCompiledQueries.remove(sql);
+        }
+    }
+
+    /**
+     * Discards every entry in the compiled-sql cache.
+     * @hide
+     */
+    public void resetCompiledSqlCache() {
+        synchronized (mCompiledQueries) {
+            mCompiledQueries.clear();
+        }
+    }
+
+    /**
+     * Returns the current maximum number of compiled statements this
+     * database will cache.
+     * @hide
+     */
+    public synchronized int getMaxSqlCacheSize() {
+        return mMaxSqlCacheSize;
+    }
+
+    /**
+     * Sets the max size of the compiled-sql cache for this database after
+     * purging the cache (size = number of compiled statements stored).
+     *
+     * The cache size can ONLY be increased from its current value (default
+     * 0); a smaller value throws.  Synchronized so two threads cannot change
+     * the cache size at the same time.
+     *
+     * @param cacheSize the size of the cache, in [0, MAX_SQL_CACHE_SIZE]
+     * @throws IllegalStateException if cacheSize is out of range or smaller
+     *         than the value set by a previous setMaxSqlCacheSize() call
+     * @hide
+     */
+    public synchronized void setMaxSqlCacheSize(int cacheSize) {
+        if (cacheSize < 0 || cacheSize > MAX_SQL_CACHE_SIZE) {
+            throw new IllegalStateException("expected value between 0 and " + MAX_SQL_CACHE_SIZE);
+        }
+        if (cacheSize < mMaxSqlCacheSize) {
+            throw new IllegalStateException("cannot set cacheSize to a value less than the value " +
+                    "set with previous setMaxSqlCacheSize() call.");
+        }
+        mMaxSqlCacheSize = cacheSize;
+    }
+
+    /**
+     * Encodes the given characters to bytes using the KEY_ENCODING charset.
+     * Returns null for a null or empty input.
+     */
+    public static byte[] getBytes(char[] data) {
+        if (data == null || data.length == 0) {
+            return null;
+        }
+        ByteBuffer encoded = Charset.forName(KEY_ENCODING).encode(CharBuffer.wrap(data));
+        byte[] out = new byte[encoded.limit()];
+        encoded.get(out);
+        return out;
+    }
+
+    /**
+     * Decodes the given bytes to characters using the KEY_ENCODING charset.
+     * Returns null for a null or empty input.
+     */
+    public static char[] getChars(byte[] data) {
+        if (data == null || data.length == 0) {
+            return null;
+        }
+        CharBuffer decoded = Charset.forName(KEY_ENCODING).decode(ByteBuffer.wrap(data));
+        char[] out = new char[decoded.limit()];
+        decoded.get(out);
+        return out;
+    }
+
+ /* begin SQLiteSupportDatabase methods */
+
+    /** Runs {@code query} with no bind arguments via rawQuery(). */
+    @Override
+    public android.database.Cursor query(String query) {
+        return rawQuery(query, null);
+    }
+
+    /** Runs {@code query} with the given bind arguments via rawQuery(). */
+    @Override
+    public android.database.Cursor query(String query, Object[] bindArgs) {
+        return rawQuery(query, bindArgs);
+    }
+
+    /**
+     * Runs a SupportSQLiteQuery by delegating to the CancellationSignal
+     * overload with a null signal.
+     */
+    @Override
+    public android.database.Cursor query(SupportSQLiteQuery query) {
+        return query(query, null);
+    }
+
+    /**
+     * Executes a SupportSQLiteQuery by replaying its bindings into a
+     * recorder and issuing the captured sql/args through rawQuery().
+     *
+     * NOTE(review): cancellationSignal is not consulted anywhere here, so
+     * the query cannot actually be cancelled via the signal — confirm this
+     * is acceptable to callers.
+     */
+    @Override
+    public android.database.Cursor query(final SupportSQLiteQuery supportQuery,
+                                         CancellationSignal cancellationSignal) {
+        BindingsRecorder recorder = new BindingsRecorder();
+        supportQuery.bindTo(recorder);
+        return rawQuery(supportQuery.getSql(), recorder.getBindings());
+    }
+
+ @Override
+ public long insert(String table, int conflictAlgorithm,
+ ContentValues values)
+ throws android.database.SQLException {
+ return insertWithOnConflict(table, null, values, conflictAlgorithm);
+ }
+
+ @Override
+ public int update(String table, int conflictAlgorithm, ContentValues values,
+ String whereClause, Object[] whereArgs) {
+ String[] args = new String[whereArgs.length];
+
+ for (int i = 0; i < whereArgs.length; i++) {
+ args[i] = whereArgs[i].toString();
+ }
+
+ return updateWithOnConflict(table, values, whereClause, args, conflictAlgorithm);
+ }
+
+    /**
+     * Begins a transaction, adapting the framework
+     * android.database.sqlite.SQLiteTransactionListener to the SQLCipher
+     * listener type and delegating to the SQLCipher-typed overload.
+     */
+    @Override
+    public void beginTransactionWithListener(
+            final android.database.sqlite.SQLiteTransactionListener transactionListener) {
+        SQLiteTransactionListener adapter = new SQLiteTransactionListener() {
+            @Override
+            public void onBegin() {
+                transactionListener.onBegin();
+            }
+
+            @Override
+            public void onCommit() {
+                transactionListener.onCommit();
+            }
+
+            @Override
+            public void onRollback() {
+                transactionListener.onRollback();
+            }
+        };
+        beginTransactionWithListener(adapter);
+    }
+
+    /**
+     * Begins a non-exclusive transaction, adapting the framework
+     * android.database.sqlite.SQLiteTransactionListener to the SQLCipher
+     * listener type and delegating to the SQLCipher-typed overload.
+     */
+    @Override
+    public void beginTransactionWithListenerNonExclusive(
+            final android.database.sqlite.SQLiteTransactionListener transactionListener) {
+        SQLiteTransactionListener adapter = new SQLiteTransactionListener() {
+            @Override
+            public void onBegin() {
+                transactionListener.onBegin();
+            }
+
+            @Override
+            public void onCommit() {
+                transactionListener.onCommit();
+            }
+
+            @Override
+            public void onRollback() {
+                transactionListener.onRollback();
+            }
+        };
+        beginTransactionWithListenerNonExclusive(adapter);
+    }
+
+ /* end SQLiteSupportDatabase methods */
+
+    /**
+     * Shared implementation for the beginTransaction* entry points.
+     *
+     * Acquires the database lock with lockForced() before anything else and
+     * releases it only on failure — on success the lock is held until the
+     * matching endTransaction().  For a nested call (this thread already
+     * holds the lock) no BEGIN is issued; the outer transaction is reused.
+     *
+     * @param transactionListener optional begin/commit/rollback callbacks
+     * @param transactionType selects BEGIN EXCLUSIVE / IMMEDIATE / DEFERRED
+     * @throws IllegalStateException if the database is closed, or if called
+     *         between setTransactionSuccessful() and endTransaction()
+     * @throws IllegalArgumentException for an unsupported transaction type
+     */
+    private void beginTransactionWithListenerInternal(SQLiteTransactionListener transactionListener,
+            SQLiteDatabaseTransactionType transactionType) {
+        lockForced();
+        if (!isOpen()) {
+            throw new IllegalStateException("database not open");
+        }
+        boolean ok = false;
+        try {
+            // If this thread already had the lock then get out
+            if (mLock.getHoldCount() > 1) {
+                if (mInnerTransactionIsSuccessful) {
+                    String msg = "Cannot call beginTransaction between "
+                            + "calling setTransactionSuccessful and endTransaction";
+                    IllegalStateException e = new IllegalStateException(msg);
+                    if(BuildConfig.DEBUG){
+                        Log.e(TAG, "beginTransaction() failed", e);
+                    }
+                    throw e;
+                }
+                ok = true;
+                return;
+            }
+            // This thread didn't already have the lock, so begin a database
+            // transaction now.
+            if(transactionType == SQLiteDatabaseTransactionType.Exclusive) {
+                execSQL("BEGIN EXCLUSIVE;");
+            } else if(transactionType == SQLiteDatabaseTransactionType.Immediate) {
+                execSQL("BEGIN IMMEDIATE;");
+            } else if(transactionType == SQLiteDatabaseTransactionType.Deferred) {
+                execSQL("BEGIN DEFERRED;");
+            } else {
+                String message = String.format("%s is an unsupported transaction type",
+                        transactionType);
+                throw new IllegalArgumentException(message);
+            }
+            mTransactionListener = transactionListener;
+            mTransactionIsSuccessful = true;
+            mInnerTransactionIsSuccessful = false;
+            if (transactionListener != null) {
+                try {
+                    transactionListener.onBegin();
+                } catch (RuntimeException e) {
+                    // A failing onBegin() aborts the just-started transaction.
+                    execSQL("ROLLBACK;");
+                    throw e;
+                }
+            }
+            ok = true;
+        } finally {
+            if (!ok) {
+                // beginTransaction is called before the try block so we must release the lock in
+                // the case of failure.
+                unlockForced();
+            }
+        }
+    }
+
+    /**
+     * Collects statistics about ALL open databases in the current process;
+     * bugreport is a user of this data.
+     *
+     * Fix: restored the generic type parameters (ArrayList&lt;DbStats&gt;,
+     * ArrayList&lt;Pair&lt;String, String&gt;&gt;) that had been stripped from the
+     * source; without them the method does not compile (p.first / p.second
+     * would be plain Objects).
+     */
+    /* package */ static ArrayList<DbStats> getDbStats() {
+        ArrayList<DbStats> dbStatsList = new ArrayList<DbStats>();
+
+        for (SQLiteDatabase db : getActiveDatabases()) {
+            if (db == null || !db.isOpen()) {
+                continue;
+            }
+
+            // get SQLITE_DBSTATUS_LOOKASIDE_USED for the db
+            int lookasideUsed = db.native_getDbLookaside();
+
+            // get the lastnode of the dbname
+            String path = db.getPath();
+            int indx = path.lastIndexOf("/");
+            String lastnode = path.substring((indx != -1) ? ++indx : 0);
+
+            // get list of attached dbs and for each db, get its size and pagesize
+            ArrayList<Pair<String, String>> attachedDbs = getAttachedDbs(db);
+            if (attachedDbs == null) {
+                continue;
+            }
+            for (int i = 0; i < attachedDbs.size(); i++) {
+                Pair<String, String> p = attachedDbs.get(i);
+                long pageCount = getPragmaVal(db, p.first + ".page_count;");
+
+                // first entry in the attached db list is always the main database
+                // don't worry about prefixing the dbname with "main"
+                String dbName;
+                if (i == 0) {
+                    dbName = lastnode;
+                } else {
+                    // lookaside is only relevant for the main db
+                    lookasideUsed = 0;
+                    dbName = " (attached) " + p.first;
+                    // if the attached db has a path, append the lastnode of that path
+                    if (p.second.trim().length() > 0) {
+                        int idx = p.second.lastIndexOf("/");
+                        dbName += " : " + p.second.substring((idx != -1) ? ++idx : 0);
+                    }
+                }
+                if (pageCount > 0) {
+                    dbStatsList.add(new DbStats(dbName, pageCount, db.getPageSize(),
+                            lookasideUsed));
+                }
+            }
+        }
+        return dbStatsList;
+    }
+
+    /**
+     * Returns a snapshot of the currently tracked open databases, copied
+     * under the sActiveDatabases lock.
+     *
+     * Fix: restored the stripped ArrayList&lt;SQLiteDatabase&gt; type parameters;
+     * the raw-typed version cannot be iterated as SQLiteDatabase by
+     * getDbStats().
+     */
+    private static ArrayList<SQLiteDatabase> getActiveDatabases() {
+        ArrayList<SQLiteDatabase> databases = new ArrayList<SQLiteDatabase>();
+        synchronized (sActiveDatabases) {
+            databases.addAll(sActiveDatabases.keySet());
+        }
+        return databases;
+    }
+
+    /**
+     * Reads a single pragma value from sqlite for the specified database;
+     * only handles pragmas that return an int/long.  Returns 0 if the
+     * database is closed.  NO JAVA locks are held in this method.
+     * TODO: use this to do all pragma's in this class
+     */
+    private static long getPragmaVal(SQLiteDatabase db, String pragma) {
+        if (!db.isOpen()) {
+            return 0;
+        }
+        SQLiteStatement stmt = null;
+        try {
+            stmt = new SQLiteStatement(db, "PRAGMA " + pragma);
+            return stmt.simpleQueryForLong();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+        }
+    }
+
+    /**
+     * Returns the full pathnames of all attached databases, including the
+     * main database, as (name, path) pairs.  Returns null if the database is
+     * not open.
+     * TODO: move this to {@link DatabaseUtils}
+     *
+     * Fixes: restored the stripped Pair&lt;String, String&gt; generics (required
+     * for compilation and by getDbStats()'s use of p.first/p.second), and
+     * the cursor is now closed in a finally block so it cannot leak if
+     * iteration throws.
+     */
+    private static ArrayList<Pair<String, String>> getAttachedDbs(SQLiteDatabase dbObj) {
+        if (!dbObj.isOpen()) {
+            return null;
+        }
+        ArrayList<Pair<String, String>> attachedDbs = new ArrayList<Pair<String, String>>();
+        Cursor c = dbObj.rawQuery("pragma database_list;", null);
+        try {
+            while (c.moveToNext()) {
+                // column 1 = logical name, column 2 = file path
+                attachedDbs.add(new Pair<String, String>(c.getString(1), c.getString(2)));
+            }
+        } finally {
+            c.close();
+        }
+        return attachedDbs;
+    }
+
+    /**
+     * Executes a single-value pragma query and returns (success, value).
+     * Returns (false, "") when the query yields no cursor or no row.
+     *
+     * Fixes: restored the stripped Pair&lt;Boolean, String&gt; generics, guarded
+     * the previously unchecked moveToFirst() result (an empty result set
+     * would have thrown from getString()), and closed the cursor in a
+     * finally block so it cannot leak on exception.
+     */
+    private Pair<Boolean, String> getResultFromPragma(String command) {
+        Cursor cursor = rawQuery(command, new Object[]{});
+        if (cursor == null) {
+            return new Pair<Boolean, String>(false, "");
+        }
+        try {
+            if (!cursor.moveToFirst()) {
+                return new Pair<Boolean, String>(false, "");
+            }
+            return new Pair<Boolean, String>(true, cursor.getString(0));
+        } finally {
+            cursor.close();
+        }
+    }
+
+
+    /**
+     * Sets the root directory to search for the ICU data file
+     */
+    public static native void setICURoot(String path);
+
+    /**
+     * Native call to open the database.
+     *
+     * @param path The full path to the database
+     * @param flags the OPEN_* flag bits controlling how the file is opened
+     */
+    private native void dbopen(String path, int flags);
+
+    /**
+     * Native call to setup tracing of all sql statements
+     *
+     * @param path the full path to the database
+     */
+    private native void enableSqlTracing(String path);
+
+    /**
+     * Native call to setup profiling of all sql statements.
+     * currently, sqlite's profiling = printing of execution-time
+     * (wall-clock time) of each of the sql statements, as they
+     * are executed.
+     *
+     * @param path the full path to the database
+     */
+    private native void enableSqlProfiling(String path);
+
+    /**
+     * Native call to execute a raw SQL statement. {@link #lock} must be held
+     * when calling this method.
+     *
+     * @param sql The raw SQL string
+     *
+     * @throws SQLException
+     */
+    /* package */ native void native_execSQL(String sql) throws SQLException;
+
+    /**
+     * Native call to set the locale. {@link #lock} must be held when calling
+     * this method.
+     *
+     * @throws SQLException
+     */
+    /* package */ native void native_setLocale(String loc, int flags);
+
+    /**
+     * Returns the row ID of the last row inserted into the database.
+     *
+     * @return the row ID of the last row inserted into the database.
+     */
+    /* package */ native long lastInsertRow();
+
+    /**
+     * Returns the number of changes made in the last statement executed.
+     *
+     * @return the number of changes made in the last statement executed.
+     */
+    /* package */ native int lastChangeCount();
+
+    /**
+     * return the SQLITE_DBSTATUS_LOOKASIDE_USED documented here
+     * http://www.sqlite.org/c3ref/c_dbstatus_lookaside_used.html
+     * @return int value of SQLITE_DBSTATUS_LOOKASIDE_USED
+     */
+    private native int native_getDbLookaside();
+
+    // Executes sql in the native layer.  NOTE(review): how this differs from
+    // native_execSQL is defined by the native implementation — confirm there.
+    private native void native_rawExecSQL(String sql);
+
+    // Queries a native status counter; 'reset' presumably clears the
+    // high-water mark.  NOTE(review): operation codes are defined natively —
+    // confirm against the JNI implementation.
+    private native int native_status(int operation, boolean reset);
+
+    // Passes the encoded key bytes to the native layer.
+    private native void key(byte[] key) throws SQLException;
+    // Key variant taking characters; NOTE(review): name suggests modified
+    // UTF-8 encoding in the native layer — confirm.
+    private native void key_mutf8(char[] key) throws SQLException;
+    // Replaces the key of an already-keyed database in the native layer.
+    private native void rekey(byte[] key) throws SQLException;
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseCorruptException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseCorruptException.java
new file mode 100644
index 0000000..2e7373c
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseCorruptException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that the SQLite database file is corrupt.
+ */
+public class SQLiteDatabaseCorruptException extends SQLiteException {
+    /** Creates the exception with no detail message. */
+    public SQLiteDatabaseCorruptException() {
+        super();
+    }
+
+    /** Creates the exception with the given detail message. */
+    public SQLiteDatabaseCorruptException(String error) {
+        super(error);
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseHook.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseHook.java
new file mode 100644
index 0000000..a5014b1
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDatabaseHook.java
@@ -0,0 +1,15 @@
+package net.sqlcipher.database;
+
+/**
+ * An interface to perform pre and post key operations against a database
+ * as it is being opened.
+ */
+public interface SQLiteDatabaseHook {
+    /**
+     * Called immediately before opening the database.
+     *
+     * @param database the database being opened
+     */
+    void preKey(SQLiteDatabase database);
+
+    /**
+     * Called immediately after opening the database.
+     *
+     * @param database the database that was opened
+     */
+    void postKey(SQLiteDatabase database);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDebug.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDebug.java
new file mode 100644
index 0000000..d90f017
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDebug.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import java.util.ArrayList;
+
+import android.util.Log;
+
+/**
+ * Provides debugging info about all SQLite databases running in the current process.
+ *
+ * Fix: restored the stripped generic type on {@link PagerStats#dbStats}
+ * (ArrayList&lt;DbStats&gt;); also corrected several comment typos.
+ *
+ * {@hide}
+ */
+public final class SQLiteDebug {
+    /**
+     * Controls the printing of SQL statements as they are executed.
+     */
+    public static final boolean DEBUG_SQL_STATEMENTS =
+            Log.isLoggable("SQLiteStatements", Log.VERBOSE);
+
+    /**
+     * Controls the printing of wall-clock time taken to execute SQL statements
+     * as they are executed.
+     */
+    public static final boolean DEBUG_SQL_TIME =
+            Log.isLoggable("SQLiteTime", Log.VERBOSE);
+
+    /**
+     * Controls the printing of compiled-sql-statement cache stats.
+     */
+    public static final boolean DEBUG_SQL_CACHE =
+            Log.isLoggable("SQLiteCompiledSql", Log.VERBOSE);
+
+    /**
+     * Controls the stack trace reporting of active cursors being
+     * finalized.
+     */
+    public static final boolean DEBUG_ACTIVE_CURSOR_FINALIZATION =
+            Log.isLoggable("SQLiteCursorClosing", Log.VERBOSE);
+
+    /**
+     * Controls the tracking of time spent holding the database lock.
+     */
+    public static final boolean DEBUG_LOCK_TIME_TRACKING =
+            Log.isLoggable("SQLiteLockTime", Log.VERBOSE);
+
+    /**
+     * Controls the printing of stack traces when tracking the time spent holding the database lock.
+     */
+    public static final boolean DEBUG_LOCK_TIME_TRACKING_STACK_TRACE =
+            Log.isLoggable("SQLiteLockStackTrace", Log.VERBOSE);
+
+    /**
+     * Contains statistics about the active pagers in the current process.
+     *
+     * @see #getPagerStats(PagerStats)
+     */
+    public static class PagerStats {
+        /** The total number of bytes in all pagers in the current process
+         * @deprecated not used any longer
+         */
+        @Deprecated
+        public long totalBytes;
+        /** The number of bytes in referenced pages in all pagers in the current process
+         * @deprecated not used any longer
+         */
+        @Deprecated
+        public long referencedBytes;
+        /** The number of bytes in all database files opened in the current process
+         * @deprecated not used any longer
+         */
+        @Deprecated
+        public long databaseBytes;
+        /** The number of pagers opened in the current process
+         * @deprecated not used any longer
+         */
+        @Deprecated
+        public int numPagers;
+
+        /** the current amount of memory checked out by sqlite using sqlite3_malloc().
+         * documented at http://www.sqlite.org/c3ref/c_status_malloc_size.html
+         */
+        public int memoryUsed;
+
+        /** the number of bytes of page cache allocation which could not be satisfied by the
+         * SQLITE_CONFIG_PAGECACHE buffer and were forced to overflow to sqlite3_malloc().
+         * The returned value includes allocations that overflowed because they were too large
+         * (they were larger than the "sz" parameter to SQLITE_CONFIG_PAGECACHE) and allocations
+         * that overflowed because no space was left in the page cache.
+         * documented at http://www.sqlite.org/c3ref/c_status_malloc_size.html
+         * (field name kept as-is, misspelling included, for source compatibility)
+         */
+        public int pageCacheOverflo;
+
+        /** records the largest memory allocation request handed to sqlite3.
+         * documented at http://www.sqlite.org/c3ref/c_status_malloc_size.html
+         */
+        public int largestMemAlloc;
+
+        /** a list of {@link DbStats} - one for each main database opened by the applications
+         * running on the android device
+         */
+        public ArrayList<DbStats> dbStats;
+    }
+
+    /**
+     * contains statistics about a database
+     */
+    public static class DbStats {
+        /** name of the database */
+        public String dbName;
+
+        /** the page size for the database */
+        public long pageSize;
+
+        /** the database size, computed as (pageCount * pageSize) / 1024 */
+        public long dbSize;
+
+        /** documented here http://www.sqlite.org/c3ref/c_dbstatus_lookaside_used.html */
+        public int lookaside;
+
+        public DbStats(String dbName, long pageCount, long pageSize, int lookaside) {
+            this.dbName = dbName;
+            this.pageSize = pageSize;
+            dbSize = (pageCount * pageSize) / 1024;
+            this.lookaside = lookaside;
+        }
+    }
+
+    /**
+     * return all pager and database stats for the current process.
+     * @return {@link PagerStats}
+     */
+    public static PagerStats getDatabaseInfo() {
+        PagerStats stats = new PagerStats();
+        getPagerStats(stats);
+        stats.dbStats = SQLiteDatabase.getDbStats();
+        return stats;
+    }
+
+    /**
+     * Gathers statistics about all pagers in the current process.
+     */
+    public static native void getPagerStats(PagerStats stats);
+
+    /**
+     * Returns the size of the SQLite heap.
+     * @return The size of the SQLite heap in bytes.
+     */
+    public static native long getHeapSize();
+
+    /**
+     * Returns the amount of allocated memory in the SQLite heap.
+     * @return The allocated size in bytes.
+     */
+    public static native long getHeapAllocatedSize();
+
+    /**
+     * Returns the amount of free memory in the SQLite heap.
+     * @return The freed size in bytes.
+     */
+    public static native long getHeapFreeSize();
+
+    /**
+     * Determines the number of dirty pages belonging to the SQLite
+     * heap segments of this process. pages[0] returns the number of
+     * shared pages, pages[1] returns the number of private pages
+     */
+    public static native void getHeapDirtyPages(int[] pages);
+
+    // Count of cursors finalized while still active; incremented under the
+    // class lock in notifyActiveCursorFinalized().
+    private static int sNumActiveCursorsFinalized = 0;
+
+    /**
+     * Returns the number of active cursors that have been finalized. This depends on the GC having
+     * run but is still useful for tests.
+     */
+    public static int getNumActiveCursorsFinalized() {
+        return sNumActiveCursorsFinalized;
+    }
+
+    static synchronized void notifyActiveCursorFinalized() {
+        sNumActiveCursorsFinalized++;
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDirectCursorDriver.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDirectCursorDriver.java
new file mode 100644
index 0000000..36ae59a
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDirectCursorDriver.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.Cursor;
+import net.sqlcipher.database.SQLiteDatabase.CursorFactory;
+
+/**
+ * A cursor driver that uses the given query directly.
+ *
+ * @hide
+ */
+public class SQLiteDirectCursorDriver implements SQLiteCursorDriver {
+    private String mEditTable;      // table hint forwarded to the cursor
+    private SQLiteDatabase mDatabase;
+    private Cursor mCursor;         // last cursor handed out; null after cursorClosed()
+    private String mSql;
+    private SQLiteQuery mQuery;     // compiled query retained for setBindArguments()
+
+    public SQLiteDirectCursorDriver(SQLiteDatabase db, String sql, String editTable) {
+        mDatabase = db;
+        mEditTable = editTable;
+        mSql = sql;
+    }
+
+    // Object[]-args variant.
+    public Cursor query(CursorFactory factory, Object[] args) {
+        SQLiteQuery query = new SQLiteQuery(mDatabase, mSql, 0, args);
+        try {
+            // NOTE(review): args are passed to the SQLiteQuery constructor
+            // above AND bound again here — confirm the double pass is
+            // intentional and not redundant.
+            query.bindArguments(args);
+            if (factory == null) {
+                mCursor = new SQLiteCursor(mDatabase, this, mEditTable, query);
+            } else {
+                mCursor = factory.newCursor(mDatabase, this, mEditTable, query);
+            }
+            // Success: keep the query in mQuery and null the local so the
+            // finally block below does not close it.
+            mQuery = query;
+            query = null;
+            return mCursor;
+        } finally {
+            // Make sure this object is cleaned up if something happens
+            if (query != null) query.close();
+        }
+    }
+
+    public Cursor query(CursorFactory factory, String[] selectionArgs) {
+        // Compile the query
+        SQLiteQuery query = new SQLiteQuery(mDatabase, mSql, 0, selectionArgs);
+
+        try {
+            // Arg binding (positions are 1-based)
+            int numArgs = selectionArgs == null ? 0 : selectionArgs.length;
+            for (int i = 0; i < numArgs; i++) {
+                query.bindString(i + 1, selectionArgs[i]);
+            }
+
+            // Create the cursor
+            if (factory == null) {
+                mCursor = new SQLiteCursor(mDatabase, this, mEditTable, query);
+
+            } else {
+                mCursor = factory.newCursor(mDatabase, this, mEditTable, query);
+            }
+
+            // Success: keep the query in mQuery and null the local so the
+            // finally block below does not close it.
+            mQuery = query;
+            query = null;
+            return mCursor;
+        } finally {
+            // Make sure this object is cleaned up if something happens
+            if (query != null) query.close();
+        }
+    }
+
+    public void cursorClosed() {
+        mCursor = null;
+    }
+
+    // Re-binds string arguments on the retained query.
+    public void setBindArguments(String[] bindArgs) {
+        final int numArgs = bindArgs.length;
+        for (int i = 0; i < numArgs; i++) {
+            mQuery.bindString(i + 1, bindArgs[i]);
+        }
+    }
+
+    @Override
+    public void cursorDeactivated() {
+        // Do nothing
+    }
+
+    @Override
+    public void cursorRequeried(android.database.Cursor cursor) {
+        // Do nothing
+    }
+
+    @Override
+    public String toString() {
+        return "SQLiteDirectCursorDriver: " + mSql;
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDiskIOException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDiskIOException.java
new file mode 100644
index 0000000..de4b543
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDiskIOException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that an IO error occured while accessing the
+ * SQLite database file.
+ */
+public class SQLiteDiskIOException extends SQLiteException {
+    /** Creates the exception with no detail message. */
+    public SQLiteDiskIOException() {
+        super();
+    }
+
+    /** Creates the exception with the given detail message. */
+    public SQLiteDiskIOException(String error) {
+        super(error);
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDoneException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDoneException.java
new file mode 100644
index 0000000..f0f6f0d
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteDoneException.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that the SQLite program is done.
+ * Thrown when an operation that expects a row (such as {@link
+ * SQLiteStatement#simpleQueryForString} or {@link
+ * SQLiteStatement#simpleQueryForLong}) does not get one.
+ */
+public class SQLiteDoneException extends SQLiteException {
+    /** Creates the exception with no detail message. */
+    public SQLiteDoneException() {
+        super();
+    }
+
+    /** Creates the exception with the given detail message. */
+    public SQLiteDoneException(String error) {
+        super(error);
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteException.java
new file mode 100644
index 0000000..2c7f11a
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteException.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.*;
+
+/**
+ * A SQLite exception that indicates there was an error with SQL parsing or
+ * execution.  Base class for the more specific SQLite exception types.
+ */
+public class SQLiteException extends SQLException {
+    /** Creates the exception with no detail message. */
+    public SQLiteException() {
+        super();
+    }
+
+    /** Creates the exception with the given detail message. */
+    public SQLiteException(String error) {
+        super(error);
+    }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteFullException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteFullException.java
new file mode 100644
index 0000000..66af19f
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteFullException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that the SQLite database is full.
+ */
+public class SQLiteFullException extends SQLiteException {
+ /** Constructs a new exception with no detail message. */
+ public SQLiteFullException() {}
+
+ /**
+ * Constructs a new exception with the given detail message.
+ *
+ * @param error detail message describing the database-full condition
+ */
+ public SQLiteFullException(String error) {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteMisuseException.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteMisuseException.java
new file mode 100644
index 0000000..ef261fc
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteMisuseException.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * An exception that indicates that the SQLite API was used incorrectly,
+ * e.g. an operation was attempted on a statement or database in an
+ * invalid state. Presumably maps to SQLite's SQLITE_MISUSE result code —
+ * confirm against the native layer.
+ */
+public class SQLiteMisuseException extends SQLiteException {
+ /** Constructs a new exception with no detail message. */
+ public SQLiteMisuseException() {}
+
+ /**
+ * Constructs a new exception with the given detail message.
+ *
+ * @param error detail message describing the misuse
+ */
+ public SQLiteMisuseException(String error) {
+ super(error);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteOpenHelper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteOpenHelper.java
new file mode 100644
index 0000000..259b5b5
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteOpenHelper.java
@@ -0,0 +1,399 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import java.io.File;
+
+import android.content.Context;
+import net.sqlcipher.DatabaseErrorHandler;
+import net.sqlcipher.DefaultDatabaseErrorHandler;
+import net.sqlcipher.database.SQLiteDatabaseHook;
+import net.sqlcipher.database.SQLiteDatabase.CursorFactory;
+import android.util.Log;
+
+/**
+ * A helper class to manage database creation and version management.
+ * You create a subclass implementing {@link #onCreate}, {@link #onUpgrade} and
+ * optionally {@link #onOpen}, and this class takes care of opening the database
+ * if it exists, creating it if it does not, and upgrading it as necessary.
+ * Transactions are used to make sure the database is always in a sensible state.
+ *
+ * <p>For an example, see the NotePadProvider class in the NotePad sample application,
+ * in the samples/ directory of the SDK.
+ */
+public abstract class SQLiteOpenHelper {
+ private static final String TAG = SQLiteOpenHelper.class.getSimpleName();
+
+ // Immutable configuration captured at construction time.
+ private final Context mContext;
+ private final String mName;
+ private final CursorFactory mFactory;
+ private final int mNewVersion;
+ private final SQLiteDatabaseHook mHook;
+ private final DatabaseErrorHandler mErrorHandler;
+ // Whether write-ahead logging is currently enabled on the open database.
+ private boolean mEnableWriteAheadLogging;
+ // Set when setWriteAheadLoggingEnabled() is called before a writable
+ // database exists; applied on the next successful getWritableDatabase().
+ private boolean mDeferSetWriteAheadLoggingEnabled;
+
+ private SQLiteDatabase mDatabase = null; // cached open database, if any
+ private boolean mIsInitializing = false; // guards against re-entrant open calls
+
+ /**
+ * Create a helper object to create, open, and/or manage a database.
+ * This method always returns very quickly. The database is not actually
+ * created or opened until one of {@link #getWritableDatabase} or
+ * {@link #getReadableDatabase} is called.
+ *
+ * @param context to use to open or create the database
+ * @param name of the database file, or null for an in-memory database
+ * @param factory to use for creating cursor objects, or null for the default
+ * @param version number of the database (starting at 1); if the database is older,
+ * {@link #onUpgrade} will be used to upgrade the database
+ */
+ public SQLiteOpenHelper(Context context, String name, CursorFactory factory, int version) {
+ this(context, name, factory, version, null, new DefaultDatabaseErrorHandler());
+ }
+
+ /**
+ * Create a helper object to create, open, and/or manage a database.
+ * The database is not actually created or opened until one of
+ * {@link #getWritableDatabase} or {@link #getReadableDatabase} is called.
+ *
+ * @param context to use to open or create the database
+ * @param name of the database file, or null for an in-memory database
+ * @param factory to use for creating cursor objects, or null for the default
+ * @param version number of the database (starting at 1); if the database is older,
+ * {@link #onUpgrade} will be used to upgrade the database
+ * @param hook to run on pre/post key events
+ */
+ public SQLiteOpenHelper(Context context, String name, CursorFactory factory,
+ int version, SQLiteDatabaseHook hook) {
+ this(context, name, factory, version, hook, new DefaultDatabaseErrorHandler());
+ }
+
+ /**
+ * Create a helper object to create, open, and/or manage a database.
+ * The database is not actually created or opened until one of
+ * {@link #getWritableDatabase} or {@link #getReadableDatabase} is called.
+ *
+ *
+ * <p>Accepts input param: a concrete instance of {@link DatabaseErrorHandler} to be
+ * used to handle corruption when sqlite reports database corruption.
+ *
+ * @param context to use to open or create the database
+ * @param name of the database file, or null for an in-memory database
+ * @param factory to use for creating cursor objects, or null for the default
+ * @param version number of the database (starting at 1); if the database is older,
+ * {@link #onUpgrade} will be used to upgrade the database
+ * @param hook to run on pre/post key events
+ * @param errorHandler the {@link DatabaseErrorHandler} to be used when sqlite reports database
+ * corruption.
+ */
+ public SQLiteOpenHelper(Context context, String name, CursorFactory factory,
+ int version, SQLiteDatabaseHook hook, DatabaseErrorHandler errorHandler) {
+ if (version < 1) throw new IllegalArgumentException("Version must be >= 1, was " + version);
+ if (errorHandler == null) {
+ throw new IllegalArgumentException("DatabaseErrorHandler param value can't be null.");
+ }
+
+ mContext = context;
+ mName = name;
+ mFactory = factory;
+ mNewVersion = version;
+ mHook = hook;
+ mErrorHandler = errorHandler;
+ }
+
+ /**
+ * Create and/or open a database that will be used for reading and writing.
+ * Once opened successfully, the database is cached, so you can call this
+ * method every time you need to write to the database. Make sure to call
+ * {@link #close} when you no longer need it.
+ *
+ * <p>Errors such as bad permissions or a full disk may cause this operation
+ * to fail, but future attempts may succeed if the problem is fixed.
+ *
+ * @param password the passphrase used to key (decrypt) the database
+ * @throws SQLiteException if the database cannot be opened for writing
+ * @return a read/write database object valid until {@link #close} is called
+ */
+
+ public synchronized SQLiteDatabase getWritableDatabase(String password) {
+ return getWritableDatabase(password == null ? null : password.toCharArray());
+ }
+
+ // char[] overload: converts the passphrase to raw bytes before opening.
+ public synchronized SQLiteDatabase getWritableDatabase(char[] password) {
+ return getWritableDatabase(password == null ? null : SQLiteDatabase.getBytes(password));
+ }
+
+ // byte[] overload: does the actual open/create/upgrade work.
+ public synchronized SQLiteDatabase getWritableDatabase(byte[] password) {
+ if (mDatabase != null && mDatabase.isOpen() && !mDatabase.isReadOnly()) {
+ return mDatabase; // The database is already open for business
+ }
+
+ if (mIsInitializing) {
+ throw new IllegalStateException("getWritableDatabase called recursively");
+ }
+
+ // If we have a read-only database open, someone could be using it
+ // (though they shouldn't), which would cause a lock to be held on
+ // the file, and our attempts to open the database read-write would
+ // fail waiting for the file lock. To prevent that, we acquire the
+ // lock on the read-only database, which shuts out other users.
+
+ boolean success = false;
+ SQLiteDatabase db = null;
+ if (mDatabase != null) mDatabase.lock();
+ try {
+ mIsInitializing = true;
+ if (mName == null) {
+ // null name means an in-memory database.
+ db = SQLiteDatabase.create(null, "");
+ } else {
+ String path = mContext.getDatabasePath(mName).getPath();
+ File dbPathFile = new File (path);
+ if (!dbPathFile.exists()) {
+ dbPathFile.getParentFile().mkdirs();
+ }
+ db = SQLiteDatabase.openOrCreateDatabase(path, password, mFactory, mHook, mErrorHandler);
+ }
+ if(mDeferSetWriteAheadLoggingEnabled) {
+ // Apply the WAL preference recorded before the database existed.
+ mEnableWriteAheadLogging = db.enableWriteAheadLogging();
+ }
+ onConfigure(db);
+ int version = db.getVersion();
+ if (version != mNewVersion) {
+ // Run the schema migration inside a transaction so a failed
+ // create/upgrade/downgrade rolls back completely.
+ db.beginTransaction();
+ try {
+ if (version == 0) {
+ onCreate(db);
+ } else {
+ if(version > mNewVersion) {
+ onDowngrade(db, version, mNewVersion);
+ } else {
+ onUpgrade(db, version, mNewVersion);
+ }
+ }
+ db.setVersion(mNewVersion);
+ db.setTransactionSuccessful();
+ } finally {
+ db.endTransaction();
+ }
+ }
+
+ onOpen(db);
+ success = true;
+ return db;
+ } finally {
+ mIsInitializing = false;
+ if (success) {
+ // Swap the new writable database in for any previously cached
+ // (e.g. read-only) one, closing and unlocking the old instance.
+ if (mDatabase != null) {
+ try { mDatabase.close(); } catch (Exception e) { }
+ mDatabase.unlock();
+ }
+ mDatabase = db;
+ } else {
+ if (mDatabase != null) mDatabase.unlock();
+ if (db != null) db.close();
+ }
+ }
+ }
+
+ /**
+ * Create and/or open a database. This will be the same object returned by
+ * {@link #getWritableDatabase} unless some problem, such as a full disk,
+ * requires the database to be opened read-only. In that case, a read-only
+ * database object will be returned. If the problem is fixed, a future call
+ * to {@link #getWritableDatabase} may succeed, in which case the read-only
+ * database object will be closed and the read/write object will be returned
+ * in the future.
+ *
+ * @param password the passphrase used to key (decrypt) the database
+ * @throws SQLiteException if the database cannot be opened
+ * @return a database object valid until {@link #getWritableDatabase}
+ * or {@link #close} is called.
+ */
+ public synchronized SQLiteDatabase getReadableDatabase(String password) {
+ return getReadableDatabase(password == null ? null : password.toCharArray());
+ }
+
+ // char[] overload: converts the passphrase to raw bytes before opening.
+ public synchronized SQLiteDatabase getReadableDatabase(char[] password) {
+ return getReadableDatabase(password == null ? null : SQLiteDatabase.getBytes(password));
+ }
+
+ // byte[] overload: tries writable first, falls back to read-only.
+ public synchronized SQLiteDatabase getReadableDatabase(byte[] password) {
+ if (mDatabase != null && mDatabase.isOpen()) {
+ return mDatabase; // The database is already open for business
+ }
+
+ if (mIsInitializing) {
+ throw new IllegalStateException("getReadableDatabase called recursively");
+ }
+
+ try {
+ return getWritableDatabase(password);
+ } catch (SQLiteException e) {
+ if (mName == null) throw e; // Can't open a temp database read-only!
+ Log.e(TAG, "Couldn't open " + mName + " for writing (will try read-only):", e);
+ }
+
+ SQLiteDatabase db = null;
+ try {
+ mIsInitializing = true;
+ String path = mContext.getDatabasePath(mName).getPath();
+ File databasePath = new File(path);
+ File databasesDirectory = new File(mContext.getDatabasePath(mName).getParent());
+
+ if(!databasesDirectory.exists()){
+ databasesDirectory.mkdirs();
+ }
+ if(!databasePath.exists()){
+ // No file yet: create it via the writable path (temporarily
+ // clearing the re-entrancy guard), then reopen read-only below.
+ mIsInitializing = false;
+ db = getWritableDatabase(password);
+ mIsInitializing = true;
+ db.close();
+ }
+ db = SQLiteDatabase.openDatabase(path, password, mFactory, SQLiteDatabase.OPEN_READONLY, mHook, mErrorHandler);
+ if (db.getVersion() != mNewVersion) {
+ throw new SQLiteException("Can't upgrade read-only database from version " +
+ db.getVersion() + " to " + mNewVersion + ": " + path);
+ }
+
+ onOpen(db);
+ Log.w(TAG, "Opened " + mName + " in read-only mode");
+ mDatabase = db;
+ return mDatabase;
+ } finally {
+ mIsInitializing = false;
+ if (db != null && db != mDatabase) db.close();
+ }
+ }
+
+ /**
+ * Close any open database object.
+ */
+ public synchronized void close() {
+ if (mIsInitializing) throw new IllegalStateException("Closed during initialization");
+
+ if (mDatabase != null && mDatabase.isOpen()) {
+ mDatabase.close();
+ mDatabase = null;
+ }
+ }
+
+ /**
+ * Return the name of the SQLite database being opened, as given to
+ * the constructor.
+ */
+ public String getDatabaseName() {
+ return mName;
+ }
+
+ /**
+ * Enables or disables the use of write-ahead logging for the database.
+ *
+ * Write-ahead logging cannot be used with read-only databases so the value of
+ * this flag is ignored if the database is opened read-only.
+ *
+ * @param enabled True if write-ahead logging should be enabled, false if it
+ * should be disabled.
+ *
+ * @see SQLiteDatabase#enableWriteAheadLogging()
+ */
+ public void setWriteAheadLoggingEnabled(boolean enabled) {
+ synchronized (this) {
+ if (mEnableWriteAheadLogging != enabled) {
+ if (mDatabase != null && mDatabase.isOpen() && !mDatabase.isReadOnly()) {
+ if (enabled) {
+ mDatabase.enableWriteAheadLogging();
+ } else {
+ mDatabase.disableWriteAheadLogging();
+ }
+ mEnableWriteAheadLogging = enabled;
+ } else {
+ // No writable database yet: remember the preference and apply
+ // it when the database is next opened writable.
+ mDeferSetWriteAheadLoggingEnabled = enabled;
+ }
+ }
+ }
+ }
+
+ /**
+ * Called when the database needs to be downgraded. This is strictly similar to
+ * {@link #onUpgrade} method, but is called whenever current version is newer than requested one.
+ * However, this method is not abstract, so it is not mandatory for a customer to
+ * implement it. If not overridden, default implementation will reject downgrade and
+ * throws SQLiteException
+ *
+ *
+ * This method executes within a transaction. If an exception is thrown, all changes
+ * will automatically be rolled back.
+ *
+ *
+ * @param db The database.
+ * @param oldVersion The old database version.
+ * @param newVersion The new database version.
+ */
+ public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
+ throw new SQLiteException("Can't downgrade database from version " +
+ oldVersion + " to " + newVersion);
+ }
+
+ /**
+ * Called when the database connection is being configured, to enable features
+ * such as write-ahead logging or foreign key support.
+ *
+ * This method is called before {@link #onCreate}, {@link #onUpgrade},
+ * {@link #onDowngrade}, or {@link #onOpen} are called. It should not modify
+ * the database except to configure the database connection as required.
+ *
+ * This method should only call methods that configure the parameters of the
+ * database connection, such as {@link SQLiteDatabase#enableWriteAheadLogging}
+ * {@link SQLiteDatabase#setForeignKeyConstraintsEnabled},
+ * {@link SQLiteDatabase#setLocale}, or executing PRAGMA statements.
+ *
+ *
+ * @param db The database.
+ */
+ public void onConfigure(SQLiteDatabase db) {}
+
+ /**
+ * Called when the database is created for the first time. This is where the
+ * creation of tables and the initial population of the tables should happen.
+ *
+ * @param db The database.
+ */
+ public abstract void onCreate(SQLiteDatabase db);
+
+ /**
+ * Called when the database needs to be upgraded. The implementation
+ * should use this method to drop tables, add tables, or do anything else it
+ * needs to upgrade to the new schema version.
+ *
+ *
+ * <p>The SQLite ALTER TABLE documentation can be found
+ * here. If you add new columns
+ * you can use ALTER TABLE to insert them into a live table. If you rename or remove columns
+ * you can use ALTER TABLE to rename the old table, then create the new table and then
+ * populate the new table with the contents of the old table.
+ *
+ * @param db The database.
+ * @param oldVersion The old database version.
+ * @param newVersion The new database version.
+ */
+ public abstract void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion);
+
+ /**
+ * Called when the database has been opened.
+ * Override method should check {@link SQLiteDatabase#isReadOnly} before
+ * updating the database.
+ *
+ * @param db The database.
+ */
+ public void onOpen(SQLiteDatabase db) {}
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteProgram.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteProgram.java
new file mode 100644
index 0000000..e43826d
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteProgram.java
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import android.util.Log;
+import androidx.sqlite.db.SupportSQLiteProgram;
+
+/**
+ * A base class for compiled SQLite programs.
+ *
+ * SQLiteProgram is not internally synchronized so code using a SQLiteProgram from multiple
+ * threads should perform its own synchronization when using the SQLiteProgram.
+ */
+public abstract class SQLiteProgram extends SQLiteClosable implements
+ SupportSQLiteProgram {
+
+ private static final String TAG = "SQLiteProgram";
+
+ /** The database this program is compiled against.
+ * @deprecated do not use this
+ */
+ @Deprecated
+ protected SQLiteDatabase mDatabase;
+
+ /** The SQL used to create this query */
+ /* package */ final String mSql;
+
+ /**
+ * Native linkage, do not modify. This comes from the database and should not be modified
+ * in here or in the native code.
+ * @deprecated do not use this
+ */
+ @Deprecated
+ protected long nHandle = 0;
+
+ /**
+ * the SQLiteCompiledSql object for the given sql statement.
+ */
+ private SQLiteCompiledSql mCompiledSql;
+
+ /**
+ * SQLiteCompiledSql statement id is populated with the corresponding object from the above
+ * member. This member is used by the native_bind_* methods
+ * @deprecated do not use this
+ */
+ @Deprecated
+ protected long nStatement = 0;
+
+ /**
+ * Indicates whether {@link #close()} has been called.
+ */
+ boolean mClosed = false;
+
+ /* package */ SQLiteProgram(SQLiteDatabase db, String sql) {
+ mDatabase = db;
+ mSql = sql.trim();
+ db.acquireReference();
+ db.addSQLiteClosable(this);
+ this.nHandle = db.mNativeHandle;
+ // All CRUD keywords checked below are 6 characters long.
+ int crudPrefixLength = 6;
+
+ // only cache CRUD statements
+ String prefixSql = mSql.length() >= crudPrefixLength ? mSql.substring(0, crudPrefixLength) : mSql;
+ if (!prefixSql.equalsIgnoreCase("INSERT") && !prefixSql.equalsIgnoreCase("UPDATE") &&
+ !prefixSql.equalsIgnoreCase("REPLAC") &&
+ !prefixSql.equalsIgnoreCase("DELETE") && !prefixSql.equalsIgnoreCase("SELECT")) {
+ mCompiledSql = new SQLiteCompiledSql(db, sql);
+ nStatement = mCompiledSql.nStatement;
+ // since it is not in the cache, no need to acquire() it.
+ return;
+ }
+
+ // it is not pragma
+ mCompiledSql = db.getCompiledStatementForSql(sql);
+ if (mCompiledSql == null) {
+ // create a new compiled-sql obj
+ mCompiledSql = new SQLiteCompiledSql(db, sql);
+
+ // add it to the cache of compiled-sqls
+ // but before adding it and thus making it available for anyone else to use it,
+ // make sure it is acquired by me.
+ mCompiledSql.acquire();
+ db.addToCompiledQueries(sql, mCompiledSql);
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "Created DbObj (id#" + mCompiledSql.nStatement +
+ ") for sql: " + sql);
+ }
+ } else {
+ // it is already in compiled-sql cache.
+ // try to acquire the object.
+ if (!mCompiledSql.acquire()) {
+ long last = mCompiledSql.nStatement;
+ // the SQLiteCompiledSql in cache is in use by some other SQLiteProgram object.
+ // two different SQLiteProgram objects can't share the same
+ // CompiledSql object, so create a new one.
+ // finalize it when I am done with it in "this" object.
+ mCompiledSql = new SQLiteCompiledSql(db, sql);
+ if (SQLiteDebug.DEBUG_ACTIVE_CURSOR_FINALIZATION) {
+ Log.v(TAG, "** possible bug ** Created NEW DbObj (id#" +
+ mCompiledSql.nStatement +
+ ") because the previously created DbObj (id#" + last +
+ ") was not released for sql:" + sql);
+ }
+ // since it is not in the cache, no need to acquire() it.
+ }
+ }
+ nStatement = mCompiledSql.nStatement;
+ }
+
+ @Override
+ protected void onAllReferencesReleased() {
+ releaseCompiledSqlIfNotInCache();
+ mDatabase.releaseReference();
+ mDatabase.removeSQLiteClosable(this);
+ }
+
+ @Override
+ protected void onAllReferencesReleasedFromContainer() {
+ releaseCompiledSqlIfNotInCache();
+ mDatabase.releaseReference();
+ }
+
+ // Finalizes the native statement only when this program owns it; a cached
+ // statement is merely released back to the database's compiled-sql cache.
+ private void releaseCompiledSqlIfNotInCache() {
+ if (mCompiledSql == null) {
+ return;
+ }
+ synchronized(mDatabase.mCompiledQueries) {
+ if (!mDatabase.mCompiledQueries.containsValue(mCompiledSql)) {
+ // it is NOT in compiled-sql cache. i.e., responsibility of
+ // releasing this statement is on me.
+ mCompiledSql.releaseSqlStatement();
+ mCompiledSql = null;
+ nStatement = 0;
+ } else {
+ // it is in compiled-sql cache. reset its CompiledSql#mInUse flag
+ mCompiledSql.release();
+ }
+ }
+ }
+
+ /**
+ * Returns a unique identifier for this program.
+ *
+ * @return a unique identifier for this program
+ */
+ public final long getUniqueId() {
+ return nStatement;
+ }
+
+ /** Returns the (trimmed) SQL string this program was compiled from. */
+ /* package */ String getSqlString() {
+ return mSql;
+ }
+
+ /**
+ * @deprecated This method is deprecated and must not be used.
+ *
+ * @param sql the SQL string to compile
+ * @param forceCompilation forces the SQL to be recompiled in the event that there is an
+ * existing compiled SQL program already around
+ */
+ @Deprecated
+ protected void compile(String sql, boolean forceCompilation) {
+ // TODO is there a need for this?
+ }
+
+ /**
+ * Bind a NULL value to this statement. The value remains bound until
+ * {@link #clearBindings} is called.
+ *
+ * @param index The 1-based index to the parameter to bind null to
+ */
+ @Override
+ public void bindNull(int index) {
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_bind_null(index);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Bind a long value to this statement. The value remains bound until
+ * {@link #clearBindings} is called.
+ *
+ * @param index The 1-based index to the parameter to bind
+ * @param value The value to bind
+ */
+ @Override
+ public void bindLong(int index, long value) {
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_bind_long(index, value);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Bind a double value to this statement. The value remains bound until
+ * {@link #clearBindings} is called.
+ *
+ * @param index The 1-based index to the parameter to bind
+ * @param value The value to bind
+ */
+ @Override
+ public void bindDouble(int index, double value) {
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_bind_double(index, value);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Bind a String value to this statement. The value remains bound until
+ * {@link #clearBindings} is called.
+ *
+ * @param index The 1-based index to the parameter to bind
+ * @param value The value to bind
+ */
+ @Override
+ public void bindString(int index, String value) {
+ if (value == null) {
+ throw new IllegalArgumentException("the bind value at index " + index + " is null");
+ }
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_bind_string(index, value);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Bind a byte array value to this statement. The value remains bound until
+ * {@link #clearBindings} is called.
+ *
+ * @param index The 1-based index to the parameter to bind
+ * @param value The value to bind
+ */
+ @Override
+ public void bindBlob(int index, byte[] value) {
+ if (value == null) {
+ throw new IllegalArgumentException("the bind value at index " + index + " is null");
+ }
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_bind_blob(index, value);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Clears all existing bindings. Unset bindings are treated as NULL.
+ */
+ @Override
+ public void clearBindings() {
+ if (mClosed) {
+ throw new IllegalStateException("program already closed");
+ }
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ acquireReference();
+ try {
+ native_clear_bindings();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Release this program's resources, making it invalid.
+ */
+ public void close() {
+ if (mClosed) {
+ return;
+ }
+ if (!mDatabase.isOpen()) {
+ return;
+ }
+ mDatabase.lock();
+ try {
+ releaseReference();
+ } finally {
+ mDatabase.unlock();
+ }
+ mClosed = true;
+ }
+
+ /**
+ * @deprecated This method is deprecated and must not be used.
+ * Compiles SQL into a SQLite program.
+ *
+ * <p>The database lock must be held when calling this method.
+ * @param sql The SQL to compile.
+ */
+ @Deprecated
+ protected final native void native_compile(String sql);
+
+ /**
+ * @deprecated This method is deprecated and must not be used.
+ */
+ @Deprecated
+ protected final native void native_finalize();
+
+ protected final native void native_bind_null(int index);
+ protected final native void native_bind_long(int index, long value);
+ protected final native void native_bind_double(int index, double value);
+ protected final native void native_bind_string(int index, String value);
+ protected final native void native_bind_blob(int index, byte[] value);
+ private final native void native_clear_bindings();
+}
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQuery.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQuery.java
new file mode 100644
index 0000000..ab40246
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQuery.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+import net.sqlcipher.*;
+
+import android.os.SystemClock;
+import android.util.Log;
+
+/**
+ * A SQLite program that represents a query that reads the resulting rows into a CursorWindow.
+ * This class is used by SQLiteCursor and isn't useful itself.
+ *
+ * SQLiteQuery is not internally synchronized so code using a SQLiteQuery from multiple
+ * threads should perform its own synchronization when using the SQLiteQuery.
+ */
+public class SQLiteQuery extends SQLiteProgram {
+ private static final String TAG = "Cursor";
+
+ /** The index of the unbound OFFSET parameter */
+ private int mOffsetIndex;
+
+ /** Args to bind on requery */
+ private String[] mBindArgs;
+ private Object[] mObjectBindArgs;
+
+ /**
+ * Create a persistent query object.
+ *
+ * @param db The database that this query object is associated with
+ * @param query The SQL string for this query.
+ * @param offsetIndex The 1-based index to the OFFSET parameter,
+ */
+ /* package */ SQLiteQuery(SQLiteDatabase db, String query, int offsetIndex, String[] bindArgs) {
+ super(db, query);
+
+ mOffsetIndex = offsetIndex;
+ mBindArgs = bindArgs;
+ }
+
+ SQLiteQuery(SQLiteDatabase db, String query, int offsetIndex, Object[] bindArgs) {
+ super(db, query);
+ mOffsetIndex = offsetIndex;
+ mObjectBindArgs = bindArgs;
+ int length = mObjectBindArgs != null ? mObjectBindArgs.length : 0;
+ mBindArgs = new String[length];
+ }
+
+ /**
+ * Reads rows into a buffer. This method acquires the database lock.
+ *
+ * @param window The window to fill into
+ * @return number of total rows in the query
+ */
+ /* package */
+ int fillWindow(CursorWindow window,
+ int maxRead, int lastPos) {
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+ try {
+ acquireReference();
+ try {
+ window.acquireReference();
+ // if the start pos is not equal to 0, then most likely window is
+ // too small for the data set, loading by another thread
+ // is not safe in this situation. the native code will ignore maxRead
+ int numRows = native_fill_window(window,
+ window.getStartPosition(),
+ window.getRequiredPosition(),
+ mOffsetIndex,
+ maxRead, lastPos);
+
+ // Logging
+ if (SQLiteDebug.DEBUG_SQL_STATEMENTS) {
+ Log.d(TAG, "fillWindow(): " + mSql);
+ }
+ return numRows;
+ } catch (IllegalStateException e){
+ // simply ignore it
+ return 0;
+ } catch (SQLiteDatabaseCorruptException e) {
+ mDatabase.onCorruption();
+ throw e;
+ } finally {
+ window.releaseReference();
+ }
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ /**
+ * Get the column count for the statement. Only valid on query based
+ * statements. The database must be locked
+ * when calling this method.
+ *
+ * @return The number of column in the statement's result set.
+ */
+ /* package */ int columnCountLocked() {
+ acquireReference();
+ try {
+ return native_column_count();
+ } finally {
+ releaseReference();
+ }
+ }
+
+ /**
+ * Retrieves the column name for the given column index. The database must be locked
+ * when calling this method.
+ *
+ * @param columnIndex the index of the column to get the name for
+ * @return The requested column's name
+ */
+ /* package */ String columnNameLocked(int columnIndex) {
+ acquireReference();
+ try {
+ return native_column_name(columnIndex);
+ } finally {
+ releaseReference();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "SQLiteQuery: " + mSql;
+ }
+
+ /**
+ * Called by SQLiteCursor when it is requeried.
+ */
+ /* package */ void requery() {
+ if (mBindArgs != null) {
+ int len = mBindArgs.length;
+ try {
+ if(mObjectBindArgs != null) {
+ bindArguments(mObjectBindArgs);
+ } else {
+ for (int i = 0; i < len; i++) {
+ super.bindString(i + 1, mBindArgs[i]);
+ }
+ }
+ } catch (SQLiteMisuseException e) {
+ StringBuilder errMsg = new StringBuilder("mSql " + mSql);
+ for (int i = 0; i < len; i++) {
+ errMsg.append(" ");
+ errMsg.append(mBindArgs[i]);
+ }
+ errMsg.append(" ");
+ IllegalStateException leakProgram = new IllegalStateException(
+ errMsg.toString(), e);
+ throw leakProgram;
+ }
+ }
+ }
+
+ @Override
+ public void bindNull(int index) {
+ mBindArgs[index - 1] = null;
+ if (!mClosed) super.bindNull(index);
+ }
+
+ @Override
+ public void bindLong(int index, long value) {
+ mBindArgs[index - 1] = Long.toString(value);
+ if (!mClosed) super.bindLong(index, value);
+ }
+
+ @Override
+ public void bindDouble(int index, double value) {
+ mBindArgs[index - 1] = Double.toString(value);
+ if (!mClosed) super.bindDouble(index, value);
+ }
+
+ @Override
+ public void bindString(int index, String value) {
+ mBindArgs[index - 1] = value;
+ if (!mClosed) super.bindString(index, value);
+ }
+
+ public void bindArguments(Object[] args){
+ if(args != null && args.length > 0){
+ for(int i = 0; i < args.length; i++){
+ Object value = args[i];
+ if(value == null){
+ bindNull(i + 1);
+ } else if (value instanceof Double) {
+ bindDouble(i + 1, (Double)value);
+ } else if (value instanceof Float) {
+ float number = ((Number)value).floatValue();
+ bindDouble(i + 1, Double.valueOf(number));
+ } else if (value instanceof Long) {
+ bindLong(i + 1, (Long)value);
+ } else if(value instanceof Integer) {
+ int number = ((Number) value).intValue();
+ bindLong(i + 1, Long.valueOf(number));
+ } else if (value instanceof Boolean) {
+ bindLong(i + 1, (Boolean)value ? 1 : 0);
+ } else if (value instanceof byte[]) {
+ bindBlob(i + 1, (byte[])value);
+ } else {
+ bindString(i + 1, value.toString());
+ }
+ }
+ }
+ }
+
+ private final native int native_fill_window(CursorWindow window,
+ int startPos, int requiredPos,
+ int offsetParam, int maxRead,
+ int lastPos);
+
+ private final native int native_column_count();
+
+ private final native String native_column_name(int columnIndex);
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryBuilder.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryBuilder.java
new file mode 100644
index 0000000..d47f559
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryBuilder.java
@@ -0,0 +1,550 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import net.sqlcipher.*;
+
+import android.provider.BaseColumns;
+import android.text.TextUtils;
+import android.util.Log;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+import java.util.regex.Pattern;
+
+/**
+ * This is a convenience class that helps build SQL queries to be sent to
+ * {@link SQLiteDatabase} objects.
+ */
+public class SQLiteQueryBuilder
+{
+ private static final String TAG = "SQLiteQueryBuilder";
+ private static final Pattern sLimitPattern =
+ Pattern.compile("\\s*\\d+\\s*(,\\s*\\d+\\s*)?");
+
+ private Map<String, String> mProjectionMap = null;
+ private String mTables = "";
+ private StringBuilder mWhereClause = null; // lazily created
+ private boolean mDistinct;
+ private SQLiteDatabase.CursorFactory mFactory;
+ private boolean mStrictProjectionMap;
+
+ public SQLiteQueryBuilder() {
+ mDistinct = false;
+ mFactory = null;
+ }
+
+ /**
+ * Mark the query as DISTINCT.
+ *
+ * @param distinct if true the query is DISTINCT, otherwise it isn't
+ */
+ public void setDistinct(boolean distinct) {
+ mDistinct = distinct;
+ }
+
+ /**
+ * Returns the list of tables being queried
+ *
+ * @return the list of tables being queried
+ */
+ public String getTables() {
+ return mTables;
+ }
+
+ /**
+ * Sets the list of tables to query. Multiple tables can be specified to perform a join.
+ * For example:
+ * setTables("foo, bar")
+ * setTables("foo LEFT OUTER JOIN bar ON (foo.id = bar.foo_id)")
+ *
+ * @param inTables the list of tables to query on
+ */
+ public void setTables(String inTables) {
+ mTables = inTables;
+ }
+
+ /**
+ * Append a chunk to the WHERE clause of the query. All chunks appended are surrounded
+ * by parenthesis and ANDed with the selection passed to {@link #query}. The final
+ * WHERE clause looks like:
+ *
+ * WHERE (<append chunk 1><append chunk2>) AND (<query() selection parameter>)
+ *
+ * @param inWhere the chunk of text to append to the WHERE clause.
+ */
+ public void appendWhere(CharSequence inWhere) {
+ if (mWhereClause == null) {
+ mWhereClause = new StringBuilder(inWhere.length() + 16);
+ }
+ if (mWhereClause.length() == 0) {
+ mWhereClause.append('(');
+ }
+ mWhereClause.append(inWhere);
+ }
+
+ /**
+ * Append a chunk to the WHERE clause of the query. All chunks appended are surrounded
+ * by parenthesis and ANDed with the selection passed to {@link #query}. The final
+ * WHERE clause looks like:
+ *
+ * WHERE (<append chunk 1><append chunk2>) AND (<query() selection parameter>)
+ *
+ * @param inWhere the chunk of text to append to the WHERE clause. it will be escaped
+ * to avoid SQL injection attacks
+ */
+ public void appendWhereEscapeString(String inWhere) {
+ if (mWhereClause == null) {
+ mWhereClause = new StringBuilder(inWhere.length() + 16);
+ }
+ if (mWhereClause.length() == 0) {
+ mWhereClause.append('(');
+ }
+ DatabaseUtils.appendEscapedSQLString(mWhereClause, inWhere);
+ }
+
+ /**
+ * Sets the projection map for the query. The projection map maps
+ * from column names that the caller passes into query to database
+ * column names. This is useful for renaming columns as well as
+ * disambiguating column names when doing joins. For example you
+ * could map "name" to "people.name". If a projection map is set
+ * it must contain all column names the user may request, even if
+ * the key and value are the same.
+ *
+ * @param columnMap maps from the user column names to the database column names
+ */
+ public void setProjectionMap(Map<String, String> columnMap) {
+ mProjectionMap = columnMap;
+ }
+
+ /**
+ * Sets the cursor factory to be used for the query. You can use
+ * one factory for all queries on a database but it is normally
+ * easier to specify the factory when doing this query.
+ * @param factory the factory to use
+ */
+ public void setCursorFactory(SQLiteDatabase.CursorFactory factory) {
+ mFactory = factory;
+ }
+
+ /**
+ * @hide
+ */
+ public void setStrictProjectionMap(boolean flag) {
+ mStrictProjectionMap = flag;
+ }
+
+ /**
+ * Build an SQL query string from the given clauses.
+ *
+ * @param distinct true if you want each row to be unique, false otherwise.
+ * @param tables The table names to compile the query against.
+ * @param columns A list of which columns to return. Passing null will
+ * return all columns, which is discouraged to prevent reading
+ * data from storage that isn't going to be used.
+ * @param where A filter declaring which rows to return, formatted as an SQL
+ * WHERE clause (excluding the WHERE itself). Passing null will
+ * return all rows for the given URL.
+ * @param groupBy A filter declaring how to group rows, formatted as an SQL
+ * GROUP BY clause (excluding the GROUP BY itself). Passing null
+ * will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in the cursor,
+ * if row grouping is being used, formatted as an SQL HAVING
+ * clause (excluding the HAVING itself). Passing null will cause
+ * all row groups to be included, and is required when row
+ * grouping is not being used.
+ * @param orderBy How to order the rows, formatted as an SQL ORDER BY clause
+ * (excluding the ORDER BY itself). Passing null will use the
+ * default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ * @return the SQL query string
+ */
+ public static String buildQueryString(
+ boolean distinct, String tables, String[] columns, String where,
+ String groupBy, String having, String orderBy, String limit) {
+ if (TextUtils.isEmpty(groupBy) && !TextUtils.isEmpty(having)) {
+ throw new IllegalArgumentException(
+ "HAVING clauses are only permitted when using a groupBy clause");
+ }
+ if (!TextUtils.isEmpty(limit) && !sLimitPattern.matcher(limit).matches()) {
+ throw new IllegalArgumentException("invalid LIMIT clauses:" + limit);
+ }
+
+ StringBuilder query = new StringBuilder(120);
+
+ query.append("SELECT ");
+ if (distinct) {
+ query.append("DISTINCT ");
+ }
+ if (columns != null && columns.length != 0) {
+ appendColumns(query, columns);
+ } else {
+ query.append("* ");
+ }
+ query.append("FROM ");
+ query.append(tables);
+ appendClause(query, " WHERE ", where);
+ appendClause(query, " GROUP BY ", groupBy);
+ appendClause(query, " HAVING ", having);
+ appendClause(query, " ORDER BY ", orderBy);
+ appendClause(query, " LIMIT ", limit);
+
+ return query.toString();
+ }
+
+ private static void appendClause(StringBuilder s, String name, String clause) {
+ if (!TextUtils.isEmpty(clause)) {
+ s.append(name);
+ s.append(clause);
+ }
+ }
+
+ private static void appendClauseEscapeClause(StringBuilder s, String name, String clause) {
+ if (!TextUtils.isEmpty(clause)) {
+ s.append(name);
+ DatabaseUtils.appendEscapedSQLString(s, clause);
+ }
+ }
+
+ /**
+ * Add the names that are non-null in columns to s, separating
+ * them with commas.
+ */
+ public static void appendColumns(StringBuilder s, String[] columns) {
+ int n = columns.length;
+
+ for (int i = 0; i < n; i++) {
+ String column = columns[i];
+
+ if (column != null) {
+ if (i > 0) {
+ s.append(", ");
+ }
+ s.append(column);
+ }
+ }
+ s.append(' ');
+ }
+
+ /**
+ * Perform a query by combining all current settings and the
+ * information passed into this method.
+ *
+ * @param db the database to query on
+ * @param projectionIn A list of which columns to return. Passing
+ * null will return all columns, which is discouraged to prevent
+ * reading data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return,
+ * formatted as an SQL WHERE clause (excluding the WHERE
+ * itself). Passing null will return all rows for the given URL.
+ * @param selectionArgs You may include ?s in selection, which
+ * will be replaced by the values from selectionArgs, in order
+ * that they appear in the selection. The values will be bound
+ * as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted
+ * as an SQL GROUP BY clause (excluding the GROUP BY
+ * itself). Passing null will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in
+ * the cursor, if row grouping is being used, formatted as an
+ * SQL HAVING clause (excluding the HAVING itself). Passing
+ * null will cause all row groups to be included, and is
+ * required when row grouping is not being used.
+ * @param sortOrder How to order the rows, formatted as an SQL
+ * ORDER BY clause (excluding the ORDER BY itself). Passing null
+ * will use the default sort order, which may be unordered.
+ * @return a cursor over the result set
+ * @see android.content.ContentResolver#query(android.net.Uri, String[],
+ * String, String[], String)
+ */
+ public Cursor query(SQLiteDatabase db, String[] projectionIn,
+ String selection, String[] selectionArgs, String groupBy,
+ String having, String sortOrder) {
+ return query(db, projectionIn, selection, selectionArgs, groupBy, having, sortOrder,
+ null /* limit */);
+ }
+
+ /**
+ * Perform a query by combining all current settings and the
+ * information passed into this method.
+ *
+ * @param db the database to query on
+ * @param projectionIn A list of which columns to return. Passing
+ * null will return all columns, which is discouraged to prevent
+ * reading data from storage that isn't going to be used.
+ * @param selection A filter declaring which rows to return,
+ * formatted as an SQL WHERE clause (excluding the WHERE
+ * itself). Passing null will return all rows for the given URL.
+ * @param selectionArgs You may include ?s in selection, which
+ * will be replaced by the values from selectionArgs, in order
+ * that they appear in the selection. The values will be bound
+ * as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted
+ * as an SQL GROUP BY clause (excluding the GROUP BY
+ * itself). Passing null will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in
+ * the cursor, if row grouping is being used, formatted as an
+ * SQL HAVING clause (excluding the HAVING itself). Passing
+ * null will cause all row groups to be included, and is
+ * required when row grouping is not being used.
+ * @param sortOrder How to order the rows, formatted as an SQL
+ * ORDER BY clause (excluding the ORDER BY itself). Passing null
+ * will use the default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ * @return a cursor over the result set
+ * @see android.content.ContentResolver#query(android.net.Uri, String[],
+ * String, String[], String)
+ */
+ public Cursor query(SQLiteDatabase db, String[] projectionIn,
+ String selection, String[] selectionArgs, String groupBy,
+ String having, String sortOrder, String limit) {
+ if (mTables == null) {
+ return null;
+ }
+
+ String sql = buildQuery(
+ projectionIn, selection, selectionArgs, groupBy, having,
+ sortOrder, limit);
+
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "Performing query: " + sql);
+ }
+ return db.rawQueryWithFactory(
+ mFactory, sql, selectionArgs,
+ SQLiteDatabase.findEditTable(mTables));
+ }
+
+ /**
+ * Construct a SELECT statement suitable for use in a group of
+ * SELECT statements that will be joined through UNION operators
+ * in buildUnionQuery.
+ *
+ * @param projectionIn A list of which columns to return. Passing
+ * null will return all columns, which is discouraged to
+ * prevent reading data from storage that isn't going to be
+ * used.
+ * @param selection A filter declaring which rows to return,
+ * formatted as an SQL WHERE clause (excluding the WHERE
+ * itself). Passing null will return all rows for the given
+ * URL.
+ * @param selectionArgs You may include ?s in selection, which
+ * will be replaced by the values from selectionArgs, in order
+ * that they appear in the selection. The values will be bound
+ * as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted
+ * as an SQL GROUP BY clause (excluding the GROUP BY itself).
+ * Passing null will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in
+ * the cursor, if row grouping is being used, formatted as an
+ * SQL HAVING clause (excluding the HAVING itself). Passing
+ * null will cause all row groups to be included, and is
+ * required when row grouping is not being used.
+ * @param sortOrder How to order the rows, formatted as an SQL
+ * ORDER BY clause (excluding the ORDER BY itself). Passing null
+ * will use the default sort order, which may be unordered.
+ * @param limit Limits the number of rows returned by the query,
+ * formatted as LIMIT clause. Passing null denotes no LIMIT clause.
+ * @return the resulting SQL SELECT statement
+ */
+ public String buildQuery(
+ String[] projectionIn, String selection, String[] selectionArgs,
+ String groupBy, String having, String sortOrder, String limit) {
+ String[] projection = computeProjection(projectionIn);
+
+ StringBuilder where = new StringBuilder();
+ boolean hasBaseWhereClause = mWhereClause != null && mWhereClause.length() > 0;
+
+ if (hasBaseWhereClause) {
+ where.append(mWhereClause.toString());
+ where.append(')');
+ }
+
+ // Tack on the user's selection, if present.
+ if (selection != null && selection.length() > 0) {
+ if (hasBaseWhereClause) {
+ where.append(" AND ");
+ }
+
+ where.append('(');
+ where.append(selection);
+ where.append(')');
+ }
+
+ return buildQueryString(
+ mDistinct, mTables, projection, where.toString(),
+ groupBy, having, sortOrder, limit);
+ }
+
+ /**
+ * Construct a SELECT statement suitable for use in a group of
+ * SELECT statements that will be joined through UNION operators
+ * in buildUnionQuery.
+ *
+ * @param typeDiscriminatorColumn the name of the result column
+ * whose cells will contain the name of the table from which
+ * each row was drawn.
+ * @param unionColumns the names of the columns to appear in the
+ * result. This may include columns that do not appear in the
+ * table this SELECT is querying (i.e. mTables), but that do
+ * appear in one of the other tables in the UNION query that we
+ * are constructing.
+ * @param columnsPresentInTable a Set of the names of the columns
+ * that appear in this table (i.e. in the table whose name is
+ * mTables). Since columns in unionColumns include columns that
+ * appear only in other tables, we use this array to distinguish
+ * which ones actually are present. Other columns will have
+ * NULL values for results from this subquery.
+ * @param computedColumnsOffset all columns in unionColumns before
+ * this index are included under the assumption that they're
+ * computed and therefore won't appear in columnsPresentInTable,
+ * e.g. "date * 1000 as normalized_date"
+ * @param typeDiscriminatorValue the value used for the
+ * type-discriminator column in this subquery
+ * @param selection A filter declaring which rows to return,
+ * formatted as an SQL WHERE clause (excluding the WHERE
+ * itself). Passing null will return all rows for the given
+ * URL.
+ * @param selectionArgs You may include ?s in selection, which
+ * will be replaced by the values from selectionArgs, in order
+ * that they appear in the selection. The values will be bound
+ * as Strings.
+ * @param groupBy A filter declaring how to group rows, formatted
+ * as an SQL GROUP BY clause (excluding the GROUP BY itself).
+ * Passing null will cause the rows to not be grouped.
+ * @param having A filter declare which row groups to include in
+ * the cursor, if row grouping is being used, formatted as an
+ * SQL HAVING clause (excluding the HAVING itself). Passing
+ * null will cause all row groups to be included, and is
+ * required when row grouping is not being used.
+ * @return the resulting SQL SELECT statement
+ */
+ public String buildUnionSubQuery(
+ String typeDiscriminatorColumn,
+ String[] unionColumns,
+ Set<String> columnsPresentInTable,
+ int computedColumnsOffset,
+ String typeDiscriminatorValue,
+ String selection,
+ String[] selectionArgs,
+ String groupBy,
+ String having) {
+ int unionColumnsCount = unionColumns.length;
+ String[] projectionIn = new String[unionColumnsCount];
+
+ for (int i = 0; i < unionColumnsCount; i++) {
+ String unionColumn = unionColumns[i];
+
+ if (unionColumn.equals(typeDiscriminatorColumn)) {
+ projectionIn[i] = "'" + typeDiscriminatorValue + "' AS "
+ + typeDiscriminatorColumn;
+ } else if (i <= computedColumnsOffset
+ || columnsPresentInTable.contains(unionColumn)) {
+ projectionIn[i] = unionColumn;
+ } else {
+ projectionIn[i] = "NULL AS " + unionColumn;
+ }
+ }
+ return buildQuery(
+ projectionIn, selection, selectionArgs, groupBy, having,
+ null /* sortOrder */,
+ null /* limit */);
+ }
+
+ /**
+ * Given a set of subqueries, all of which are SELECT statements,
+ * construct a query that returns the union of what those
+ * subqueries return.
+ * @param subQueries an array of SQL SELECT statements, all of
+ * which must have the same columns as the same positions in
+ * their results
+ * @param sortOrder How to order the rows, formatted as an SQL
+ * ORDER BY clause (excluding the ORDER BY itself). Passing
+ * null will use the default sort order, which may be unordered.
+ * @param limit The limit clause, which applies to the entire union result set
+ *
+ * @return the resulting SQL SELECT statement
+ */
+ public String buildUnionQuery(String[] subQueries, String sortOrder, String limit) {
+ StringBuilder query = new StringBuilder(128);
+ int subQueryCount = subQueries.length;
+ String unionOperator = mDistinct ? " UNION " : " UNION ALL ";
+
+ for (int i = 0; i < subQueryCount; i++) {
+ if (i > 0) {
+ query.append(unionOperator);
+ }
+ query.append(subQueries[i]);
+ }
+ appendClause(query, " ORDER BY ", sortOrder);
+ appendClause(query, " LIMIT ", limit);
+ return query.toString();
+ }
+
+ private String[] computeProjection(String[] projectionIn) {
+ if (projectionIn != null && projectionIn.length > 0) {
+ if (mProjectionMap != null) {
+ String[] projection = new String[projectionIn.length];
+ int length = projectionIn.length;
+
+ for (int i = 0; i < length; i++) {
+ String userColumn = projectionIn[i];
+ String column = mProjectionMap.get(userColumn);
+
+ if (column != null) {
+ projection[i] = column;
+ continue;
+ }
+
+ if (!mStrictProjectionMap &&
+ ( userColumn.contains(" AS ") || userColumn.contains(" as "))) {
+ /* A column alias already exist */
+ projection[i] = userColumn;
+ continue;
+ }
+
+ throw new IllegalArgumentException("Invalid column "
+ + projectionIn[i]);
+ }
+ return projection;
+ } else {
+ return projectionIn;
+ }
+ } else if (mProjectionMap != null) {
+ // Return all columns in projection map.
+ Set<Entry<String, String>> entrySet = mProjectionMap.entrySet();
+ String[] projection = new String[entrySet.size()];
+ Iterator<Entry<String, String>> entryIter = entrySet.iterator();
+ int i = 0;
+
+ while (entryIter.hasNext()) {
+ Entry<String, String> entry = entryIter.next();
+
+ // Don't include the _count column when people ask for no projection.
+ if (entry.getKey().equals(BaseColumns._COUNT)) {
+ continue;
+ }
+ projection[i++] = entry.getValue();
+ }
+ return projection;
+ }
+ return null;
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryStats.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryStats.java
new file mode 100644
index 0000000..4b36c05
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteQueryStats.java
@@ -0,0 +1,20 @@
+package net.sqlcipher.database;
+
+public class SQLiteQueryStats {
+ long totalQueryResultSize = 0L;
+ long largestIndividualRowSize = 0L;
+
+ public SQLiteQueryStats(long totalQueryResultSize,
+ long largestIndividualRowSize) {
+ this.totalQueryResultSize = totalQueryResultSize;
+ this.largestIndividualRowSize = largestIndividualRowSize;
+ }
+
+ public long getTotalQueryResultSize(){
+ return totalQueryResultSize;
+ }
+
+ public long getLargestIndividualRowSize(){
+ return largestIndividualRowSize;
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteStatement.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteStatement.java
new file mode 100644
index 0000000..84b7b4c
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteStatement.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import android.os.SystemClock;
+import androidx.sqlite.db.SupportSQLiteStatement;
+
+/**
+ * A pre-compiled statement against a {@link SQLiteDatabase} that can be reused.
+ * The statement cannot return multiple rows, but 1x1 result sets are allowed.
+ * Don't use SQLiteStatement constructor directly, please use
+ * {@link SQLiteDatabase#compileStatement(String)}
+ *
+ * SQLiteStatement is not internally synchronized so code using a SQLiteStatement from multiple
+ * threads should perform its own synchronization when using the SQLiteStatement.
+ */
+public class SQLiteStatement extends SQLiteProgram implements
+ SupportSQLiteStatement
+{
+ /**
+ * Don't use SQLiteStatement constructor directly, please use
+ * {@link SQLiteDatabase#compileStatement(String)}
+ * @param db
+ * @param sql
+ */
+ /* package */ SQLiteStatement(SQLiteDatabase db, String sql) {
+ super(db, sql);
+ }
+
+ /**
+ * Execute this SQL statement, if it is not a query. For example,
+ * CREATE TABLE, DELETE, INSERT, etc.
+ *
+ * @throws android.database.SQLException If the SQL string is invalid for
+ * some reason
+ */
+ @Override
+ public void execute() {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+
+ acquireReference();
+ try {
+ native_execute();
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ /**
+ * Execute this SQL statement and return the ID of the row inserted due to this call.
+ * The SQL statement should be an INSERT for this to be a useful call.
+ *
+ * @return the row ID of the last row inserted, if this insert is successful. -1 otherwise.
+ *
+ * @throws android.database.SQLException If the SQL string is invalid for
+ * some reason
+ */
+ @Override
+ public long executeInsert() {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+
+ acquireReference();
+ try {
+ native_execute();
+ return (mDatabase.lastChangeCount() > 0) ? mDatabase.lastInsertRow() : -1;
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ @Override
+ public int executeUpdateDelete() {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+
+ acquireReference();
+ try {
+ native_execute();
+ return mDatabase.lastChangeCount();
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ /**
+ * Execute a statement that returns a 1 by 1 table with a numeric value.
+ * For example, SELECT COUNT(*) FROM table;
+ *
+ * @return The result of the query.
+ *
+ * @throws android.database.sqlite.SQLiteDoneException if the query returns zero rows
+ */
+ @Override
+ public long simpleQueryForLong() {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+
+ acquireReference();
+ try {
+ long retValue = native_1x1_long();
+ return retValue;
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ /**
+ * Execute a statement that returns a 1 by 1 table with a text value.
+ * For example, SELECT COUNT(*) FROM table;
+ *
+ * @return The result of the query.
+ *
+ * @throws android.database.sqlite.SQLiteDoneException if the query returns zero rows
+ */
+ @Override
+ public String simpleQueryForString() {
+ if (!mDatabase.isOpen()) {
+ throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
+ }
+ long timeStart = SystemClock.uptimeMillis();
+ mDatabase.lock();
+
+ acquireReference();
+ try {
+ String retValue = native_1x1_string();
+ return retValue;
+ } finally {
+ releaseReference();
+ mDatabase.unlock();
+ }
+ }
+
+ private final native void native_execute();
+ private final native long native_1x1_long();
+ private final native String native_1x1_string();
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteTransactionListener.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteTransactionListener.java
new file mode 100644
index 0000000..69680ee
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SQLiteTransactionListener.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+/**
+ * A listener for transaction events.
+ */
+public interface SQLiteTransactionListener {
+ /**
+ * Called immediately after the transaction begins.
+ */
+ void onBegin();
+
+ /**
+ * Called immediately before committing the transaction.
+ */
+ void onCommit();
+
+ /**
+ * Called if the transaction is about to be rolled back.
+ */
+ void onRollback();
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SqliteWrapper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SqliteWrapper.java
new file mode 100644
index 0000000..b1bbbfb
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SqliteWrapper.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2008 Esmertec AG.
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import android.content.ContentResolver;
+import android.content.ContentValues;
+import android.content.Context;
+
+import net.sqlcipher.*;
+
+import android.net.Uri;
+import android.util.Log;
+import android.widget.Toast;
+
+/**
+ * @hide
+ */
+
+public final class SqliteWrapper {
+ private static final String TAG = "SqliteWrapper";
+ private static final String SQLITE_EXCEPTION_DETAIL_MESSAGE
+ = "unable to open database file";
+
+ private SqliteWrapper() {
+ // Prevent this utility class from being instantiated.
+ }
+
+ // FIXME: need to optimize this method.
+ private static boolean isLowMemory(SQLiteException e) {
+ return e.getMessage().equals(SQLITE_EXCEPTION_DETAIL_MESSAGE);
+ }
+
+ public static void checkSQLiteException(Context context, SQLiteException e) {
+ if (isLowMemory(e)) {
+ Toast.makeText(context, e.getMessage(),
+ Toast.LENGTH_SHORT).show();
+ } else {
+ throw e;
+ }
+ }
+
+ public static Cursor query(Context context, ContentResolver resolver, Uri uri,
+ String[] projection, String selection, String[] selectionArgs, String sortOrder) {
+ try {
+ return (Cursor) resolver.query(uri, projection, selection, selectionArgs, sortOrder);
+ } catch (SQLiteException e) {
+ Log.e(TAG, "Catch a SQLiteException when query: ", e);
+ checkSQLiteException(context, e);
+ return null;
+ }
+ }
+
+ public static boolean requery(Context context, android.database.Cursor cursor) {
+ try {
+ return cursor.requery();
+ } catch (SQLiteException e) {
+ Log.e(TAG, "Catch a SQLiteException when requery: ", e);
+ checkSQLiteException(context, e);
+ return false;
+ }
+ }
+ public static int update(Context context, ContentResolver resolver, Uri uri,
+ ContentValues values, String where, String[] selectionArgs) {
+ try {
+ return resolver.update(uri, values, where, selectionArgs);
+ } catch (SQLiteException e) {
+ Log.e(TAG, "Catch a SQLiteException when update: ", e);
+ checkSQLiteException(context, e);
+ return -1;
+ }
+ }
+
+ public static int delete(Context context, ContentResolver resolver, Uri uri,
+ String where, String[] selectionArgs) {
+ try {
+ return resolver.delete(uri, where, selectionArgs);
+ } catch (SQLiteException e) {
+ Log.e(TAG, "Catch a SQLiteException when delete: ", e);
+ checkSQLiteException(context, e);
+ return -1;
+ }
+ }
+
+ public static Uri insert(Context context, ContentResolver resolver,
+ Uri uri, ContentValues values) {
+ try {
+ return resolver.insert(uri, values);
+ } catch (SQLiteException e) {
+ Log.e(TAG, "Catch a SQLiteException when insert: ", e);
+ checkSQLiteException(context, e);
+ return null;
+ }
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportFactory.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportFactory.java
new file mode 100644
index 0000000..ae14e76
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportFactory.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2019 Mark L. Murphy
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import androidx.sqlite.db.SupportSQLiteOpenHelper;
+
+public class SupportFactory implements SupportSQLiteOpenHelper.Factory {
+ private final byte[] passphrase;
+ private final SQLiteDatabaseHook hook;
+
+ public SupportFactory(byte[] passphrase) {
+ this(passphrase, (SQLiteDatabaseHook)null);
+ }
+
+ public SupportFactory(byte[] passphrase, SQLiteDatabaseHook hook) {
+ this.passphrase = passphrase;
+ this.hook = hook;
+ }
+
+ @Override
+ public SupportSQLiteOpenHelper create(SupportSQLiteOpenHelper.Configuration configuration) {
+ return new SupportHelper(configuration, passphrase, hook);
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportHelper.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportHelper.java
new file mode 100644
index 0000000..ca27db5
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/SupportHelper.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2019 Mark L. Murphy
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sqlcipher.database;
+
+import androidx.sqlite.db.SupportSQLiteDatabase;
+import androidx.sqlite.db.SupportSQLiteOpenHelper;
+
+public class SupportHelper implements SupportSQLiteOpenHelper {
+ private SQLiteOpenHelper standardHelper;
+ private byte[] passphrase;
+
+ SupportHelper(final SupportSQLiteOpenHelper.Configuration configuration,
+ byte[] passphrase, final SQLiteDatabaseHook hook) {
+ SQLiteDatabase.loadLibs(configuration.context);
+ this.passphrase = passphrase;
+
+ standardHelper =
+ new SQLiteOpenHelper(configuration.context, configuration.name,
+ null, configuration.callback.version, hook) {
+ @Override
+ public void onCreate(SQLiteDatabase db) {
+ configuration.callback.onCreate(db);
+ }
+
+ @Override
+ public void onUpgrade(SQLiteDatabase db, int oldVersion,
+ int newVersion) {
+ configuration.callback.onUpgrade(db, oldVersion,
+ newVersion);
+ }
+
+ @Override
+ public void onDowngrade(SQLiteDatabase db, int oldVersion,
+ int newVersion) {
+ configuration.callback.onDowngrade(db, oldVersion,
+ newVersion);
+ }
+
+ @Override
+ public void onOpen(SQLiteDatabase db) {
+ configuration.callback.onOpen(db);
+ }
+
+ @Override
+ public void onConfigure(SQLiteDatabase db) {
+ configuration.callback.onConfigure(db);
+ }
+ };
+ }
+
+ @Override
+ public String getDatabaseName() {
+ return standardHelper.getDatabaseName();
+ }
+
+ @Override
+ public void setWriteAheadLoggingEnabled(boolean enabled) {
+ standardHelper.setWriteAheadLoggingEnabled(enabled);
+ }
+
+ @Override
+ public SupportSQLiteDatabase getWritableDatabase() {
+ SQLiteDatabase result = standardHelper.getWritableDatabase(passphrase);
+ if(passphrase != null) {
+ for (int i = 0; i < passphrase.length; i++) {
+ passphrase[i] = (byte)0;
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public SupportSQLiteDatabase getReadableDatabase() {
+ return getWritableDatabase();
+ }
+
+ @Override
+ public void close() {
+ standardHelper.close();
+ }
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/package-info.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/package-info.java
new file mode 100644
index 0000000..84c8b7b
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/database/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Contains the SQLCipher database managements classes that an application would use to manage its own private database.
+ */
+package net.sqlcipher.database;
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/package-info.java b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/package-info.java
new file mode 100644
index 0000000..c21dbf1
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/java/net/sqlcipher/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Contains classes to explore data returned from a SQLCipher database.
+ */
+package net.sqlcipher;
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/res/values/android_database_sqlcipher_strings.xml b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/res/values/android_database_sqlcipher_strings.xml
new file mode 100644
index 0000000..ddedb58
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/android-database-sqlcipher/src/main/res/values/android_database_sqlcipher_strings.xml
@@ -0,0 +1,12 @@
+
+
+ Zetetic, LLC
+ https://www.zetetic.net/sqlcipher/
+ SQLCipher for Android
+ Android SQLite API based on SQLCipher
+ https://www.zetetic.net/sqlcipher/
+ ${clientVersionNumber}
+ true
+ https://github.com/sqlcipher/android-database-sqlcipher
+ https://www.zetetic.net/sqlcipher/license/
+
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/build.gradle b/BotZone2.8v1 Android/android-database-sqlcipher/build.gradle
new file mode 100644
index 0000000..feaaaf2
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/build.gradle
@@ -0,0 +1,99 @@
+buildscript {
+ repositories {
+ google()
+ jcenter()
+ maven {
+ url "https://plugins.gradle.org/m2/"
+ }
+ }
+ dependencies {
+ classpath "com.android.tools.build:gradle:3.4.1"
+ classpath "gradle.plugin.org.ec4j.gradle:editorconfig-gradle-plugin:0.0.3"
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ jcenter()
+ }
+}
+
+ext {
+ clientVersionNumber = "4.2.0"
+ mavenPackaging = "aar"
+ mavenGroup = "net.zetetic"
+ mavenArtifactId = "android-database-sqlcipher"
+ mavenLocalRepositoryPrefix = "file://"
+ if(project.hasProperty('publishLocal') && publishLocal.toBoolean()){
+ mavenSnapshotRepositoryUrl = "outputs/snapshot"
+ mavenReleaseRepositoryUrl = "outputs/release"
+ } else {
+ mavenLocalRepositoryPrefix = ""
+ mavenSnapshotRepositoryUrl = "https://oss.sonatype.org/content/repositories/snapshots"
+ mavenReleaseRepositoryUrl = "https://oss.sonatype.org/service/local/staging/deploy/maven2"
+ }
+ if(project.hasProperty('publishSnapshot') && publishSnapshot.toBoolean()){
+ mavenVersionName = "${clientVersionNumber}-SNAPSHOT"
+ } else {
+ mavenVersionName = "${clientVersionNumber}"
+ }
+ if(project.hasProperty('nexusUsername')){
+ nexusUsername = "${nexusUsername}"
+ }
+ if(project.hasProperty('nexusPassword')){
+ nexusPassword = "${nexusPassword}"
+ }
+ mavenPomDescription = "SQLCipher for Android is a plugin to SQLite that provides full database encryption."
+ mavenPomUrl = "https://www.zetetic.net/sqlcipher"
+ mavenScmUrl = "https://github.com/sqlcipher/android-database-sqlcipher.git"
+ mavenScmConnection = "scm:git:https://github.com/sqlcipher/android-database-sqlcipher.git"
+ mavenScmDeveloperConnection = "scm:git:https://github.com/sqlcipher/android-database-sqlcipher.git"
+ mavenLicenseUrl = "https://www.zetetic.net/sqlcipher/license/"
+ mavenDeveloperName = "Zetetic Support"
+ mavenDeveloperEmail = "support@zetetic.net"
+ mavenDeveloperOrganization = "Zetetic LLC"
+ mavenDeveloperUrl = "https://www.zetetic.net"
+ minimumAndroidSdkVersion = 14
+ minimumAndroid64BitSdkVersion = 21
+ targetAndroidSdkVersion = 26
+ compileAndroidSdkVersion = 26
+ mainProjectName = "android-database-sqlcipher"
+ nativeRootOutputDir = "${projectDir}/${mainProjectName}/src/main"
+ androidNativeRootDir = "${nativeRootOutputDir}/external/android-libs"
+ sqlcipherDir = "${projectDir}/${mainProjectName}/src/main/external/sqlcipher"
+ opensslVersion = "1.1.1b"
+ opensslDir = "${projectDir}/${mainProjectName}/src/main/external/openssl-${opensslVersion}"
+ if(project.hasProperty('debugBuild') && debugBuild.toBoolean()) {
+ otherSqlcipherCFlags = ""
+ ndkBuildType="NDK_DEBUG=1"
+ } else {
+ otherSqlcipherCFlags = "-DLOG_NDEBUG"
+ ndkBuildType="NDK_DEBUG=0"
+ }
+ sqlcipherCFlags = "-DSQLITE_HAS_CODEC " +
+ "-DSQLITE_SOUNDEX " +
+ "-DHAVE_USLEEP=1 " +
+ "-DSQLITE_MAX_VARIABLE_NUMBER=99999 " +
+ "-DSQLITE_TEMP_STORE=3 " +
+ "-DSQLITE_THREADSAFE=1 " +
+ "-DSQLITE_DEFAULT_JOURNAL_SIZE_LIMIT=1048576 " +
+ "-DNDEBUG=1 " +
+ "-DSQLITE_ENABLE_MEMORY_MANAGEMENT=1 " +
+ "-DSQLITE_ENABLE_LOAD_EXTENSION " +
+ "-DSQLITE_ENABLE_COLUMN_METADATA " +
+ "-DSQLITE_ENABLE_UNLOCK_NOTIFY " +
+ "-DSQLITE_ENABLE_RTREE " +
+ "-DSQLITE_ENABLE_STAT3 " +
+ "-DSQLITE_ENABLE_STAT4 " +
+ "-DSQLITE_ENABLE_JSON1 " +
+ "-DSQLITE_ENABLE_FTS3_PARENTHESIS " +
+ "-DSQLITE_ENABLE_FTS4 " +
+ "-DSQLITE_ENABLE_FTS5 " +
+ "-DSQLCIPHER_CRYPTO_OPENSSL " +
+ "-DSQLITE_ENABLE_DBSTAT_VTAB"
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.jar b/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..7a3265e
Binary files /dev/null and b/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.properties b/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..ffb5977
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-bin.zip
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/gradlew b/BotZone2.8v1 Android/android-database-sqlcipher/gradlew
new file mode 100644
index 0000000..cccdd3d
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/gradlew.bat b/BotZone2.8v1 Android/android-database-sqlcipher/gradlew.bat
new file mode 100644
index 0000000..f955316
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/BotZone2.8v1 Android/android-database-sqlcipher/settings.gradle b/BotZone2.8v1 Android/android-database-sqlcipher/settings.gradle
new file mode 100644
index 0000000..7943976
--- /dev/null
+++ b/BotZone2.8v1 Android/android-database-sqlcipher/settings.gradle
@@ -0,0 +1 @@
+include ':android-database-sqlcipher'
diff --git a/BotZone2.8v1 Android/commands.py b/BotZone2.8v1 Android/commands.py
new file mode 100644
index 0000000..b0ff147
--- /dev/null
+++ b/BotZone2.8v1 Android/commands.py
@@ -0,0 +1,6176 @@
+import base64
+from colorama import init, Fore, Back, Style
+import config
+import decryptor
+import io
+import commands
+import json
+from orator import DatabaseManager, Model
+import os
+import packet
+import PySimpleGUI as sg
+from random import choice
+from random import randint
+import re
+import requests
+from string import ascii_uppercase
+import sys
+import time
+import webbrowser
+from random import *
+import extra
+import Lrfarm
+import aa
+import random
+# Colorama autoreset
+init(autoreset=True)
+
+
+def complete_stage(stage_id, difficulty, kagi=None):
+ # Completes a given stage; the stage name or ID may be supplied as a string
+ # kagi must be correct kagi item ID if used
+ # Check if user has supplied a stage name and searches DB for correct stage id
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
+ 'Accept': '*/*',
+ 'Authorization': packet.mac('GET', '/user'),
+ 'Content-type': 'application/json',
+ 'X-Platform': config.platform,
+ 'X-AssetVersion': '////',
+ 'X-DatabaseVersion': '////',
+ 'X-ClientVersion': '////',
+ }
+ if config.client == 'global':
+ url = 'https://ishin-global.aktsk.com/user'
+ else:
+ url = 'http://ishin-production.aktsk.jp/user'
+ r = requests.get(url, headers=headers)
+ user = r.json()
+
+ print(Fore.GREEN + 'Account Rank:' + str(get_user()['user']['rank']))
+ print('Stones: ' + str(user['user']['stone']))
+ if not stage_id.isnumeric():
+
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ stage_id = str(config.Quests.where('name', 'like', '%' + stage_id
+ + '%').first().id)
+ except AttributeError:
+ config.Model.set_connection_resolver(config.db_jp)
+ stage_id = str(config.Quests.where('name', 'like', '%' + stage_id
+ + '%').first().id)
+ except:
+ print(Fore.RED + Style.BRIGHT + "Could not find stage name in databases")
+ return 0
+ # Retrieve correct stage name to print
+ # Check if GLB database has id, if not try JP DB.
+
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.Quests.find_or_fail(int(stage_id))
+ stage_name = config.Quests.find_or_fail(int(stage_id)).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ config.Quests.find_or_fail(int(stage_id))
+ stage_name = config.Quests.find_or_fail(int(stage_id)).name
+
+ try:
+ print('Begin stage: ' + stage_name + ' ' + stage_id + ' | Difficulty: ' \
+ + str(difficulty) + ' Deck: ' + str(config.deck))
+ except:
+ print(Fore.RED + Style.BRIGHT + 'Does this quest exist?')
+ return 0
+
+ # Begin timer for overall stage completion, rounded to second.
+ timer_start = int(round(time.time(), 0))
+
+ # Form First Request
+ APIToken = ''.join(choice(ascii_uppercase) for i in range(63))
+ friend = get_friend(stage_id, difficulty)
+
+ if friend['is_cpu'] == False:
+ if kagi != None:
+ sign = json.dumps({'difficulty': difficulty, 'eventkagi_item_id': kagi, 'friend_id': friend['id'],
+ 'is_playing_script': True, 'selected_team_num': config.deck})
+ else:
+ sign = json.dumps({'difficulty': difficulty, 'friend_id': friend['id'], 'is_playing_script': True,
+ 'selected_team_num': config.deck})
+ else:
+ if kagi != None:
+ sign = json.dumps({'difficulty': difficulty, 'eventkagi_item_id': kagi, 'cpu_friend_id': friend['id'],
+ 'is_playing_script': True, 'selected_team_num': config.deck})
+ else:
+ sign = json.dumps({'difficulty': difficulty, 'cpu_friend_id': friend['id'], 'is_playing_script': True,
+ 'selected_team_num': config.deck})
+
+ enc_sign = packet.encrypt_sign(sign)
+
+ # ## Send First Request
+
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
+ 'Accept': '*/*',
+ 'Authorization': packet.mac('POST', '/quests/' + stage_id
+ + '/sugoroku_maps/start'),
+ 'Content-type': 'application/json',
+ 'X-Platform': config.platform,
+ 'X-AssetVersion': '////',
+ 'X-DatabaseVersion': '////',
+ 'X-ClientVersion': '////',
+ }
+ data = {'sign': enc_sign}
+
+ if config.client == 'global':
+ url = 'https://ishin-global.aktsk.com/quests/' + stage_id \
+ + '/sugoroku_maps/start'
+ else:
+ url = 'http://ishin-production.aktsk.jp/quests/' + stage_id \
+ + '/sugoroku_maps/start'
+
+ r = requests.post(url, data=json.dumps(data), headers=headers)
+
+ # Form second request
+ # Time for request sent
+
+ if 'sign' in r.json():
+ dec_sign = packet.decrypt_sign(r.json()['sign'])
+ elif 'error' in r.json():
+ print(Fore.RED + Style.BRIGHT + str(r.json()['error']))
+ # Check if error was due to lack of stamina
+ if r.json()['error']['code'] == 'act_is_not_enough':
+ # Check if allowed to refill stamina
+ if config.allow_stamina_refill == True:
+ refill_stamina()
+ r = requests.post(url, data=json.dumps(data),
+ headers=headers)
+ else:
+ print(Fore.RED + Style.BRIGHT + 'Stamina refill not allowed.')
+ return 0
+ elif r.json()['error']['code'] == 'active_record/record_not_found':
+ return 0
+ elif r.json()['error']['code'] == 'invalid_area_conditions_potential_releasable':
+ print(Fore.RED + Style.BRIGHT + 'You do not meet the coniditions to complete potential events')
+ return 0
+ else:
+ print(Fore.RED + Style.BRIGHT + str(r.json()['error']))
+ return 0
+ else:
+ print(Fore.RED + Style.BRIGHT + str(r.json()))
+ return 0
+ if 'sign' in r.json():
+ dec_sign = packet.decrypt_sign(r.json()['sign'])
+ # Retrieve possible tile steps from response
+ steps = []
+ for x in dec_sign['sugoroku']['events']:
+ steps.append(x)
+
+ finish_time = int(round(time.time(), 0) + 90)
+ start_time = finish_time - randint(6200000, 8200000)
+ damage = randint(500000, 1000000)
+
+ # Hercule punching bag event damage
+ if str(stage_id)[0:3] in ('711', '185'):
+ damage = randint(100000000, 101000000)
+
+ sign = {
+ 'actual_steps': steps,
+ 'difficulty': difficulty,
+ 'elapsed_time': finish_time - start_time,
+ 'energy_ball_counts_in_boss_battle': [4, 6, 0, 6, 4, 3, 0, 0, 0, 0, 0, 0, 0, ],
+ 'has_player_been_taken_damage': False,
+ 'is_cheat_user': False,
+ 'is_cleared': True,
+ 'is_defeated_boss': True,
+ 'is_player_special_attack_only': True,
+ 'max_damage_to_boss': damage,
+ 'min_turn_in_boss_battle': 0,
+ 'quest_finished_at_ms': finish_time,
+ 'quest_started_at_ms': start_time,
+ 'steps': steps,
+ 'token': dec_sign['token'],
+ }
+
+ enc_sign = packet.encrypt_sign(json.dumps(sign))
+
+ # Send second request
+
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
+ 'Accept': '*/*',
+ 'Authorization': packet.mac('POST', '/quests/' + stage_id
+ + '/sugoroku_maps/finish'),
+ 'Content-type': 'application/json',
+ 'X-Platform': config.platform,
+ 'X-AssetVersion': '////',
+ 'X-DatabaseVersion': '////',
+ 'X-ClientVersion': '////',
+ }
+ data = {'sign': enc_sign}
+ if config.client == 'global':
+ url = 'https://ishin-global.aktsk.com/quests/' + stage_id \
+ + '/sugoroku_maps/finish'
+ else:
+ url = 'http://ishin-production.aktsk.jp/quests/' + stage_id \
+ + '/sugoroku_maps/finish'
+
+ r = requests.post(url, data=json.dumps(data), headers=headers)
+ dec_sign = packet.decrypt_sign(r.json()['sign'])
+
+ # ## Print out Items from Database
+ if 'items' in dec_sign:
+ supportitems = []
+ awakeningitems = []
+ trainingitems = []
+ potentialitems = []
+ treasureitems = []
+ carditems = []
+ trainingfields = []
+ stones = 0
+ supportitemsset = set()
+ awakeningitemsset = set()
+ trainingitemsset = set()
+ potentialitemsset = set()
+ treasureitemsset = set()
+ carditemsset = set()
+ trainingfieldsset = set()
+ print('Items:')
+ print('-------------------------')
+ if 'quest_clear_rewards' in dec_sign:
+ for x in dec_sign['quest_clear_rewards']:
+ if x['item_type'] == 'Point::Stone':
+ stones += x['amount']
+ for x in dec_sign['items']:
+ if x['item_type'] == 'SupportItem':
+
+ # print('' + SupportItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ for i in range(x['quantity']):
+ supportitems.append(x['item_id'])
+ supportitemsset.add(x['item_id'])
+ elif x['item_type'] == 'PotentialItem':
+
+ # print('' + PotentialItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ for i in range(x['quantity']):
+ potentialitems.append(x['item_id'])
+ potentialitemsset.add(x['item_id'])
+ elif x['item_type'] == 'TrainingItem':
+
+ # print('' + TrainingItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ for i in range(x['quantity']):
+ trainingitems.append(x['item_id'])
+ trainingitemsset.add(x['item_id'])
+ elif x['item_type'] == 'AwakeningItem':
+
+ # print('' + AwakeningItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ for i in range(x['quantity']):
+ awakeningitems.append(x['item_id'])
+ awakeningitemsset.add(x['item_id'])
+ elif x['item_type'] == 'TreasureItem':
+
+ # print('' + TreasureItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ for i in range(x['quantity']):
+ treasureitems.append(x['item_id'])
+ treasureitemsset.add(x['item_id'])
+ elif x['item_type'] == 'Card':
+
+ # card = Cards.find(x['item_id'])
+
+ carditems.append(x['item_id'])
+ carditemsset.add(x['item_id'])
+ elif x['item_type'] == 'Point::Stone':
+
+ # print('' + card.name + '['+rarity+']'+ ' x '+str(x['quantity']))
+ # print('' + TreasureItems.find(x['item_id']).name + ' x '+str(x['quantity']))
+
+ stones += 1
+ elif x['item_type'] == 'TrainingField':
+
+ # card = Cards.find(x['item_id'])
+
+ for i in range(x['quantity']):
+ trainingfields.append(x['item_id'])
+ trainingfieldsset.add(x['item_id'])
+ else:
+ print(x['item_type'])
+
+ # Print items
+ for x in supportitemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.SupportItems.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(Fore.CYAN + Style.BRIGHT + config.SupportItems.find(x).name + ' x' \
+ + str(supportitems.count(x)))
+ for x in awakeningitemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.AwakeningItems.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(Fore.MAGENTA + Style.BRIGHT + config.AwakeningItems.find(x).name + ' x' \
+ + str(awakeningitems.count(x)))
+ for x in trainingitemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.TrainingItems.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(Fore.RED + Style.BRIGHT + config.TrainingItems.find(x).name + ' x' \
+ + str(trainingitems.count(x)))
+ for x in potentialitemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.PotentialItems.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(config.PotentialItems.find_or_fail(x).name + ' x' \
+ + str(potentialitems.count(x)))
+ for x in treasureitemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.TreasureItems.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(Fore.GREEN + Style.BRIGHT + config.TreasureItems.find(x).name + ' x' \
+ + str(treasureitems.count(x)))
+ for x in trainingfieldsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.TrainingFields.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(config.TrainingFields.find(x).name + ' x' \
+ + str(trainingfields.count(x)))
+ for x in carditemsset:
+ # JP Translation
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ config.Cards.find_or_fail(x).name
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+
+ # Print name and item count
+ print(config.Cards.find(x).name + ' x' + str(carditems.count(x)))
+ print(Fore.YELLOW + Style.BRIGHT + 'Stones x' + str(stones))
+ zeni = '{:,}'.format(dec_sign['zeni'])
+ print('Zeni: ' + zeni)
+ if 'gasha_point' in dec_sign:
+ print('Friend Points: ' + str(dec_sign['gasha_point']))
+
+ print('--------------------------')
+
+ # Sell Cards
+
+ i = 0, 1
+ card_list = []
+ if 'user_items' in dec_sign:
+ if 'cards' in dec_sign['user_items']:
+ for x in dec_sign['user_items']['cards']:
+ if config.Cards.find(x['card_id']).rarity == 0:
+ card_list.append(x['id'])
+ if config.Cards.find(x['card_id']).rarity == 1:
+ card_list.append(x['id'])
+
+ if len(card_list) > 0:
+ sell_cards(card_list)
+ if len(card_list) > 1:
+ sell_cards(card_list)
+
+ # ## Finish timing level
+
+ timer_finish = int(round(time.time(), 0))
+ timer_total = timer_finish - timer_start
+
+ # #### COMPLETED STAGE
+
+ print(Fore.GREEN + Style.BRIGHT + 'Completed stage: ' + str(stage_id) + ' in ' \
+ + str(timer_total) + ' seconds')
+ print('##############################################')
+
+
+####################################################################
def get_friend(
    stage_id,
    difficulty,
):
    """Return a supporter for the given stage_id & difficulty.

    Prefers a CPU supporter when one is available for the difficulty tier,
    otherwise falls back to the first human supporter.

    Returns a dict: {'is_cpu': bool, 'id': supporter id}.
    Raises KeyError if the response has neither cpu_supporters nor supporters.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/quests/' + stage_id
                                   + '/supporters'),
        'Content-type': 'application/json',
        # BUG FIX: was the literal string 'config.platform', which sent a
        # bogus X-Platform header instead of the configured platform value.
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/quests/' + stage_id \
            + '/supporters'
    else:
        url = 'http://ishin-production.aktsk.jp/quests/' + stage_id \
            + '/supporters'

    r = requests.get(url, headers=headers)
    body = r.json()  # parse once instead of on every access

    # Numeric difficulty -> cpu_supporters tier key used by the API.
    tier_keys = {
        5: 'super_hard3',
        4: 'super_hard2',
        3: 'super_hard1',
        2: 'very_hard',
        1: 'hard',
        0: 'normal',
    }

    # If a CPU supporter is available for this tier, choose it every time.
    tier_key = tier_keys.get(int(difficulty))
    if tier_key is not None and 'cpu_supporters' in body:
        tier = body['cpu_supporters'].get(tier_key)
        if tier and len(tier.get('cpu_friends', [])) > 0:
            return {
                'is_cpu': True,
                'id': tier['cpu_friends'][0]['id'],
            }

    # Fall back to the first human supporter.
    return {
        'is_cpu': False,
        'id': body['supporters'][0]['id'],
    }
+
+
+####################################################################
def refill_stamina():
    """Spend one dragon stone to restore the user's stamina.

    Returns 0 (and sends no request) when the account has no stones left.
    """
    stones = get_user()['user']['stone']
    if stones < 1:
        print(Fore.RED + Style.BRIGHT + 'You have no stones left...')
        return 0

    # The headers were duplicated verbatim in both client branches; only
    # the host differs between global and JP.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('PUT', '/user/recover_act_with_stone'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user/recover_act_with_stone'
    else:
        url = 'http://ishin-production.aktsk.jp/user/recover_act_with_stone'

    requests.put(url, headers=headers)
    print(Fore.GREEN + Style.BRIGHT + 'STAMINA RESTORED')
+
+
+####################################################################
def get_user():
    """Fetch the current user record from Bandai and return the parsed JSON."""
    request_headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    endpoint = (
        'https://ishin-global.aktsk.com/user'
        if config.client == 'global'
        else 'http://ishin-production.aktsk.jp/user'
    )
    response = requests.get(endpoint, headers=request_headers)
    return response.json()
+
+
+####################################################################
def sell_cards(card_list):
    """Sell every card id in *card_list*, in batches of at most 99.

    Stops and returns 0 as soon as the server reports an error.
    (The previous manual counter never error-checked the final partial
    batch; every batch is now checked.)
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/cards/sell'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/cards/sell'
    else:
        url = 'http://ishin-production.aktsk.jp/cards/sell'

    # Same chunking idiom as accept_gifts(): fixed-size slices.
    for batch in (card_list[i:i + 99] for i in range(0, len(card_list), 99)):
        r = requests.post(url, data=json.dumps({'card_ids': batch}),
                          headers=headers)
        print('Sold Cards x' + str(len(batch)))
        if 'error' in r.json():
            print(r.json()['error'])
            return 0
+
+
+####################################################################
def signup():
    """Create a new account with Bandai.

    Returns the base64-decoded account identifier string (consumed by
    signin), or None when the captcha could not be loaded or the final
    sign-up response carried no identifier.
    """
    # Set platform to use
    set_platform()

    # Generate the GUIDs once so AdId and UniqueId come from the same
    # call (previously packet.guid() was invoked twice, so the pair could
    # be drawn from two different generations).
    ids = packet.guid()
    config.AdId = ids['AdId']
    config.UniqueId = ids['UniqueId']

    user_acc = {
        'ad_id': config.AdId,
        'country': 'AU',
        'currency': 'AUD',
        'device': 'samsung',
        'device_model': 'SM-E7000',
        'os_version': '7.0',
        'platform': config.platform,
        'unique_id': config.UniqueId,
    }
    user_account = json.dumps({'user_account': user_acc})

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 4.4; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-ClientVersion': '////',
    }
    # Same endpoint is reused for the captcha retry below.
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/auth/sign_up'
    else:
        url = 'http://ishin-production.aktsk.jp/auth/sign_up'
    r = requests.post(url, data=user_account, headers=headers)

    # It is now necessary to solve the captcha. Opens a browser window;
    # the script waits for user input before continuing.
    if 'captcha_url' not in r.json():
        print(Fore.RED + Style.BRIGHT + 'Captcha could not be loaded...')
        return None

    webbrowser.open(r.json()['captcha_url'], new=2)
    captcha_session_key = r.json()['captcha_session_key']
    print(
        'Opening captcha in browser. Press' + Fore.RED + Style.BRIGHT + ' ENTER ' + Style.RESET_ALL + 'once you have solved it...')
    input()

    # Query sign up again passing the captcha session key. Bandai's
    # servers check if the captcha was solved relative to the session key.
    data = {'captcha_session_key': captcha_session_key,
            'user_account': user_acc}
    r = requests.post(url, data=json.dumps(data), headers=headers)

    # Return identifier for account; this changes upon transferring account.
    # binascii.Error (bad base64) is a ValueError subclass.
    try:
        return base64.b64decode(r.json()['identifier']).decode('utf-8')
    except (KeyError, ValueError):
        return None
+
+
+####################################################################
+####################################################################
def signin(identifier):
    """Sign in with an account identifier via Basic Authorization.

    identifier is 'account:password'; Bandai expects 'password:account'
    in the Basic header. Returns (access_token, secret) on success, or
    None when the response has no token.
    """
    # Format identifier to receive access_token and secret.
    basic_pwacc = identifier.split(':')
    complete_string = basic_pwacc[1] + ':' + basic_pwacc[0]
    basic_accpw = 'Basic ' \
        + base64.b64encode(complete_string.encode('utf-8'
                           )).decode('utf-8')
    data = json.dumps({
        'ad_id': packet.guid()['AdId'],
        'unique_id': packet.guid()['UniqueId']
    })

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': basic_accpw,
        'Content-type': 'application/json',
        'X-ClientVersion': '////',
        'X-Language': 'en',
        'X-UserCountry': 'AU',
        'X-UserCurrency': 'AUD',
        'X-Platform': config.platform,
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/auth/sign_in'
    else:
        url = 'http://ishin-production.aktsk.jp/auth/sign_in'

    r = requests.post(url, data=data, headers=headers)

    if 'captcha_url' in r.json():
        print(r.json())
        # BUG FIX: the captcha URL is kept in its own variable so the retry
        # below still posts to the sign-in endpoint (previously `url` was
        # overwritten and the credentials were POSTed to the captcha URL).
        captcha_url = r.json()['captcha_url']
        webbrowser.open(captcha_url, new=2)
        captcha_session_key = r.json()['captcha_session_key']
        print(
            'Opening captcha in browser. Press' + Fore.RED + Style.BRIGHT + ' ENTER ' + Style.RESET_ALL + 'once you have solved it...')
        input()
        # NOTE(review): the retry does not send captcha_session_key; the
        # server presumably tracks the solved captcha by session — confirm.
        r = requests.post(url, data=data, headers=headers)

    print(Fore.RED + Style.BRIGHT + 'SIGN IN COMPLETE' + Style.RESET_ALL)

    try:
        return (r.json()['access_token'], r.json()['secret'])
    except KeyError:
        return None
+
+
+####################################################################
def get_transfer_code():
    """Request an eternal transfer (link) code for the current account.

    Returns {'transfer_code': code} on success, or None when the
    response contains no link_code.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/auth/link_codes'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    # 'eternal': 1 requests a non-expiring code.  # presumably — TODO confirm
    data = {'eternal': 1}
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/auth/link_codes'
    else:
        url = 'http://ishin-production.aktsk.jp/auth/link_codes'

    r = requests.post(url, data=json.dumps(data), headers=headers)
    # Narrowed from a bare except: only a missing key means failure here.
    try:
        print('Transfer Code:' + r.json()['link_code'])
        return {'transfer_code': r.json()['link_code']}
    except KeyError:
        return None
+
+
+####################################################################
def tutorial():
    """Progress a fresh account through the tutorial entirely server-side.

    Issues the same sequence of signed API calls the game client would,
    printing progress after each step. Steps 7/8 need no request.
    """

    def _call(method, path, payload=None):
        # One signed API call: builds the standard headers for `path`,
        # picks the host for the configured client, and sends `payload`
        # (JSON-encoded) when given. Returns the requests Response.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac(method, path),
            'Content-type': 'application/json',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////',
        }
        if config.client == 'global':
            url = 'https://ishin-global.aktsk.com' + path
        else:
            url = 'http://ishin-production.aktsk.jp' + path
        body = None if payload is None else json.dumps(payload)
        if method == 'PUT':
            return requests.put(url, data=body, headers=headers)
        return requests.post(url, data=body, headers=headers)

    # Progress NULL TUTORIAL FINISH
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 1/8')
    _call('PUT', '/tutorial/finish')

    # Progress NULL Gasha
    _call('POST', '/tutorial/gasha')
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 2/8')

    # Progress to 999%
    _call('PUT', '/tutorial', {'progress': '999'})
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 3/8')

    # Change user name
    _call('PUT', '/user', {'user': {'name': make_name()}})
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 4/8')

    # /missions/put_forward
    _call('POST', '/missions/put_forward')
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 5/8')

    # Apologies accept
    _call('PUT', '/apologies/accept')

    # On Demand
    _call('PUT', '/user', {'user': {'is_ondemand': True}})
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 6/8')

    # Hidden potential releasable — no request needed for the final steps.
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 7/8')
    print(Fore.CYAN + Style.BRIGHT + 'Tutorial Progress: 8/8')
    print(Fore.RED + Style.BRIGHT + 'TUTORIAL COMPLETE')
+
+
+####################################################################
def db_download():
    """Download and decrypt the global game database when out of date.

    Local DB versions are tracked in help.txt (line 1: GLB, line 2: JP).
    Creates help.txt with placeholder versions on first run. Signs up a
    throwaway account to query the database endpoint, then restores the
    original client setting.
    """
    glb_out_of_date = False

    # Ensure help.txt exists (placeholder versions), then read it.
    # Replaces the old `while True` retry loop with a direct check.
    if not os.path.isfile('help.txt'):
        with open('help.txt', 'w') as f:
            f.write('111\n')
            f.write('111\n')
    with open('help.txt', 'r') as f:
        local_version_glb = f.readline().rstrip()
        local_version_jp = f.readline().rstrip()  # read for parity; JP path not handled below

    # Remember the current client so it can be restored afterwards.
    original_client = config.client

    # Global DB first: the endpoint needs a signed-in (throwaway) account.
    config.client = 'global'
    config.identifier = signup()
    config.access_token, config.secret = signin(config.identifier)

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/client_assets/database'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
        'X-Language': 'en',
    }
    # config.client was just forced to 'global', so the JP branch of the
    # old if/else was dead code.
    url = 'https://ishin-global.aktsk.com/client_assets/database'

    r = requests.get(url, allow_redirects=True, headers=headers)
    if local_version_glb != str(r.json()['version']):
        glb_out_of_date = True
        glb_current = r.json()['version']

        print(Fore.RED + Style.BRIGHT + 'GLB DB out of date...')
        print(Fore.RED + Style.BRIGHT + 'Downloading...')
        download = requests.get(r.json()['url'], allow_redirects=True)
        # `with` closes the handle (was open(...).write(...), leaked).
        with open('dataenc_glb.db', 'wb') as out:
            out.write(download.content)

    # Revert client to original
    config.client = original_client

    print(Fore.RED + Style.BRIGHT \
          + 'Decrypting Latest Databases... This can take a few minutes...')

    # Calling database decrypt script
    if glb_out_of_date:
        print('Decrypting Global Database')
        decryptor.main()
        # Rewrite line 1 of help.txt with the new GLB version.
        with open('help.txt', 'r') as file:
            data = file.readlines()
        data[0] = str(glb_current) + '\n'
        with open('help.txt', 'w') as file:
            file.writelines(data)

    print(Fore.GREEN + Style.BRIGHT + 'Database update complete.')
+
+
+####################################################################
def accept_missions():
    """Accept rewards for every completed-but-unclaimed mission."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/missions'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////'
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/missions'
    else:
        url = 'http://ishin-production.aktsk.jp/missions'
    r = requests.get(url, headers=headers)

    # Completed but reward not yet claimed. (`is (not) None` replaces the
    # `!= None` / `== None` comparisons.)
    mission_list = [
        m['id'] for m in r.json()['missions']
        if m['completed_at'] is not None and m['accepted_reward_at'] is None
    ]

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/missions/accept'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////'
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/missions/accept'
    else:
        url = 'http://ishin-production.aktsk.jp/missions/accept'
    data = {"mission_ids": mission_list}
    r = requests.post(url, data=json.dumps(data), headers=headers)
    if 'error' not in r.json():
        print(Fore.GREEN + Style.BRIGHT + 'Accepted missions')
+
+
+####################################################################
def accept_gifts():
    """Fetch all pending gift ids and accept them in batches of 25.

    Returns 0 without sending an accept request when there are no gifts.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/gifts'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/gifts'
    else:
        url = 'http://ishin-production.aktsk.jp/gifts'
    r = requests.get(url, headers=headers)

    gifts = [x['id'] for x in r.json()['gifts']]

    # Accept gifts
    if len(gifts) == 0:
        print('No gifts to accept...')
        return 0
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/gifts/accept'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/gifts/accept'
    else:
        url = 'http://ishin-production.aktsk.jp/gifts/accept'

    # Batches of 25 gift ids per request. (The old loop reassigned its
    # own loop variable `data`; distinct names avoid that shadowing.)
    for chunk in (gifts[i:i + 25] for i in range(0, len(gifts), 25)):
        r = requests.post(url, data=json.dumps({'gift_ids': chunk}),
                          headers=headers)
        if 'error' not in r.json():
            print(Fore.GREEN + Style.BRIGHT + 'Gifts Accepted...')
        else:
            print(r.json())
+
+
+####################################################################
+def change_supporter():
+ # Needs to have translation properly implemented!
+
+ ###Get user cards
+ print(Fore.CYAN + Style.BRIGHT + 'Fetching user cards...')
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
+ 'Accept': '*/*',
+ 'Authorization': packet.mac('GET', '/cards'),
+ 'Content-type': 'application/json',
+ 'X-Language': 'en',
+ 'X-Platform': config.platform,
+ 'X-AssetVersion': '////',
+ 'X-DatabaseVersion': '////',
+ 'X-ClientVersion': '////',
+ }
+ if config.client == 'global':
+ url = 'https://ishin-global.aktsk.com/cards'
+ else:
+ url = 'http://ishin-production.aktsk.jp/cards'
+ r = requests.get(url, headers=headers)
+ master_cards = r.json()['cards']
+ print(Fore.GREEN + Style.BRIGHT + 'Done...')
+
+ ###Sort user cards into a list of dictionaries with attributes
+ print(Fore.CYAN + Style.BRIGHT + 'Fetching card attributes...')
+ card_list = []
+ for card in master_cards:
+ ###Get card collection object from database
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ db_card = config.Cards.find_or_fail(card['card_id'])
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ db_card = config.Cards.find_or_fail(card['card_id'])
+ # db_card = config.Cards.where('id','=',card['card_id']).first()
+
+ ###Get card rarity
+ if db_card.rarity == 0:
+ rarity = 'N'
+ elif db_card.rarity == 1:
+ rarity = 'R'
+ elif db_card.rarity == 2:
+ rarity = 'SR'
+ elif db_card.rarity == 3:
+ rarity = 'SSR'
+ elif db_card.rarity == 4:
+ rarity = 'UR'
+ elif db_card.rarity == 5:
+ rarity = 'LR'
+ ###Get card Type
+ if str(db_card.element)[-1] == '0':
+ type = '[AGL] '
+ elif str(db_card.element)[-1] == '1':
+ type = '[TEQ] '
+ elif str(db_card.element)[-1] == '2':
+ type = '[INT] '
+ elif str(db_card.element)[-1] == '3':
+ type = '[STR] '
+ elif str(db_card.element)[-1] == '4':
+ type = '[PHY] '
+ ###Get card categories list
+ categories = []
+ # Get category id's given card id
+ card_card_categories = config.CardCardCategories.where(
+ 'card_id', '=', db_card.id).get()
+
+ try:
+ for category in card_card_categories:
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ categories.append(config.CardCategories.find(
+ category.card_category_id).name)
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ categories.append(config.CardCategories.find(
+ category.card_category_id).name)
+ except:
+ None
+ ###Get card link_skills list
+ link_skills = []
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill1_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill1_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill2_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill2_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill3_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill3_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill4_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill4_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill5_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill5_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill6_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill6_id).name)
+ except:
+ None
+ except:
+ None
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill7_id).name)
+ except AttributeError:
+ try:
+ config.Model.set_connection_resolver(config.db_jp)
+ link_skills.append(config.LinkSkills.find(db_card.link_skill7_id).name)
+ except:
+ None
+ except:
+ None
+
+ dict = {
+ 'ID': db_card.id,
+ 'Rarity': rarity,
+ 'Name': db_card.name,
+ 'Type': type,
+ 'Cost': db_card.cost,
+ 'Hercule': db_card.is_selling_only,
+ 'HP': db_card.hp_init,
+ 'Categories': categories,
+ 'Links': link_skills,
+ 'UniqueID': card['id']
+ }
+ card_list.append(dict)
+ print(Fore.GREEN + Style.BRIGHT + "Done...")
+
+ ###Sort cards
+ print(Fore.CYAN + Style.BRIGHT + "Sorting cards...")
+ card_list = sorted(card_list, key=lambda k: k['Name'])
+ card_list = sorted(card_list, key=lambda k: k['Rarity'])
+ card_list = sorted(card_list, key=lambda k: k['Cost'])
+ print(Fore.GREEN + Style.BRIGHT + "Done...")
+ ###Define cards to display
+ cards_to_display_dicts = []
+ cards_to_display = []
+ # Take cards in card_list that aren't hercule statues or kais?
+ for char in card_list:
+ if char['Hercule'] != 1 and char['HP'] > 5:
+ cards_to_display_dicts.append(char)
+ cards_to_display.append(
+ char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(
+ char['UniqueID']) + ' | ' ' x '+str(x['quantity']))
+
+ ###Define links to display
+ links_master = []
+ config.Model.set_connection_resolver(config.db_jp)
+ for link in config.LinkSkills.all():
+ links_master.append(link.name)
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ links_master.append(config.LinkSkills.find_or_fail(link.id).name)
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ links_master.append(config.LinkSkills.find_or_fail(link.id).name)
+
+ links_to_display = sorted(links_master)
+
+ ###Define categories to display
+ categories_master = []
+ config.Model.set_connection_resolver(config.db_jp)
+ for category in config.CardCategories.all():
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ categories_master.append(config.CardCategories.find_or_fail(category.id).name)
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ categories_master.append(config.CardCategories.find_or_fail(category.id).name)
+
+ categories_to_display = sorted(categories_master)
+
+ ###Define window layout
+
+ col1 = [[sg.Listbox(values=(cards_to_display), size=(30, 20), key='CARDS')],
+ [sg.Listbox(values=([]), size=(30, 6), key='CARDS_CHOSEN')],
+ [sg.Button(button_text='Set as Supporter', key='choose_card')]]
+
+ col2 = [[sg.Listbox(values=(sorted(categories_to_display)), size=(25, 20), key='CATEGORIES')],
+ [sg.Listbox(values=([]), size=(25, 6), key='CATEGORIES_CHOSEN')],
+ [sg.Button(button_text='Choose Categories', key='choose_categories'),
+ sg.Button(button_text='Clear Categories', key='clear_categories')]]
+
+ col3 = [[sg.Listbox(values=(sorted(links_to_display)), size=(25, 20), key='LINKS')],
+ [sg.Listbox(values=([]), size=(25, 6), key='LINKS_CHOSEN')],
+ [sg.Button(button_text='Choose Links', key='choose_links'),
+ sg.Button(button_text='Clear Links', key='clear_links')]]
+
+ layout = [[sg.Column(col1), sg.Column(col2), sg.Column(col3)]]
+ window = sg.Window('Supporter Update', grab_anywhere=True, keep_on_top=True).Layout(layout)
+
+ ###Begin window loop
+ chosen_links = []
+ chosen_categories = []
+
+ ###
+ chosen_cards_ids = []
+ chosen_cards_unique_ids = []
+ chosen_cards_names = []
+ chosen_cards_to_display = []
+
+ while len(chosen_cards_ids) < 1:
+ event, values = window.Read()
+
+ if event == None:
+ return 0
+
+ if event == 'choose_card':
+ if len(values['CARDS']) < 1:
+ continue
+ # Get ID of chosen card to send to bandai
+ chosen_line = values['CARDS'][0]
+ char_name, char_id, char_unique_id = chosen_line.split(' | ')
+ chosen_cards_ids.append(int(char_id))
+ chosen_cards_unique_ids.append(int(char_unique_id))
+ try:
+ config.Model.set_connection_resolver(config.db_glb)
+ chosen_cards_names.append(config.Cards.find(char_id).name)
+ except:
+ config.Model.set_connection_resolver(config.db_jp)
+ chosen_cards_names.append(config.Cards.find(char_id).name)
+
+ # Chosen cards to display in lower box
+ chosen_cards_to_display.append(chosen_line)
+
+ if event == 'choose_categories':
+ for category in values['CATEGORIES']:
+ chosen_categories.append(category)
+ categories_to_display.remove(category)
+
+ if event == 'clear_categories':
+ categories_to_display.extend(chosen_categories)
+ chosen_categories[:] = []
+ categories_to_display = sorted(categories_to_display)
+
+ if event == 'choose_links':
+ for link in values['LINKS']:
+ chosen_links.append(link)
+ links_to_display.remove(link)
+
+ if event == 'clear_links':
+ links_to_display.extend(chosen_links)
+ chosen_links[:] = []
+ links_to_display = sorted(links_to_display)
+ break
+
+ ###Re-populate cards to display, checking filter criteria
+ cards_to_display[:] = []
+ for char in cards_to_display_dicts:
+ if char['Name'] in chosen_cards_names:
+ continue
+
+ if len(list(set(chosen_links) & set(char['Links']))) != len(chosen_links):
+ # print("List intersection")
+ continue
+
+ if len(list(set(chosen_categories) & set(char['Categories']))) != len(chosen_categories):
+ # print("Category intersectino")
+ continue
+
+ cards_to_display.append(
+ char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(
+ char['UniqueID']))
+
+ ###Update window elements
+ window.FindElement('CARDS').Update(values=cards_to_display)
+ window.FindElement('CARDS_CHOSEN').Update(values=chosen_cards_to_display)
+ window.FindElement('CATEGORIES').Update(values=categories_to_display)
+ window.FindElement('CATEGORIES_CHOSEN').Update(values=chosen_categories)
+ window.FindElement('LINKS').Update(values=links_to_display)
+ window.FindElement('LINKS_CHOSEN').Update(values=chosen_links)
+
+ window.Close()
+ ###Send selected supporter to bandai
+ headers = {
+ 'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
+ 'Accept': '*/*',
+ 'Authorization': packet.mac('PUT', '/support_leaders'),
+ 'Content-type': 'application/json',
+ 'X-Platform': config.platform,
+ 'X-AssetVersion': '////',
+ 'X-DatabaseVersion': '////',
+ 'X-ClientVersion': '////',
+ }
+ if config.client == 'global':
+ url = 'https://ishin-global.aktsk.com/support_leaders'
+ else:
+ url = 'http://ishin-production.aktsk.jp/support_leaders'
+ # print(chosen_cards_unique_ids)
+ data = {'support_leader_ids': chosen_cards_unique_ids}
+ # print(data)
+ r = requests.put(url, data=json.dumps(data), headers=headers)
+ if 'error' in r.json():
+ print(Fore.RED + Style.BRIGHT + str(r.json()))
+ else:
+ # print(r.json())
+ print(chosen_cards_names)
+ print(Fore.GREEN + Style.BRIGHT + "Supporter updated!")
+
+ return 0
+
+
+####################################################################
def change_team():
    """Interactively rebuild one of the account's decks.

    Downloads the user's card box from Bandai, opens a PySimpleGUI picker
    in which cards can be filtered by category and link skill, and POSTs
    up to six chosen cards to the /teams endpoint as the chosen deck.
    Unfilled slots are padded with '0'.  Returns 0 in all cases.
    """
    # Needs to have translation properly implemented!

    # Display lookups for rarity / element (replaces repeated if/elif ladders).
    rarity_names = {0: 'N', 1: 'R', 2: 'SR', 3: 'SSR', 4: 'UR', 5: 'LR'}
    element_names = {'0': '[AGL] ', '1': '[TEQ] ', '2': '[INT] ',
                     '3': '[STR] ', '4': '[PHY] '}

    def _link_skill_name(skill_id):
        # GLB database first, JP fallback; None when the skill is missing
        # from both (mirrors the previously silently-swallowed AttributeError).
        try:
            config.Model.set_connection_resolver(config.db_glb)
            return config.LinkSkills.find(skill_id).name
        except AttributeError:
            try:
                config.Model.set_connection_resolver(config.db_jp)
                return config.LinkSkills.find(skill_id).name
            except Exception:
                return None
        except Exception:
            return None

    ###Get user deck to change
    chosen_deck = int(input("Enter the deck number you would like to change: "))

    ###Get user cards
    print(Fore.CYAN + Style.BRIGHT + 'Fetching user cards...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/cards'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/cards'
    else:
        url = 'http://ishin-production.aktsk.jp/cards'
    r = requests.get(url, headers=headers)
    master_cards = r.json()['cards']
    print(Fore.GREEN + Style.BRIGHT + 'Done...')

    ###Sort user cards into a list of dictionaries with attributes
    print(Fore.CYAN + Style.BRIGHT + 'Fetching card attributes...')
    card_list = []
    for card in master_cards:
        # Card collection object from the database: GLB first, then JP.
        try:
            config.Model.set_connection_resolver(config.db_glb)
            db_card = config.Cards.find_or_fail(card['card_id'])
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            db_card = config.Cards.find_or_fail(card['card_id'])

        rarity = rarity_names.get(db_card.rarity, '?')
        card_type = element_names.get(str(db_card.element)[-1], '')

        ###Get card categories list
        categories = []
        # Get category id's given card id
        card_card_categories = config.CardCardCategories.where(
            'card_id', '=', db_card.id).get()
        try:
            for category in card_card_categories:
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    categories.append(config.CardCategories.find(
                        category.card_category_id).name)
                except Exception:
                    config.Model.set_connection_resolver(config.db_jp)
                    categories.append(config.CardCategories.find(
                        category.card_category_id).name)
        except Exception:
            pass

        ###Get card link_skills list (slots 1-7)
        link_skills = []
        for slot in range(1, 8):
            name = _link_skill_name(getattr(db_card, 'link_skill%d_id' % slot))
            if name is not None:
                link_skills.append(name)

        card_list.append({
            'ID': db_card.id,
            'Rarity': rarity,
            'Name': db_card.name,
            'Type': card_type,
            'Cost': db_card.cost,
            'Hercule': db_card.is_selling_only,
            'HP': db_card.hp_init,
            'Categories': categories,
            'Links': link_skills,
            'UniqueID': card['id']
        })
    print(Fore.GREEN + Style.BRIGHT + "Done...")

    ###Sort cards (stable sorts: name, then rarity, then cost)
    print(Fore.CYAN + Style.BRIGHT + "Sorting cards...")
    card_list = sorted(card_list, key=lambda k: k['Name'])
    card_list = sorted(card_list, key=lambda k: k['Rarity'])
    card_list = sorted(card_list, key=lambda k: k['Cost'])
    print(Fore.GREEN + Style.BRIGHT + "Done...")

    ###Define cards to display
    cards_to_display_dicts = []
    cards_to_display = []
    # Take cards in card_list that aren't hercule statues or kais?
    for char in card_list:
        if char['Hercule'] != 1 and char['HP'] > 5:
            cards_to_display_dicts.append(char)
            cards_to_display.append(
                char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(
                    char['UniqueID']))

    ###Define links to display
    # NOTE(review): each link is appended twice (JP name plus GLB/JP name),
    # which duplicates listbox entries -- kept as-is to preserve behaviour.
    links_master = []
    config.Model.set_connection_resolver(config.db_jp)
    for link in config.LinkSkills.all():
        links_master.append(link.name)
        try:
            config.Model.set_connection_resolver(config.db_glb)
            links_master.append(config.LinkSkills.find_or_fail(link.id).name)
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            links_master.append(config.LinkSkills.find_or_fail(link.id).name)

    links_to_display = sorted(links_master)

    ###Define categories to display
    categories_master = []
    config.Model.set_connection_resolver(config.db_jp)
    for category in config.CardCategories.all():
        try:
            config.Model.set_connection_resolver(config.db_glb)
            categories_master.append(config.CardCategories.find_or_fail(category.id).name)
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            categories_master.append(config.CardCategories.find_or_fail(category.id).name)

    categories_to_display = sorted(categories_master)

    ###Define window layout
    col1 = [[sg.Listbox(values=(cards_to_display), size=(30, 20), key='CARDS')],
            [sg.Listbox(values=([]), size=(30, 6), key='CARDS_CHOSEN')],
            [sg.Button(button_text='Choose Card', key='choose_card'),
             sg.Button(button_text='Confirm Team', key='confirm_team')]]

    col2 = [[sg.Listbox(values=(sorted(categories_to_display)), size=(25, 20), key='CATEGORIES')],
            [sg.Listbox(values=([]), size=(25, 6), key='CATEGORIES_CHOSEN')],
            [sg.Button(button_text='Choose Categories', key='choose_categories'),
             sg.Button(button_text='Clear Categories', key='clear_categories')]]

    col3 = [[sg.Listbox(values=(sorted(links_to_display)), size=(25, 20), key='LINKS')],
            [sg.Listbox(values=([]), size=(25, 6), key='LINKS_CHOSEN')],
            [sg.Button(button_text='Choose Links', key='choose_links'),
             sg.Button(button_text='Clear Links', key='clear_links')]]

    layout = [[sg.Column(col1), sg.Column(col2), sg.Column(col3)]]
    window = sg.Window('Deck Update', grab_anywhere=True, keep_on_top=True).Layout(layout)

    ###Begin window loop
    chosen_links = []
    chosen_categories = []
    chosen_cards_ids = []
    chosen_cards_unique_ids = []
    chosen_cards_names = []
    chosen_cards_to_display = []

    while len(chosen_cards_ids) < 6:
        event, values = window.Read()

        if event is None:
            # Window closed -- abort without sending anything.
            return 0

        if event == 'choose_card':
            if len(values['CARDS']) < 1:
                continue
            # Get ID of chosen card to send to bandai
            chosen_line = values['CARDS'][0]
            char_name, char_id, char_unique_id = chosen_line.split(' | ')
            chosen_cards_ids.append(int(char_id))
            chosen_cards_unique_ids.append(int(char_unique_id))
            try:
                config.Model.set_connection_resolver(config.db_glb)
                chosen_cards_names.append(config.Cards.find(char_id).name)
            except Exception:
                config.Model.set_connection_resolver(config.db_jp)
                chosen_cards_names.append(config.Cards.find(char_id).name)

            # Chosen cards to display in lower box
            chosen_cards_to_display.append(chosen_line)

        if event == 'choose_categories':
            for category in values['CATEGORIES']:
                chosen_categories.append(category)
                categories_to_display.remove(category)

        if event == 'clear_categories':
            categories_to_display.extend(chosen_categories)
            chosen_categories[:] = []
            categories_to_display = sorted(categories_to_display)

        if event == 'choose_links':
            for link in values['LINKS']:
                chosen_links.append(link)
                links_to_display.remove(link)

        if event == 'clear_links':
            links_to_display.extend(chosen_links)
            chosen_links[:] = []
            links_to_display = sorted(links_to_display)

        if event == 'confirm_team':
            if len(chosen_cards_unique_ids) < 6:
                if len(chosen_cards_unique_ids) == 0:
                    print(Fore.RED + Style.BRIGHT + 'No cards selected.')
                    return 0
                # Pad the deck out to six slots with empty ('0') entries.
                for _ in range(6 - len(chosen_cards_unique_ids)):
                    chosen_cards_unique_ids.append('0')
            break

        ###Re-populate cards to display, checking filter criteria
        cards_to_display[:] = []
        for char in cards_to_display_dicts:
            if char['Name'] in chosen_cards_names:
                continue
            # Card must carry every chosen link...
            if len(list(set(chosen_links) & set(char['Links']))) != len(chosen_links):
                continue
            # ...and belong to every chosen category.
            if len(list(set(chosen_categories) & set(char['Categories']))) != len(chosen_categories):
                continue
            cards_to_display.append(
                char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(
                    char['UniqueID']))

        ###Update window elements
        window.FindElement('CARDS').Update(values=cards_to_display)
        window.FindElement('CARDS_CHOSEN').Update(values=chosen_cards_to_display)
        window.FindElement('CATEGORIES').Update(values=categories_to_display)
        window.FindElement('CATEGORIES_CHOSEN').Update(values=chosen_categories)
        window.FindElement('LINKS').Update(values=links_to_display)
        window.FindElement('LINKS_CHOSEN').Update(values=chosen_links)

    window.Close()

    ###Send selected team to bandai
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/teams'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/teams'
    else:
        url = 'http://ishin-production.aktsk.jp/teams'
    data = {'selected_team_num': 1, 'user_card_teams': [
        {'num': chosen_deck, 'user_card_ids': chosen_cards_unique_ids},
    ]}
    r = requests.post(url, data=json.dumps(data), headers=headers)
    if 'error' in r.json():
        print(Fore.RED + Style.BRIGHT + str(r.json()))
    else:
        print(chosen_cards_names)
        print(Fore.GREEN + Style.BRIGHT + "Deck updated!")

    return 0
+
+
+####################################################################
def get_kagi_id(stage):
    """Return the event-key (kagi) item id usable for *stage*, or None.

    Fetches the account's kagi inventory, maps the stage's area category
    to an area tab to find the kagi item id, and returns that id only if
    the account owns at least one of the item.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/eventkagi_items'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/eventkagi_items'
    else:
        url = 'http://ishin-production.aktsk.jp/eventkagi_items'
    r = requests.get(url, headers=headers)

    kagi_items = r.json()['eventkagi_items']
    area_id = config.Quests.find(stage).area_id
    area_category = config.Area.find(area_id).category
    kagi_id = None
    for tab in config.AreaTabs.all():
        tab_categories = json.loads(tab.area_category_ids)
        if area_category in tab_categories['area_category_ids']:
            kagi_id = int(tab.id)
            print('Kagi ID: ' + str(tab.id))
            break
    if kagi_id is None:
        # No area tab matched: the old code hit a NameError further down.
        return None
    for kagi in kagi_items:
        if kagi['eventkagi_item_id'] == kagi_id:
            if kagi['quantity'] > 0:
                # BUG FIX: was 'kagi_id' + kagi_id (str + int -> TypeError).
                print('kagi_id' + str(kagi_id))
                return kagi_id
            return None

    return None
+
+
+####################################################################
+
def complete_unfinished_quest_stages():
    """Complete every quest stage the account has never cleared.

    Repeatedly fetches /user_areas, clears every map with cleared_count 0
    (quest map ids are between 100 and 999999), and stops once a re-fetch
    shows no further progress.  Returns 0 when nothing needed doing,
    1 otherwise.
    """

    def _uncleared_maps(response):
        # Quest maps (100 < id < 999999) the account has never cleared.
        found = []
        for user_area in response.json()['user_areas']:
            for sugoroku in user_area['user_sugoroku_maps']:
                if (sugoroku['cleared_count'] == 0
                        and 100 < sugoroku['sugoroku_map_id'] < 999999):
                    found.append(sugoroku)
        return found

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user_areas'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user_areas'
    else:
        url = 'http://ishin-production.aktsk.jp/user_areas'
    r = requests.get(url, headers=headers)

    maps = _uncleared_maps(r)
    if len(maps) == 0:
        print("No quests to complete!")
        print('--------------------------------------------')
        return 0

    while True:
        for sugoroku in maps:
            map_id = str(sugoroku['sugoroku_map_id'])
            # Last digit of the map id encodes the difficulty.
            complete_stage(map_id[:-1], map_id[-1])

        # Re-fetch with a freshly signed Authorization header.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac('GET', '/user_areas'),
            'Content-type': 'application/json',
            'X-Language': 'en',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////',
        }
        r = requests.get(url, headers=headers)
        maps_check = _uncleared_maps(r)
        if maps_check == maps:
            # No progress since the last pass -- stop retrying.
            break
        maps = maps_check
        refresh_client()
    return 1
+
+
+####################################################################
def refresh_client():
    """Re-sign in and store a fresh access token/secret on the config module."""
    token, secret = signin(config.identifier)
    config.access_token = token
    config.secret = secret
    print(Fore.GREEN + Style.BRIGHT + 'Refreshed Token')
+
+
+####################################################################
def change_name():
    """Change the display name associated with the account.

    Prompts for the new name and PUTs it to the /user endpoint; prints
    the server's error payload on failure.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('PUT', '/user'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    name = input('What would you like to change your name to?: ')
    user = {'user': {'name': name}}
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user'
    else:
        url = 'http://ishin-production.aktsk.jp/user'
    r = requests.put(url, data=json.dumps(user), headers=headers)
    if 'error' in r.json():
        # Colourised for consistency with the other commands in this file.
        print(Fore.RED + Style.BRIGHT + str(r.json()))
    else:
        print(Fore.GREEN + Style.BRIGHT + "Name changed to: " + name)
+
+
+####################################################################
def increase_capacity():
    """Request a +5 card-capacity upgrade for the account."""
    endpoint = '/user/capacity/card'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', endpoint),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com' + endpoint
    else:
        url = 'http://ishin-production.aktsk.jp' + endpoint

    response = requests.post(url, headers=headers)
    if 'error' in response.json():
        print(Fore.RED + Style.BRIGHT + str(response.json()))
    else:
        print(Fore.GREEN + Style.BRIGHT + 'Card capacity +5')
+
+
+####################################################################
+
def get_user_info():
    """Fetch /user and print a summary of the account's stats."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user'
    else:
        url = 'http://ishin-production.aktsk.jp/user'
    user = requests.get(url, headers=headers).json()

    print('Account OS: ' + config.platform.upper())
    # Label/field pairs printed in the same order as before.
    for label, field in (('User ID', 'id'),
                         ('Stones', 'stone'),
                         ('Zeni', 'zeni'),
                         ('Rank', 'rank'),
                         ('Stamina', 'act'),
                         ('Name', 'name'),
                         ('Total Card Capacity', 'total_card_capacity')):
        print(label + ': ' + str(user['user'][field]))
+
+
+####################################################################
def complete_unfinished_events():
    """Clear every uncleared stage of the currently running events.

    Fetches the current event ids from /events, then walks /user_areas and
    completes any stage of those events with cleared_count 0, refreshing
    the auth token every 30 completions.
    """
    ### Get current event IDs
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    event_ids = sorted(event['id'] for event in r.json()['events'])
    # Event 135 is deliberately excluded -- presumably not auto-completable;
    # TODO confirm why.
    if 135 in event_ids:
        event_ids.remove(135)

    ### Complete areas if they are in the current ID pool
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user_areas'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user_areas'
    else:
        url = 'http://ishin-production.aktsk.jp/user_areas'
    r = requests.get(url, headers=headers)
    i = 1
    for area in r.json()['user_areas']:
        if area['area_id'] in event_ids:
            for stage in area['user_sugoroku_maps']:
                if stage['cleared_count'] == 0:
                    # Last digit of the map id encodes the difficulty.
                    map_id = str(stage['sugoroku_map_id'])
                    complete_stage(map_id[:-1], map_id[-1])
                    i += 1
                    # Re-authenticate periodically so the token stays fresh.
                    if i % 30 == 0:
                        refresh_client()
+
+
+####################################################################
def complete_clash():
    """Auto-complete the current Ultimate Clash (rmbattle) event.

    Flow: look up the active clash id from the home resources, reset any
    in-progress run via the dropout endpoint, pull the stage list and the
    usable card pool, register a full team, then start/finish each stage
    with a forged win result until the stage list is exhausted or Bandai
    stops returning usable cards.
    """
    print('Fetching current clash...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/resources/home?rmbattles=true'),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/resources/home?rmbattles=true'
    else:
        url = 'http://ishin-production.aktsk.jp/resources/home?rmbattles=true'
    r = requests.get(url, headers=headers)
    # Active clash id; used in every subsequent endpoint path.
    clash_id = r.json()['rmbattles']['id']

    #### dropout
    # Abandon any in-progress run so the clash restarts from stage one.
    print('Resetting clash to beginning...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', '/rmbattles/' + str(clash_id) + '/stages/dropout'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    sign = {
        'reason': "dropout"
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/rmbattles/' + str(clash_id) + '/stages/dropout'
    else:
        url = 'http://ishin-production.aktsk.jp/rmbattles/' + str(clash_id) + '/stages/dropout'

    r = requests.post(url, data=json.dumps(sign), headers=headers)
    print('Reset complete...')

    print('Fetching list of stages from Bandai...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/rmbattles/' + str(clash_id)),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/rmbattles/' + str(clash_id)
    else:
        url = 'http://ishin-production.aktsk.jp/rmbattles/' + str(clash_id)

    r = requests.get(url, headers=headers)

    # Flatten the per-level stage lists into one ordered list of stage ids.
    available_stages = []
    for area in r.json()['level_stages'].values():
        for stage in area:
            available_stages.append(stage['id'])
    print('Stages obtained...')
    print('Asking Bandai for available cards...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/rmbattles/available_user_cards'),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/rmbattles/available_user_cards'
    else:
        url = 'http://ishin-production.aktsk.jp/rmbattles/available_user_cards'

    r = requests.get(url, headers=headers)
    print('Cards received...')
    available_user_cards = []
    # print(r.json())
    for card in r.json():
        available_user_cards.append(card)
    # Cap the team roster at 99 cards -- presumably the server-side limit;
    # TODO confirm.
    available_user_cards = available_user_cards[:99]

    if len(available_user_cards) == 0:
        print(Fore.RED + Style.BRIGHT + "Not enough cards to complete Battlefield with!")
        return 0

    # Only the very first stage start is flagged as the beginning of a run.
    is_beginning = True
    # print(available_stages)
    print('Sending Bandai full team...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('PUT', '/rmbattles/teams/1'),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    data = {'user_card_ids': available_user_cards}
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/rmbattles/teams/1'
    else:
        url = 'http://ishin-production.aktsk.jp/rmbattles/teams/1'

    r = requests.put(url, data=json.dumps(data), headers=headers)
    print('Sent!')
    print('')
    print('Commencing Ultimate Clash!')
    print('----------------------------')
    for stage in available_stages:
        # First three cards of the pool become leader / member / sub-leader.
        leader = available_user_cards[0]
        # NOTE(review): 'members' is a single card, not a slice -- verify the
        # API really expects one member here.
        members = available_user_cards[1]
        sub_leader = available_user_cards[2]

        sign = {
            'is_beginning': is_beginning,
            'user_card_ids': {
                'leader': leader,
                'members': members,
                'sub_leader': sub_leader
            }
        }

        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac('POST', '/rmbattles/' + str(clash_id) + '/stages/' + str(stage) + '/start'),
            'Content-type': 'application/json',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////',
        }
        if config.client == 'global':
            url = 'https://ishin-global.aktsk.com/rmbattles/' + str(clash_id) + '/stages/' + str(stage) + '/start'
        else:
            url = 'http://ishin-production.aktsk.jp/rmbattles/' + str(clash_id) + '/stages/' + str(stage) + '/start'

        r = requests.post(url, data=json.dumps(sign), headers=headers)
        print('Commencing Stage ' + Fore.YELLOW + str(stage))

        is_beginning = False

        ###Second request
        # Forge plausible battle timestamps (ms): finish ~2s in the future,
        # start pushed far into the past by a random offset.
        finish_time = int(round(time.time(), 0) + 2000)
        start_time = finish_time - randint(40000000, 50000000)
        if 'sign' in r.json():
            # Decrypt the server's battle sign to obtain enemy HP and the
            # token required by the finish endpoint.
            dec_sign = packet.decrypt_sign(r.json()['sign'])
            enemy_hp = 0
            try:
                for enemy in dec_sign['enemies']:
                    enemy_hp += enemy[0]['hp']
            except:
                print('nah')

            # Report a flawless win: damage equals total enemy HP.
            sign = {
                'damage': enemy_hp,
                'finished_at_ms': finish_time,
                'finished_reason': 'win',
                'is_cleared': True,
                'remaining_hp': 0,
                'round': 0,
                'started_at_ms': start_time,
                'token': dec_sign['token']
            }

            headers = {
                'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                'Accept': '*/*',
                'Authorization': packet.mac('POST', '/rmbattles/' + str(clash_id) + '/stages/finish'),
                'Content-type': 'application/json',
                'X-Platform': config.platform,
                'X-AssetVersion': '////',
                'X-DatabaseVersion': '////',
                'X-ClientVersion': '////',
            }
            if config.client == 'global':
                url = 'https://ishin-global.aktsk.com/rmbattles/' + str(clash_id) + '/stages/finish'
            else:
                url = 'http://ishin-production.aktsk.jp/rmbattles/' + str(clash_id) + '/stages/finish'

            r = requests.post(url, data=json.dumps(sign), headers=headers)
            print('Completed Stage ' + Fore.YELLOW + str(stage))

        # Refresh the card pool for the next stage from the team endpoint.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac('GET', '/rmbattles/teams/1'),
            'X-Language': 'en',
            'Content-type': 'application/json',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////',
        }
        if config.client == 'global':
            url = 'https://ishin-global.aktsk.com/rmbattles/teams/1'
        else:
            url = 'http://ishin-production.aktsk.jp/rmbattles/teams/1'

        r = requests.get(url, headers=headers)
        print('----------------------------')
        # Stop once the server no longer reports sortable cards.
        if 'sortiable_user_card_ids' not in r.json():
            return 0
        available_user_cards = r.json()['sortiable_user_card_ids']
+
+
+####################################################################
def complete_area(area_id):
    """Complete every stage and difficulty of the given area.

    Looks up the area's quests in the database matching the configured
    client, then clears each sugoroku (quest/difficulty pair) in order
    via complete_stage(), printing progress as "i/total".
    """
    # Pick the database matching the client; the query itself is identical.
    if config.client == 'global':
        config.Model.set_connection_resolver(config.db_glb)
    else:
        config.Model.set_connection_resolver(config.db_jp)
    quests = config.Quests.where('area_id', '=', area_id).get()

    # Fetch each quest's sugoroku list once and reuse it — the original
    # issued the same query twice per quest (once to count, once to run).
    # NOTE(review): stage data is always read from the JP database here,
    # regardless of client — presumably it is the most complete; confirm.
    config.Model.set_connection_resolver(config.db_jp)
    stages_by_quest = [(quest, config.Sugoroku.where('quest_id', '=', quest.id).get())
                       for quest in quests]
    total = sum(len(sugorokus) for _, sugorokus in stages_by_quest)

    i = 1
    for quest, sugorokus in stages_by_quest:
        for sugoroku in sugorokus:
            print('Completion of area: ' + str(i) + '/' + str(total))
            complete_stage(str(quest.id), sugoroku.difficulty)
            i += 1
+
+
+####################################################################
def save_account():
    """Write the current account credentials to Saves/<platform>/<name>.txt.

    Creates the Saves directory tree on first use, prompts for an
    alphanumeric file name, and stores identifier, ad id, unique id,
    platform and client — one value per line.  Returns 0 if the save
    directories cannot be created, otherwise None.
    """
    if not os.path.isdir("Saves"):
        try:
            # makedirs creates intermediate directories too, replacing the
            # previous fragile chain of nine individual mkdir calls.
            for sub in ('ios', 'android', 'Jp/ios', 'Jp/android',
                        'fresh/ios', 'fresh/android'):
                os.makedirs(os.path.join('Saves', *sub.split('/')))
        except OSError:
            print(Fore.RED + Style.BRIGHT + 'Unable to create saves file')
            return 0

    while True:
        save_name = input("What would you like to name the file?")
        # Only alphanumeric names are accepted (keeps paths safe).
        while not save_name.isalnum():
            print(Fore.RED + Style.BRIGHT + "Name not allowed!")
            save_name = input('What would you like to name this save?: ')
        path = os.path.join('Saves', config.platform, save_name + ".txt")
        if os.path.exists(path):
            print(Fore.RED + Style.BRIGHT + "File by that name already exists.")
        else:
            try:
                # Context manager guarantees the handle is closed even if a
                # write fails (the original leaked it on error).
                with open(path, 'w') as f:
                    f.write(str(config.identifier) + '\n')
                    f.write(str(config.AdId) + '\n')
                    f.write(str(config.UniqueId) + '\n')
                    f.write(str(config.platform) + '\n')
                    f.write(str(config.client) + '\n')
                print('--------------------------------------------')
                print(Fore.CYAN + Style.BRIGHT + 'Written details to file: ' + save_name)
                print(Fore.RED + Style.BRIGHT + 'If ' + save_name + ' is deleted your account will be lost!')
                print('--------------------------------------------')
                break
            except Exception as e:
                print(e)
+
+
+####################################################################
def load_account():
    """Load a saved account from Saves/<platform>/<name>.txt into config.

    Prompts for the operating system and a save name, restores the stored
    credentials, rejects saves written for a different client version,
    then re-authenticates the client.
    """
    # Reuse set_platform() instead of duplicating the OS prompt loop
    # (the inline copy also crashed on empty input via platform[0]).
    set_platform()

    while True:
        save_name = input("What save would you like to load?: ")
        # Build the path once so the existence check and the open() call
        # agree (the original mixed concatenation and os.path.join).
        path = os.path.join('Saves', config.platform, save_name + ".txt")
        if os.path.isfile(path):
            try:
                with open(path, 'r') as f:
                    config.identifier = f.readline().rstrip()
                    config.AdId = f.readline().rstrip()
                    config.UniqueId = f.readline().rstrip()
                    config.platform = f.readline().rstrip()
                    client = f.readline().rstrip()
                if config.client == client:
                    break
                print(Fore.RED + Style.BRIGHT + 'Save does not match client version.')
            except Exception as e:
                print(e)
        else:
            print(Fore.RED + Style.BRIGHT + "Could not find " + save_name)
    refresh_client()
+
+
+####################################################################
+
def daily_login():
    """Claim any outstanding daily login bonuses for the account."""
    # Mirror the client app: touch the home resource list first, then
    # accept the pending login bonuses.
    home_path = ('/resources/home?apologies=true&banners=true&bonus_schedules=true'
                 '&budokai=true&comeback_campaigns=true&gifts=true&login_bonuses=true'
                 '&rmbattles=true')
    home_headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', home_path),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        host = 'https://ishin-global.aktsk.com'
    else:
        host = 'http://ishin-production.aktsk.jp'
    response = requests.get(host + home_path, headers=home_headers)
    if 'error' in response.json():
        print(response.json())

    accept_path = '/login_bonuses/accept'
    accept_headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', accept_path),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    response = requests.post(host + accept_path, headers=accept_headers)
    if 'error' in response.json():
        print(response.json())
+
+
+####################################################################
def dragonballs():
    """Collect all seven Dragon Balls and, once complete, make a wish.

    Fetches the active dragonball set, clears the stage for each ball not
    yet obtained, and when all 7 are counted prompts the user to pick one
    of the wishable rewards.  Recurses after a granted wish in case more
    wishes remain.  Returns 0.
    """
    is_got = 0
    ###Check for Dragonballs
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/dragonball_sets'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////'
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/dragonball_sets'
    else:
        url = 'http://ishin-production.aktsk.jp/dragonball_sets'
    r = requests.get(url, headers=headers)
    if 'error' in r.json():
        print(Fore.RED + Style.BRIGHT + str(r.json()))
        return 0

    ####Determine which dragonball set is being used
    # Renamed from `set` — the original shadowed the builtin.
    set_id = r.json()['dragonball_sets'][0]['id']

    ### Complete stages and count dragonballs
    for dragonball in r.json()['dragonball_sets']:
        for db in reversed(dragonball['dragonballs']):
            if db['is_got'] == True:
                is_got += 1
            elif db['is_got'] == False:
                # Count it as obtained since the stage clear below collects it.
                is_got += 1
                complete_stage(str(db['quest_id']), db['difficulties'][0])

    ### If all dragonballs found then wish
    if is_got == 7:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac('GET', '/dragonball_sets/' + str(set_id) + '/wishes'),
            'Content-type': 'application/json',
            'X-Language': 'en',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////'
        }
        if config.client == 'global':
            url = 'https://ishin-global.aktsk.com/dragonball_sets/' + str(set_id) + '/wishes'
        else:
            url = 'http://ishin-production.aktsk.jp/dragonball_sets/' + str(set_id) + '/wishes'

        r = requests.get(url, headers=headers)
        if 'error' in r.json():
            print(Fore.RED + Style.BRIGHT + str(r.json()))
            return 0
        wish_ids = []
        for wish in r.json()['dragonball_wishes']:
            if wish['is_wishable']:
                print('#########################')
                print('Wish ID: ' + str(wish['id']))
                wish_ids.append(str(wish['id']))
                print(wish['title'])
                print(wish['description'])
                print('')

        # Keep prompting until the user enters one of the listed wish ids.
        print(Fore.YELLOW + 'What wish would you like to ask shenron for? ID: ', end='')
        choice = input()
        while choice not in wish_ids:
            print("Shenron did not understand you! ID: ", end='')
            choice = input()
        headers = {
            'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
            'Accept': '*/*',
            'Authorization': packet.mac('POST', '/dragonball_sets/' + str(set_id) + '/wishes'),
            'Content-type': 'application/json',
            'X-Platform': config.platform,
            'X-AssetVersion': '////',
            'X-DatabaseVersion': '////',
            'X-ClientVersion': '////',
        }
        if config.client == 'global':
            url = 'https://ishin-global.aktsk.com/dragonball_sets/' + str(set_id) + '/wishes'
        else:
            url = 'http://ishin-production.aktsk.jp/dragonball_sets/' + str(set_id) + '/wishes'
        data = {'dragonball_wish_ids': [int(choice)]}
        r = requests.post(url, data=json.dumps(data), headers=headers)
        if 'error' in r.json():
            print(Fore.RED + Style.BRIGHT + str(r.json()))
        else:
            print(Fore.YELLOW + 'Wish granted!')
            print('')

        # Re-check the set: another round of balls/wishes may be available.
        dragonballs()

    return 0
+
+
+####################################################################
def transfer_account():
    """Claim an account via a transfer (link) code and save it locally.

    Prompts for the platform and transfer code, registers a fresh device
    identity, PUTs the link code, stores the returned identifier, then
    saves the account and re-authenticates.  Returns 0 on server error.
    """
    # Determine correct platform to use
    set_platform()

    transfercode = input('Enter your transfer code: ')

    # Generate both ids from a single guid() call; the original called
    # guid() twice, so AdId and UniqueId could come from two unrelated
    # generations (assumes guid() makes a fresh pair per call — confirm).
    guid = packet.guid()
    config.AdId = guid['AdId']
    config.UniqueId = guid['UniqueId']
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    data = {'eternal': True, 'old_user_id': '', 'user_account': {
        'device': 'samsung',
        'device_model': 'SM-G930V',
        'os_version': '6.0',
        'platform': config.platform,
        'unique_id': config.UniqueId,
    }}
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/auth/link_codes/' \
            + str(transfercode)
    else:
        url = 'http://ishin-production.aktsk.jp/auth/link_codes/' \
            + str(transfercode)
    print('URL: ' + url)
    r = requests.put(url, data=json.dumps(data), headers=headers)
    if 'error' in r.json():
        # Bail out early — the original continued and crashed with a
        # KeyError on the missing 'identifiers' field.
        print(r.json())
        return 0
    identifier = base64.b64decode(r.json()['identifiers']).decode('utf-8')
    print(identifier)
    config.identifier = identifier

    save_account()
    refresh_client()
+
+
+####################################################################
def user_command_executor(command):
    """Dispatch a single user command (or a comma-separated batch) to its handler.

    A comma-separated command string is rewritten into newline-separated
    input and pushed onto stdin, so each queued command is consumed by the
    subsequent input() calls; the first one becomes `command` here.
    """
    if ',' in command:
        command = command.replace(" ", "")
        command = command.replace(",", "\n")
        s = io.StringIO(command + '\n')
        sys.stdin = s
        command = input()

    if command == 'help':
        print('---------------------------------')
        print(' ')
        print(Style.BRIGHT + Fore.YELLOW + '\'stage\'' + Style.RESET_ALL + ' | Completes stage given ID or name.')
        print(Style.BRIGHT + Fore.YELLOW + '\'gt\'' + Style.RESET_ALL + ' | Gt DragonBalls.')
        print(Style.BRIGHT + Fore.YELLOW + '\'area\'' + Style.RESET_ALL + ' | Complete all stages and difficulties in an area.')
        print(Style.BRIGHT + Fore.YELLOW + '\'cards\'' + Style.RESET_ALL + ' | Provides a GUI to view card information.')
        print(Style.BRIGHT + Fore.YELLOW + '\'completequests\'' + Style.RESET_ALL + ' | Completes all unfinished quest stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'completeevents\'' + Style.RESET_ALL + ' | Completes all unfinished event stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'dokkan\'' + Style.RESET_ALL + ' | Completes all event stages, regardless of being finished beforehand.')
        print(Style.BRIGHT + Fore.YELLOW + '\'potara\'' + Style.RESET_ALL + ' | Provides a series of prompts to collect Potara medals.')
        print(Style.BRIGHT + Fore.YELLOW + '\'bossrush\'' + Style.RESET_ALL + ' | Completes all bossrush stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'hercule\'' + Style.RESET_ALL + ' | Completes all Hercule Punch Machine stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'completezbattles\'' + Style.RESET_ALL + ' | Completes all unfinished zbattles to stage 30.')
        print(Style.BRIGHT + Fore.YELLOW + '\'zstages\'' + Style.RESET_ALL + ' | Provides a GUI to complete single Z-Battle stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'clash\'' + Style.RESET_ALL + ' | Complete ultimate clash if you have enough UR cards.')
        print(Style.BRIGHT + Fore.YELLOW + '\'listevents\'' + Style.RESET_ALL + ' | Prints a list of all currently available events.')
        print(Style.BRIGHT + Fore.YELLOW + '\'summon\'' + Style.RESET_ALL + ' | Provides a GUI to choose what banner to summon.')
        print(Style.BRIGHT + Fore.YELLOW + '\'listsummons\'' + Style.RESET_ALL + ' | Prints a list of all currently available summons.')
        print(Style.BRIGHT + Fore.YELLOW + '\'dragonballs\'' + Style.RESET_ALL + ' | Collects Dragonballs and makes wishes.')
        print(Style.BRIGHT + Fore.YELLOW + '\'omegafarm\'' + Style.RESET_ALL + ' | Completes everything (story, events, zbattle)')
        print(Style.BRIGHT + Fore.YELLOW + '\'info\'' + Style.RESET_ALL + ' | Print out account information.')
        print(Style.BRIGHT + Fore.YELLOW + '\'items\'' + Style.RESET_ALL + ' | Provides a GUI to view user items.')
        print(Style.BRIGHT + Fore.YELLOW + '\'medals\'' + Style.RESET_ALL + ' | Provides a GUI to view and sell medals.')
        print(Style.BRIGHT + Fore.YELLOW + '\'sbr or nbsr\'' + Style.RESET_ALL + ' | sbr)')
        print(Style.BRIGHT + Fore.YELLOW + '\'rankup\'' + Style.RESET_ALL + ' | Levels character)')
        print(Style.BRIGHT + Fore.YELLOW + '\'sell\'' + Style.RESET_ALL + ' | Provides a GUI to sell cards.')
        print(Style.BRIGHT + Fore.YELLOW + '\'team\'' + Style.RESET_ALL + ' | Change composition of a deck.')
        print(Style.BRIGHT + Fore.YELLOW + '\'supporter\'' + Style.RESET_ALL + ' | Change your support unit.')
        print(Style.BRIGHT + Fore.YELLOW + '\'deck\'' + Style.RESET_ALL + ' | Select a deck to be the default.')
        print(Style.BRIGHT + Fore.YELLOW + '\'gift\'' + Style.RESET_ALL + ' | Accepts gifts and missions.')
        print(Style.BRIGHT + Fore.YELLOW + '\'transfer\'' + Style.RESET_ALL + ' | Generates User ID and Transfer Code.')
        print(Style.BRIGHT + Fore.YELLOW + '\'capacity\'' + Style.RESET_ALL + ' | Increase card capacity by +5.')
        print(Style.BRIGHT + Fore.YELLOW + '\'name\'' + Style.RESET_ALL + ' | Change account name.')
        print(Style.BRIGHT + Fore.YELLOW + '\'key\'' + Style.RESET_ALL + ' | Kagi stages.')
        print(Style.BRIGHT + Fore.YELLOW + '\'Farm\'' + Style.RESET_ALL + ' | LR medals.')
        print(Style.BRIGHT + Fore.YELLOW + '\'fu\'' + Style.RESET_ALL + ' | 40 ur medals.')
        print(Style.BRIGHT + Fore.YELLOW + '\'swap\'' + Style.RESET_ALL + ' |change Accounts(global).')
        print(Style.BRIGHT + Fore.YELLOW + '\'swapjp\'' + Style.RESET_ALL + ' |change Accounts(japan).')
        print(Style.BRIGHT + Fore.YELLOW + '\'refresh\'' + Style.RESET_ALL + ' | Reauthenticates the client.')
    elif command == 'stage':
        stage = input('What stage would you like to complete?: ')
        difficulty = input('Enter the difficulty|(0:Easy, 1:Hard etc...): ')
        loop = input('Enter how many times to execute: ')
        for i in range(int(loop)):
            complete_stage(stage, difficulty)
    elif command == 'area':
        area = input('Enter the area to complete: ')
        loop = input('How many times to complete the entire area: ')
        for i in range(int(loop)):
            complete_area(area)
    elif command == 'gift':
        accept_gifts()
        accept_missions()
    elif command == 'omegafarm':
        complete_unfinished_quest_stages()
        refresh_client()
        complete_unfinished_events()
        complete_unfinished_zbattles()
        complete_clash()
        accept_gifts()
        accept_missions()
        refresh_client()
    ## When this will get updated, we shall add :finishzbattle,30, + sell + sellhercule + baba(?)
    elif command == 'completequests':
        complete_unfinished_quest_stages()
    elif command == 'completeevents':
        complete_unfinished_events()
    elif command == 'completezbattles':
        complete_unfinished_zbattles()
    elif command == 'zstages':
        complete_zbattle_stage()
    elif command == 'clash':
        complete_clash()
    elif command == 'daily':
        complete_stage('130001', 0)
        complete_stage('131001', 0)
        complete_stage('132001', 0)
        complete_potential()
        accept_gifts()
        accept_missions()
    elif command == 'listevents':
        list_events()
    elif command == 'summon':
        summon()
    elif command == 'listsummons':
        list_summons()
    elif command == 'dragonballs':
        dragonballs()
    elif command == 'info':
        get_user_info()
    elif command == 'items':
        items_viewer()
    elif command == 'medals':
        sell_medals()
    elif command == 'sell':
        sell_cards__bulk_GUI()
    elif command == 'cards':
        list_cards()
    elif command == 'supporter':
        change_supporter()
    elif command == 'team':
        change_team()
    elif command == 'farm':
        lr_farm()
    elif command == 'fu':
        ur_farm()
    elif command == 'deck':
        config.deck = int(input('Enter a deck number to use: '))
    elif command == 'transfer':
        new_transfer()
    elif command == 'capacity':
        # Read a valid repeat count first.  The original checked stones and
        # increased capacity *inside* the retry loop, which raised NameError
        # when the first input was invalid and also ran one stray extra
        # increase_capacity() call per iteration.
        valid = False
        while not valid:
            try:
                increase_times = int(input("How many times do you want to increase the capacity? (+5 per time): "))
                valid = True
            except ValueError:
                print("That's not a valid number.")
        # Checking if you have enough Dragon Stones
        if increase_times > get_user()['user']['stone']:
            print("You don't have enough Dragon Stones.")
        # Increasing the capacity
        else:
            for _ in range(increase_times):
                increase_capacity()
    elif command == 'name':
        change_name()
    elif command == 'dokkan':
        dokkan()
    # (removed a duplicate, unreachable `elif command == 'supporter':` branch)
    elif command == 'sbr':
        complete_sbr()
    elif command == 'bossrush':
        complete_stage('701001', 3)
        complete_stage('701001', 4)
        complete_stage('701002', 4)
        complete_stage('701002', 5)
        complete_stage('701003', 5)
        complete_stage('701004', 5)
        complete_stage('701005', 5)
        complete_stage('701006', 5)
        complete_stage('701007', 5)
        complete_stage('701008', 5)
        if config.client == "japan":
            complete_stage('701007', 5)
            complete_stage('701008', 5)
        accept_missions()
    elif command == 'potara':
        potara()
    elif command == 'hercule':
        complete_stage('711001', 1)
        complete_stage('711002', 1)
        complete_stage('711003', 1)
        complete_stage('711004', 1)
        complete_stage('711005', 1)
        complete_stage('711006', 1)
        complete_potential()
        accept_gifts()
        accept_missions()
    elif command == 'rankup':
        rankup()
    elif command == 'chooseevents':
        event_viewer()
    elif command == 'swap':
        swap()
    elif command == 'swapjp':
        swapjp()
    elif command == 'gt':
        Gt()
    elif command == 'key':
        get_kagi()
    elif command == 'limit':
        limit_stage()
    elif command == 'refresh':
        refresh_client()
    else:
        print('Command not found.')
+
+
+####################################################################
def complete_unfinished_zbattles(kagi=False):
    """Clear every available Z-Battle event up to (but not including) level 100.

    Fetches the active z_battle_stages from the server, resolves each
    enemy's name from the local database, determines the account's next
    uncleared level, then repeatedly issues start/finish requests with a
    forged battle-result payload until level 100.  Item drops from each
    clear are tallied and printed per category.

    kagi -- when True, include an event key item (id 5) in each start
            request.
    """
    # JP Translated
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    events = r.json()
    try:
        for event in events['z_battle_stages']:
            # Enemy-name lookup: try the global DB first; on failure fall
            # back to the JP DB (only the JP branch prints the name).
            try:
                config.Model.set_connection_resolver(config.db_glb)
                x = config.ZBattles.where('z_battle_stage_id', '=', event['id']).first().enemy_name
            except:
                config.Model.set_connection_resolver(config.db_jp)
                print(config.ZBattles.where('z_battle_stage_id', '=', event['id']).first().enemy_name, end='')
            print(Fore.CYAN + Style.BRIGHT + ' | ID: ' + str(event['id']))

            # Get current zbattle level
            headers = {
                'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                'Accept': '*/*',
                'Authorization': packet.mac('GET', '/user_areas'),
                'Content-type': 'application/json',
                'X-Language': 'en',
                'X-Platform': config.platform,
                'X-AssetVersion': '////',
                'X-DatabaseVersion': '////',
                'X-ClientVersion': '////',
            }
            if config.client == 'global':
                url = 'https://ishin-global.aktsk.com/user_areas'
            else:
                url = 'http://ishin-production.aktsk.jp/user_areas'
            r = requests.get(url, headers=headers)
            if 'user_z_battles' in r.json():
                zbattles = r.json()['user_z_battles']
                if zbattles == []:
                    zbattles = 0
            else:
                zbattles = 0

            level = 1
            # NOTE(review): when zbattles is 0 (no cleared battles), the
            # for-loop below raises TypeError, which the outer except
            # swallows — this aborts the remaining events too; confirm
            # whether that is intended.
            for zbattle in zbattles:
                if int(zbattle['z_battle_stage_id']) == int(event['id']):
                    level = zbattle['max_clear_level'] + 1
            print('Current EZA Level: ' + str(level))

            # Stop at level 100 !! This may not work for all zbattle e.g kid gohan
            while level < 100:
                ##Get supporters
                headers = {
                    'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                    'Accept': '*/*',
                    'Authorization': packet.mac('GET', '/z_battles/' + str(event['id']) + '/supporters'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(event['id']) + '/supporters'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(event['id']) + '/supporters'
                r = requests.get(url, headers=headers)
                if 'supporters' in r.json():
                    # Take the first offered friend/supporter for the run.
                    supporter = r.json()['supporters'][0]['id']
                elif 'error' in r.json():
                    # NOTE(review): r.json() is a dict — concatenating it to a
                    # str raises TypeError here, caught by the outer except.
                    print(Fore.RED + Style.BRIGHT + r.json())
                    return 0
                else:
                    print(Fore.RED + Style.BRIGHT + 'Problem with ZBattle')
                    # NOTE(review): requests' Response.raw is an attribute, not
                    # callable — this line would raise if ever reached.
                    print(r.raw())
                    return 0

                ###Send first request
                headers = {
                    'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                    'Accept': '*/*',
                    'Authorization': packet.mac('POST', '/z_battles/' + str(event['id']) + '/start'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }

                # Only the kagi variant spends the event key item (id 5).
                if kagi == True:
                    sign = json.dumps({
                        'friend_id': supporter,
                        'level': level,
                        'selected_team_num': config.deck,
                        'eventkagi_item_id': 5
                    })
                else:
                    sign = json.dumps({
                        'friend_id': supporter,
                        'level': level,
                        'selected_team_num': config.deck,
                    })

                enc_sign = packet.encrypt_sign(sign)
                data = {'sign': enc_sign}
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(event['id']) + '/start'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(event['id']) + '/start'
                r = requests.post(url, data=json.dumps(data), headers=headers)

                if 'sign' in r.json():
                    dec_sign = packet.decrypt_sign(r.json()['sign'])
                # Check if error was due to lack of stamina
                elif 'error' in r.json():
                    if r.json()['error']['code'] == 'act_is_not_enough':
                        # Check if allowed to refill stamina
                        if config.allow_stamina_refill == True:
                            refill_stamina()
                            # NOTE(review): the retried response's sign is never
                            # decrypted, so dec_sign below may be stale or
                            # unbound on this path — confirm.
                            r = requests.post(url, data=json.dumps(data),
                                              headers=headers)
                    else:
                        print(r.json())
                        return 0
                else:
                    print(Fore.RED + Style.BRIGHT + 'Problem with ZBattle')
                    print(r.raw())
                    return 0

                # Forge start/finish timestamps with a randomized elapsed time.
                finish_time = int(round(time.time(), 0) + 2000)
                start_time = finish_time - randint(6200000, 8200000)

                # Fabricated battle-result payload; 's'/'t' are fixed,
                # pre-captured signature/summary blobs.
                data = {
                    'elapsed_time': finish_time - start_time,
                    'is_cleared': True,
                    'level': level,
                    's': 'rGAX18h84InCwFGbd/4zr1FvDNKfmo/TJ02pd6onclk=',
                    't': 'eyJzdW1tYXJ5Ijp7ImVuZW15X2F0dGFjayI6MTAwMzg2LCJlbmVteV9hdHRhY2tfY291bnQiOjUsImVuZW15X2hlYWxfY291bnRzIjpbMF0sImVuZW15X2hlYWxzIjpbMF0sImVuZW15X21heF9hdHRhY2siOjEwMDAwMCwiZW5lbXlfbWluX2F0dGFjayI6NTAwMDAsInBsYXllcl9hdHRhY2tfY291bnRzIjpbMTBdLCJwbGF5ZXJfYXR0YWNrcyI6WzMwNjYwNTJdLCJwbGF5ZXJfaGVhbCI6MCwicGxheWVyX2hlYWxfY291bnQiOjAsInBsYXllcl9tYXhfYXR0YWNrcyI6WzEyMzY4NTBdLCJwbGF5ZXJfbWluX2F0dGFja3MiOls0NzcxOThdLCJ0eXBlIjoic3VtbWFyeSJ9fQ==',
                    'token': dec_sign['token'],
                    'used_items': [],
                    'z_battle_finished_at_ms': finish_time,
                    'z_battle_started_at_ms': start_time,
                }
                # enc_sign = encrypt_sign(sign)

                headers = {
                    'User-Agent': 'Android',
                    'Accept': '*/*',
                    'Authorization': packet.mac('POST', '/z_battles/' + str(event['id']) + '/finish'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(event['id']) + '/finish'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(event['id']) + '/finish'

                r = requests.post(url, data=json.dumps(data), headers=headers)
                dec_sign = packet.decrypt_sign(r.json()['sign'])
                # ## Print out Items from Database
                print('Level: ' + str(level))
                # ## Print out Items from Database
                if 'items' in dec_sign:
                    # Reward buckets per category: the lists keep duplicates
                    # for counting; the sets drive the de-duplicated printout.
                    supportitems = []
                    awakeningitems = []
                    trainingitems = []
                    potentialitems = []
                    treasureitems = []
                    carditems = []
                    trainingfields = []
                    stones = 0
                    supportitemsset = set()
                    awakeningitemsset = set()
                    trainingitemsset = set()
                    potentialitemsset = set()
                    treasureitemsset = set()
                    carditemsset = set()
                    trainingfieldsset = set()
                    print('Items:')
                    print('-------------------------')
                    if 'quest_clear_rewards' in dec_sign:
                        for x in dec_sign['quest_clear_rewards']:
                            if x['item_type'] == 'Point::Stone':
                                stones += x['amount']
                    for x in dec_sign['items']:
                        if x['item_type'] == 'SupportItem':

                            # print('' + SupportItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            for i in range(x['quantity']):
                                supportitems.append(x['item_id'])
                                supportitemsset.add(x['item_id'])
                        elif x['item_type'] == 'PotentialItem':

                            # print('' + PotentialItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            for i in range(x['quantity']):
                                potentialitems.append(x['item_id'])
                                potentialitemsset.add(x['item_id'])
                        elif x['item_type'] == 'TrainingItem':

                            # print('' + TrainingItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            for i in range(x['quantity']):
                                trainingitems.append(x['item_id'])
                                trainingitemsset.add(x['item_id'])
                        elif x['item_type'] == 'AwakeningItem':

                            # print('' + AwakeningItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            for i in range(x['quantity']):
                                awakeningitems.append(x['item_id'])
                                awakeningitemsset.add(x['item_id'])
                        elif x['item_type'] == 'TreasureItem':

                            # print('' + TreasureItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            for i in range(x['quantity']):
                                treasureitems.append(x['item_id'])
                                treasureitemsset.add(x['item_id'])
                        elif x['item_type'] == 'Card':

                            # card = Cards.find(x['item_id'])

                            carditems.append(x['item_id'])
                            carditemsset.add(x['item_id'])
                        elif x['item_type'] == 'Point::Stone':

                            # print('' + card.name + '['+rarity+']'+ ' x '+str(x['quantity']))
                            # print('' + TreasureItems.find(x['item_id']).name + ' x '+str(x['quantity']))

                            stones += 1
                        elif x['item_type'] == 'TrainingField':

                            # card = Cards.find(x['item_id'])

                            for i in range(x['quantity']):
                                trainingfields.append(x['item_id'])
                                trainingfieldsset.add(x['item_id'])
                        else:
                            print(x['item_type'])

                    # Print items
                    for x in supportitemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.SupportItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(Fore.CYAN + Style.BRIGHT + config.SupportItems.find(x).name + ' x' \
                              + str(supportitems.count(x)))
                    for x in awakeningitemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.AwakeningItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(Fore.MAGENTA + Style.BRIGHT + config.AwakeningItems.find(x).name + ' x' \
                              + str(awakeningitems.count(x)))
                    for x in trainingitemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TrainingItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(Fore.RED + Style.BRIGHT + config.TrainingItems.find(x).name + ' x' \
                              + str(trainingitems.count(x)))
                    for x in potentialitemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.PotentialItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(config.PotentialItems.find_or_fail(x).name + ' x' \
                              + str(potentialitems.count(x)))
                    for x in treasureitemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TreasureItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(Fore.GREEN + Style.BRIGHT + config.TreasureItems.find(x).name + ' x' \
                              + str(treasureitems.count(x)))
                    for x in trainingfieldsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TrainingFields.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(config.TrainingFields.find(x).name + ' x' \
                              + str(trainingfields.count(x)))
                    for x in carditemsset:
                        # JP Translation
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.Cards.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        # Print name and item count
                        print(config.Cards.find(x).name + ' x' + str(carditems.count(x)))
                    print(Fore.YELLOW + Style.BRIGHT + 'Stones x' + str(stones))
                    if 'gasha_point' in dec_sign:
                        print('Friend Points: ' + str(dec_sign['gasha_point']))

                print('--------------------------')
                print('##############################################')
                level += 1
                refresh_client()

    except Exception as e:
        print(Fore.RED + Style.BRIGHT + str(e))
        print(Fore.RED + Style.BRIGHT + 'Trouble finding new Z-Battle events')
+
+
+####################################################################
def set_platform():
    """Prompt for the operating system and store it in config.platform.

    Accepts '1' (android) or '2' (ios); re-prompts on anything else —
    including empty input, which previously crashed on platform[0].
    """
    while True:
        print(
            'Choose your operating system (' + Fore.YELLOW + Style.BRIGHT + 'Android: 1' + Style.RESET_ALL + ' or' + Fore.YELLOW + Style.BRIGHT + ' IOS: 2' + Style.RESET_ALL + ') ', end='')
        choice = input('')
        # Guard against empty input before indexing the first character.
        if choice and choice[0] in ('1', '2'):
            config.platform = 'android' if choice[0] == '1' else 'ios'
            break
        print(Fore.RED + Style.BRIGHT + 'Could not identify correct operating system to use.')
+
+
+####################################################################
def list_events():
    """Print every currently available event with its stages.

    Fetches /events from the server and, for each event area, prints a
    highlighted area banner followed by one line per quest listing its
    name, id, available difficulties and area id.  Names and stages are
    resolved from the global database first, falling back to the JP one.
    """
    request_headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    url = ('https://ishin-global.aktsk.com/events'
           if config.client == 'global'
           else 'http://ishin-production.aktsk.jp/events')
    events = requests.get(url, headers=request_headers).json()

    last_area = None
    for event in events['events']:
        for quest in event['quests']:
            current_area = str(event['id'])
            if current_area != last_area:
                last_area = current_area
                # Resolve the area name, preferring the global database.
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    area_name = str(config.Area.where('id', '=', current_area).first().name)
                except:
                    config.Model.set_connection_resolver(config.db_jp)
                    area_name = str(config.Area.where('id', '=', current_area).first().name)
                print('--------------------------------------------')
                print(Back.BLUE + Fore.WHITE + Style.BRIGHT \
                      + area_name)
                print('--------------------------------------------')

            quest_id = quest['id']
            config.Model.set_connection_resolver(config.db_glb)
            sugorokus = config.Sugoroku.where('quest_id', '=', int(quest_id)).get()
            if len(sugorokus) < 1:
                # Not in the global DB — fall back to the JP one.
                config.Model.set_connection_resolver(config.db_jp)
                sugorokus = config.Sugoroku.where('quest_id', '=', int(quest_id)).get()
            difficulties = [sugoroku.difficulty for sugoroku in sugorokus]
            print(config.Quests.find(quest_id).name + ' ' + str(quest_id) \
                  + ' Difficulties: ' + str(difficulties) \
                  + ' AreaID: ' + str(event['id']))
+
+
+####################################################################
def event_viewer():
    # Event GUI with options to complete stage.
    # JP Translation needs work
    #
    # Three-column window: event areas | stages of the selected area |
    # difficulty/loop controls plus a "Complete Stage" button.

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 4.4; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    events = r.json()

    # Build areas list
    areas_to_display = []  # listbox strings of the form "<area_id> | <area name>"
    stage_ids = []         # scratch buffer, cleared and refilled per event
    areas = {}             # area_id -> list of that area's quest ids

    for event in events['events']:
        area_id = str(event['id'])
        try:
            config.Model.set_connection_resolver(config.db_glb)
            area_name = area_id + ' | ' + str(config.Area.where('id', '=', area_id).first().name)
        except:
            # Area missing from the global DB - fall back to the JP DB.
            config.Model.set_connection_resolver(config.db_jp)
            area_name = area_id + ' | ' + str(config.Area.where('id', '=', area_id).first().name)
        areas_to_display.append(area_name)
        stage_ids[:] = []
        for quest in event['quests']:
            stage_ids.append(quest['id'])
        # Store a copy so the next event's clearing doesn't clobber it.
        areas[area_id] = stage_ids[:]

    stages_to_display = []
    difficulties = [0]
    stage_name = ''

    col1 = [[sg.Listbox(values=(sorted(areas_to_display)), change_submits=True, size=(30, 20), key='AREAS')]]
    col2 = [[sg.Listbox(values=(sorted(stages_to_display)), change_submits=True, size=(30, 20), key='STAGES')]]
    col3 = [[sg.Text('Name', key='STAGE_NAME', size=(30, 2))],
            [sg.Text('Difficulty: '), sg.Combo(difficulties, key='DIFFICULTIES', size=(6, 3), readonly=True)],
            [sg.Text('How many times to complete:')
                , sg.Spin([i for i in range(1, 999)], key='LOOP', initial_value=1, size=(3, 3))],
            [sg.Button(button_text='Complete Stage', key='COMPLETE_STAGE')]]

    layout = [[sg.Column(col1), sg.Column(col2), sg.Column(col3)]]
    window = sg.Window('Event Viewer').Layout(layout)

    while True:
        event, values = window.Read()
        if event == None:
            # Window closed by the user.
            return 0

        if event == 'AREAS' and len(values['AREAS']) > 0:
            stages_to_display[:] = []
            # Check if GLB database has id, if not try JP DB.
            area_id = values['AREAS'][0].split(' | ')[0]

            for stage_id in areas[area_id]:
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    stage_name = config.Quests.find_or_fail(stage_id).name
                except:
                    config.Model.set_connection_resolver(config.db_jp)
                    stage_name = config.Quests.find_or_fail(stage_id).name
                stages_to_display.append(stage_name + ' | ' + str(stage_id))

        if event == 'STAGES' and len(values['STAGES']) > 0:
            difficulties[:] = []
            stage_id = values['STAGES'][0].split(' | ')[1]
            stage_name = values['STAGES'][0].split(' | ')[0]
            sugorokus = config.Sugoroku.where('quest_id', '=', str(stage_id)).get()
            # NOTE(review): rebinding here replaces the list object that
            # was cleared above; the Combo is updated explicitly below.
            difficulties = []
            for sugoroku in sugorokus:
                difficulties.append(str(sugoroku.difficulty))
            window.FindElement('DIFFICULTIES').Update(values=difficulties)
            window.FindElement('STAGE_NAME').Update(stage_name)

        if event == 'COMPLETE_STAGE' and stage_name != '':
            # Hide the window while the (potentially long) loop runs.
            window.Hide()
            window.Refresh()
            for i in range(int(values['LOOP'])):
                complete_stage(stage_id, values['DIFFICULTIES'])
            window.UnHide()
            window.Refresh()

        # Push the (possibly rebuilt) stage list into the listbox after
        # every event.
        window.FindElement('STAGES').Update(values=stages_to_display)
+
+
+####################################################################
def complete_potential():
    """Auto-complete every potential-system stage.

    Fetches the current /events list and, for every event whose id is
    in the potential range [140, 145), completes each of its quests
    once per available difficulty.  Stage metadata for these events is
    read from the JP database.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    events = r.json()
    for event in events['events']:
        # Potential events occupy the id range 140-144.
        if 140 <= event['id'] < 145:
            for quest in event['quests']:
                ids = quest['id']
                config.Model.set_connection_resolver(config.db_jp)
                sugorokus = config.Sugoroku.where('quest_id', '=',
                                                  int(ids)).get()
                # (The original also built an unused `difficulties`
                # list here; that dead code has been removed.)
                for sugoroku in sugorokus:
                    # Re-select the JP DB before each run in case
                    # complete_stage switched the resolver internally.
                    config.Model.set_connection_resolver(config.db_jp)
                    complete_stage(str(ids), sugoroku.difficulty)
+
+
+####################################################################
+
def list_summons():
    # Prints current available summons, could be formatted better but meh
    """Print every banner currently available on the summon page."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/gashas'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }

    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/gashas'
    else:
        url = 'http://ishin-production.aktsk.jp/gashas'

    response = requests.get(url, headers=headers)

    for banner in response.json()['gashas']:
        title = banner['name'].replace('\n', ' ')
        print(title + ' ' + str(banner['id']))
        description = banner['description']
        if description:
            # Strip in-game markup tags like {...} before printing.
            cleaned = re.sub(r'\{[^{}]*\}', "", description).replace('\n', ' ')
            print(Fore.YELLOW + cleaned)
+
+
+####################################################################
def _card_summary(card_id):
    """Return a coloured 'TYPE Name RARITY' line for a summoned card id.

    Selects the global DB if the card exists there, otherwise the JP DB,
    then formats the card's element and rarity with colorama styling.
    """
    try:
        config.Model.set_connection_resolver(config.db_glb)
        config.Cards.find_or_fail(card_id).rarity
    except Exception:
        # Card not in the global DB - use the JP DB.
        config.Model.set_connection_resolver(config.db_jp)
        config.Cards.find_or_fail(card_id).rarity

    card = config.Cards.find(card_id)
    # Rarity index -> coloured label.  The original if/elif chain would
    # silently reuse the previous card's label on an unknown rarity;
    # a missing key now raises KeyError instead of printing wrong data.
    rarity_styles = {
        0: Fore.RED + Style.BRIGHT + 'N' + Style.RESET_ALL,
        1: Fore.RED + Style.BRIGHT + 'R' + Style.RESET_ALL,
        2: Fore.RED + Style.BRIGHT + 'SR' + Style.RESET_ALL,
        3: Fore.YELLOW + 'SSR' + Style.RESET_ALL,
        4: Fore.MAGENTA + Style.BRIGHT + 'UR' + Style.RESET_ALL,
        5: Fore.CYAN + 'LR' + Style.RESET_ALL,
    }
    # Last digit of the element id encodes the card's type.
    element_styles = {
        '0': Fore.CYAN + Style.BRIGHT + 'AGL ',
        '1': Fore.GREEN + Style.BRIGHT + 'TEQ ',
        '2': Fore.MAGENTA + Style.BRIGHT + 'INT ',
        '3': Fore.RED + Style.BRIGHT + 'STR ',
        '4': Fore.YELLOW + 'PHY ',
    }
    rarity = rarity_styles[card.rarity]
    element = element_styles[str(card.element)[-1]]
    return element + card.name + ' ' + rarity


def _run_summons(window, summon_id, course, loop):
    """POST /gashas/<id>/courses/<course>/draw `loop` times, printing results.

    course 2 is a multi-summon, course 1 a single.  Hides `window` while
    the draws run.  Returns False if the server reported an error (the
    window is closed in that case), True otherwise.
    """
    path = '/gashas/' + str(summon_id) + '/courses/' + str(course) + '/draw'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('POST', path),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com' + path
    else:
        url = 'http://ishin-production.aktsk.jp' + path
    window.Hide()
    window.Refresh()
    for _ in range(int(loop)):
        r = requests.post(url, headers=headers).json()
        if 'error' in r:
            print(r)
            window.Close()
            return False
        for card in r['gasha_items']:
            print(_card_summary(int(card['item_id'])))
    window.UnHide()
    window.Refresh()
    return True


def summon():
    """GUI for drawing on the currently available summon banners.

    Lists all banners, lets the user pick multi/single and a repeat
    count, then performs the draws.  The duplicated multi/single bodies
    of the original have been unified in _run_summons/_card_summary.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/gashas'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }

    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/gashas'
    else:
        url = 'http://ishin-production.aktsk.jp/gashas'
    r = requests.get(url, headers=headers)
    gashas = []
    for gasha in r.json()['gashas']:
        gashas.append(gasha['name'] + ' | ' + str(gasha['id']))

    layout = [[sg.Listbox(values=(gashas), size=(30, 20), key='GASHAS')],
              [sg.Radio('Multi', "TYPE", default=True), sg.Radio('Single', "TYPE")],
              [sg.Spin([i for i in range(1, 999)], key='LOOP', initial_value=1, size=(3, 3))],
              [sg.Button(button_text='Summon!', key='SUMMON')]]
    window = sg.Window('Event Viewer').Layout(layout)

    while True:
        event, values = window.Read()
        if event is None:
            # Window closed by the user.
            return 0

        if event == 'SUMMON' and len(values['GASHAS']) > 0:
            summon_id = values['GASHAS'][0].split(' | ')[1]
            # values[0] is the 'Multi' radio button: course 2 = multi,
            # course 1 = single.
            if values[0]:
                if not _run_summons(window, summon_id, 2, values['LOOP']):
                    return 0
            else:
                if not _run_summons(window, summon_id, 1, values['LOOP']):
                    return 0
                print('------------------------------------------')
+
+
+####################################################################
def sell_cards__bulk_GUI():
    # Provides a GUI to select a range of cards to sell.
    #
    # Cards on teams, set as support leaders, or favourited are excluded.
    # First request: ids of all cards currently placed on teams.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/teams'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }

    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/teams'
    else:
        url = 'http://ishin-production.aktsk.jp/teams'
    r = requests.get(url, headers=headers)

    team_cards = []
    for team in r.json()['user_card_teams']:
        team_cards.extend(team['user_card_ids'])

    # Second request: support leader card ids (also protected from sale).
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/support_leaders'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }

    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/support_leaders'
    else:
        url = 'http://ishin-production.aktsk.jp/support_leaders'
    r = requests.get(url, headers=headers)
    team_cards.extend(r.json()['support_leader_ids'])

    # Third request: the user's full card box.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/cards'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }

    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/cards'
    else:
        url = 'http://ishin-production.aktsk.jp/cards'
    r = requests.get(url, headers=headers)

    # Master list of sellable cards as dicts:
    # {card_id, unique_id (box instance id), name, rarity}
    cards_master_dict = []
    for card in r.json()['cards']:
        # Avoid selling favourited cards
        if card['is_favorite'] == True:
            continue
        try:
            # Global-DB path.  Falls to the except branch (JP DB) when
            # the card id is missing from the global DB.
            config.Model.set_connection_resolver(config.db_glb)
            # Quick and dirty way to exclude elder kais from sell
            hp_max = config.Cards.find_or_fail(card['card_id']).hp_max
            if hp_max == 1:
                continue

            card_name = config.Cards.find_or_fail(card['card_id']).name
            rarity = config.Cards.find_or_fail(card['card_id']).rarity
            if card['id'] not in team_cards:
                cards_master_dict.append({
                    'card_id': card['card_id'],
                    'unique_id': card['id'],
                    'name': card_name,
                    'rarity': rarity
                })
        except:
            config.Model.set_connection_resolver(config.db_jp)
            # Quick and dirty way to exclude elder kais from sell
            # NOTE(review): the JP branch compares hp_max to the STRING
            # '1' (and prints 'max'), unlike the int compare above -
            # presumably the JP DB stores it as text; confirm.
            hp_max = config.Cards.find_or_fail(card['card_id']).hp_max
            if hp_max == '1':
                print('max')
                continue

            card_name = config.Cards.find_or_fail(card['card_id']).name
            rarity = config.Cards.find_or_fail(card['card_id']).rarity
            if card['id'] not in team_cards:
                cards_master_dict.append({
                    'card_id': card['card_id'],
                    'unique_id': card['id'],
                    'name': card_name,
                    'rarity': rarity
                })

    # Working copy filtered by the rarity checkboxes.
    cards_to_display_dicts = []
    cards_to_display_dicts = cards_master_dict[:]

    cards_to_display = []
    for card in cards_to_display_dicts:
        cards_to_display.append(card['name'])

    col1 = [[sg.Checkbox('N', default=False, key='N', change_submits=True)],
            [sg.Checkbox('R', default=False, key='R', change_submits=True)],
            [sg.Checkbox('SR', default=False, key='SR', change_submits=True)],
            [sg.Checkbox('SSR', default=False, key='SSR', change_submits=True)]]
    col2 = [[sg.Listbox(values=([]), size=(30, 20), key='CARDS')]]
    layout = [[sg.Column(col1), sg.Column(col2)], [sg.Button(button_text='Sell!', key='SELL')]]
    window = sg.Window('Sell Cards').Layout(layout)
    while True:
        event, values = window.Read()

        if event == None:
            # Window closed by the user.
            window.Close()
            return 0

        if event in ['N', 'R', 'SR', 'SSR', 'SELL']:
            # Map checked boxes to rarity indices (N=0 .. SSR=3) and
            # rebuild the display lists from the master list.
            accepted_rarities = []
            if values['N']:
                accepted_rarities.append(0)
            if values['R']:
                accepted_rarities.append(1)
            if values['SR']:
                accepted_rarities.append(2)
            if values['SSR']:
                accepted_rarities.append(3)

            cards_to_display[:] = []
            cards_to_display_dicts[:] = []
            for card in cards_master_dict:
                if card['rarity'] in accepted_rarities:
                    cards_to_display.append(card['name'])
                    cards_to_display_dicts.append(card)

            if event == 'SELL':
                # Sell everything currently displayed, removing each
                # sold card from the master list.
                cards_to_sell = []
                window.Hide()
                window.Refresh()
                for card in cards_to_display_dicts:
                    cards_to_sell.append(card['unique_id'])
                    cards_master_dict.remove(card)
                sell_cards(cards_to_sell)
                # Rebuild the display from what remains after the sale.
                cards_to_display[:] = []
                cards_to_display_dicts[:] = []
                cards_to_display_dicts[:] = cards_master_dict
                for card in cards_to_display_dicts:
                    if card['rarity'] in accepted_rarities:
                        cards_to_display.append(card['name'])
                window.UnHide()
                window.Refresh()

        window.FindElement('CARDS').Update(values=cards_to_display)

    return 0
+
+
+####################################################################
def items_viewer():
    # ## Accepts Outstanding Login Bonuses
    # NOTE(review): despite the comment above, this function only FETCHES
    # the item inventory and displays it in a GUI - confirm intent.
    headers = {
        'User-Agent': 'Android',
        'Accept': '*/*',
        'Authorization': packet.mac('GET',
                                    '/resources/login?potential_items=true&training_items=true&support_items=true&treasure_items=true&special_items=true'),
        'X-Language': 'en',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/resources/login?potential_items=true&training_items=true&support_items=true&treasure_items=true&special_items=true'
    else:
        url = 'http://ishin-production.aktsk.jp/resources/login?potential_items=true&training_items=true&support_items=true&treasure_items=true&special_items=true'
    r = requests.get(url, headers=headers)

    # One checkbox per item category; checked categories are printed to
    # the Output element on every checkbox toggle.
    col1 = [[sg.Checkbox('Support Items', default=False, key='SUPPORT', change_submits=True)],
            [sg.Checkbox('Training Items', default=False, key='TRAINING', change_submits=True)],
            [sg.Checkbox('Potential Items', default=False, key='POTENTIAL', change_submits=True)],
            [sg.Checkbox('Treasure Items', default=False, key='TREASURE', change_submits=True)],
            [sg.Checkbox('Special Items', default=False, key='SPECIAL', change_submits=True)]]
    col2 = [[sg.Output(size=(40, 30))]]
    layout = [[sg.Column(col1), sg.Column(col2)]]
    window = sg.Window('Items').Layout(layout)
    while True:
        event, values = window.Read()

        if event == None:
            # Window closed by the user.
            window.Close()
            return 0

        if event in ['SUPPORT', 'TRAINING', 'POTENTIAL', 'TREASURE', 'SPECIAL']:
            # Clear the console, then reprint every checked section.
            os.system('cls' if os.name == 'nt' else 'clear')
            if values['SUPPORT']:
                print('\n##########################')
                print('Support Items -')
                print('##########################')
                window.Refresh()
                for item in r.json()['support_items']['items']:
                    # Each lookup tries the global DB, then the JP DB.
                    try:
                        config.Model.set_connection_resolver(config.db_glb)
                        print(str(config.SupportItems.find_or_fail(item['item_id']).name) + ' x' + str(
                            item['quantity']))
                    except:
                        config.Model.set_connection_resolver(config.db_jp)
                        print(str(config.SupportItems.find_or_fail(item['item_id']).name) + ' x' + str(
                            item['quantity']))
                window.Refresh()
            if values['TRAINING']:
                print('\n##########################')
                print('Training Items -')
                print('##########################')
                window.Refresh()
                for item in r.json()['training_items']:
                    try:
                        config.Model.set_connection_resolver(config.db_glb)
                        print(str(config.TrainingItems.find(item['training_item_id']).name) + ' x' + str(
                            item['quantity']))
                    except:
                        config.Model.set_connection_resolver(config.db_jp)
                        print(str(config.TrainingItems.find(item['training_item_id']).name) + ' x' + str(
                            item['quantity']))
                window.Refresh()
            if values['POTENTIAL']:
                print('\n##########################')
                print('Potential Items -')
                print('##########################')
                window.Refresh()
                # Reversed so items display newest-first in the output.
                for item in reversed(r.json()['potential_items']['user_potential_items']):
                    try:
                        config.Model.set_connection_resolver(config.db_glb)
                        print(str(config.PotentialItems.find(item['potential_item_id']).name) + ' x' + str(
                            item['quantity']))
                        print(config.PotentialItems.find(item['potential_item_id']).description)
                    except:
                        config.Model.set_connection_resolver(config.db_jp)
                        print(str(config.PotentialItems.find(item['potential_item_id']).name) + ' x' + str(
                            item['quantity']))
                        print(config.PotentialItems.find(item['potential_item_id']).description)
                window.Refresh()
            if values['TREASURE']:
                print('\n##########################')
                print('Treasure Items -')
                print('##########################')
                window.Refresh()
                for item in r.json()['treasure_items']['user_treasure_items']:
                    try:
                        config.Model.set_connection_resolver(config.db_glb)
                        print(str(config.TreasureItems.find(item['treasure_item_id']).name) + ' x' + str(
                            item['quantity']))
                    except:
                        config.Model.set_connection_resolver(config.db_jp)
                        print(str(config.TreasureItems.find(item['treasure_item_id']).name) + ' x' + str(
                            item['quantity']))
                window.Refresh()
            if values['SPECIAL']:
                print('\n##########################')
                print('Special Items -')
                print('##########################')
                window.Refresh()
                for item in r.json()['special_items']:
                    try:
                        config.Model.set_connection_resolver(config.db_glb)
                        print(str(config.SpecialItems.find(item['special_item_id']).name) + ' x' + str(
                            item['quantity']))
                    except:
                        config.Model.set_connection_resolver(config.db_jp)
                        print(str(config.SpecialItems.find(item['special_item_id']).name) + ' x' + str(
                            item['quantity']))
                window.Refresh()
+
+
+####################################################################
def list_cards():
    # GUI that lists every card in the user's box.  Left column: card
    # list sorted by cost then name; right column: details (leader
    # skill, passive, up to seven link skills) for the selected card.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/cards'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/cards'
    else:
        url = 'http://ishin-production.aktsk.jp/cards'
    r = requests.get(url, headers=headers)
    # card_id -> metadata dict; later box entries with the same card_id
    # overwrite earlier ones.
    cards = {}
    for card in r.json()['cards']:
        # Every DB lookup tries the global DB first, then the JP DB.
        try:
            config.Model.set_connection_resolver(config.db_glb)
            name = config.Cards.find_or_fail(card['card_id']).name
        except:
            config.Model.set_connection_resolver(config.db_jp)
            name = config.Cards.find_or_fail(card['card_id']).name

        try:
            config.Model.set_connection_resolver(config.db_glb)
            element = str(config.Cards.find_or_fail(card['card_id']).element)
        except:
            config.Model.set_connection_resolver(config.db_jp)
            element = str(config.Cards.find_or_fail(card['card_id']).element)

        # Last digit of the element id encodes the card's type.
        if element[-1] == '0':
            element = 'AGL'
        elif element[-1] == '1':
            element = 'TEQ'
        elif element[-1] == '2':
            element = 'INT'
        elif element[-1] == '3':
            element = 'STR'
        elif element[-1] == '4':
            element = 'PHY'

        try:
            config.Model.set_connection_resolver(config.db_glb)
            cost = config.Cards.find_or_fail(card['card_id']).cost
            leader_skill_id = config.Cards.find_or_fail(card['card_id']).leader_skill_id
            passive_skill_id = config.Cards.find_or_fail(card['card_id']).passive_skill_set_id
            links_skill_ids = []
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill1_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill2_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill3_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill4_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill5_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill6_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill7_id)

        except:
            config.Model.set_connection_resolver(config.db_jp)
            cost = config.Cards.find_or_fail(card['card_id']).cost
            leader_skill_id = config.Cards.find_or_fail(card['card_id']).leader_skill_id
            passive_skill_id = config.Cards.find_or_fail(card['card_id']).passive_skill_set_id
            links_skill_ids = []
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill1_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill2_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill3_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill4_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill5_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill6_id)
            links_skill_ids.append(config.Cards.find_or_fail(card['card_id']).link_skill7_id)

        cards[card['card_id']] = {
            'id': card['card_id'],
            'unique_id': card['id'],
            'name': name,
            'type': element,
            'cost': cost,
            'leader_skill_id': leader_skill_id,
            'link_skill_ids': links_skill_ids,
            'passive_skill_id': passive_skill_id
        }
    cards_sort = []
    for item in cards:
        cards_sort.append(cards[item])

    # Sort cards for listbox
    # (sorted is stable, so this yields cost-major, name-minor order.)
    cards_sort = sorted(cards_sort, key=lambda k: k['name'])
    cards_sort = sorted(cards_sort, key=lambda k: k['cost'])

    # Card strings to for listbox value
    cards_to_display = []
    for card in cards_sort:
        cards_to_display.append(card['type'] + ' ' + str(card['cost']) + ' ' + card['name'] + ' | ' + str(card['id']))

    col1 = [[sg.Listbox(values=(cards_to_display), size=(30, 30), key='CARDS', change_submits=True,
                        font=('Courier', 15, 'bold'))]]
    col2 = [[sg.Text('Type', key='TYPE', font=('', 15, 'bold'), auto_size_text=True),
             sg.Text('Name', key='NAME', size=(None, 3), font=('', 15, 'bold'), auto_size_text=True)],
            [sg.Text('Cost', key='COST', font=('', 10, 'bold'))],
            [sg.Text('Leader Skill', key='LEADERSKILLNAME', size=(None, 1), font=('', 12, 'bold underline'))],
            [sg.Text('Leader Skill Description', key='LEADERSKILLDESC', size=(None, 4), font=('', 10, 'bold'))],
            [sg.Text('Passive', key='PASSIVESKILLNAME', size=(None, 2), font=('', 12, 'bold underline'))],
            [sg.Text('Passive Description', key='PASSIVESKILLDESC', size=(None, 5), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL1', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL2', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL3', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL4', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL5', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL6', size=(None, 1), font=('', 10, 'bold'))],
            [sg.Text('Link Skill', key='LINKSKILL7', size=(None, 1), font=('', 10, 'bold'))]]

    layout = [[sg.Column(col1), sg.Column(col2)]]
    window = sg.Window('Items').Layout(layout)
    while True:
        event, values = window.Read()

        if event == None:
            # Window closed by the user.
            window.Close()
            return 0

        if event == 'CARDS':
            # Get Card ID
            card_id = int(values['CARDS'][0].split(' | ')[1])

            # Get correct colour for card element
            if cards[card_id]['type'] == 'PHY':
                colour = 'gold2'
            elif cards[card_id]['type'] == 'STR':
                colour = 'red'
            elif cards[card_id]['type'] == 'AGL':
                colour = 'blue'
            elif cards[card_id]['type'] == 'TEQ':
                colour = 'green'
            elif cards[card_id]['type'] == 'INT':
                colour = 'purple'
            else:
                colour = 'black'

            # Retrieve leaderskill from DB
            try:
                config.Model.set_connection_resolver(config.db_glb)
                leader_skill_name = config.LeaderSkills.find_or_fail(cards[card_id]['leader_skill_id']).name.replace(
                    '\n', ' ')
                leader_skill_desc = config.LeaderSkills.find_or_fail(
                    cards[card_id]['leader_skill_id']).description.replace('\n', ' ')

            except:
                config.Model.set_connection_resolver(config.db_jp)
                leader_skill_name = config.LeaderSkills.find_or_fail(cards[card_id]['leader_skill_id']).name.replace(
                    '\n', ' ')
                leader_skill_desc = config.LeaderSkills.find_or_fail(
                    cards[card_id]['leader_skill_id']).description.replace('\n', ' ')

            # Retrieve passive skill
            if cards[card_id]['passive_skill_id'] == None:
                passive_skill_name = 'None'
                passive_skill_desc = 'None'
            else:
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    passive_skill_name = config.Passives.find_or_fail(cards[card_id]['passive_skill_id']).name.replace(
                        '\n', ' ')
                    passive_skill_desc = config.Passives.find_or_fail(
                        cards[card_id]['passive_skill_id']).description.replace('\n', ' ')

                except:
                    config.Model.set_connection_resolver(config.db_jp)
                    passive_skill_name = config.Passives.find_or_fail(cards[card_id]['passive_skill_id']).name.replace(
                        '\n', ' ')
                    passive_skill_desc = config.Passives.find_or_fail(
                        cards[card_id]['passive_skill_id']).description.replace('\n', ' ')

            # Retrieve link skills from DB
            # NOTE(review): ls1-ls4 stay None when the skill is missing,
            # but ls5-ls7 default to the placeholder 'Link Skill' -
            # asymmetry preserved as-is; confirm whether intentional.
            ls1 = None
            ls2 = None
            ls3 = None
            ls4 = None
            ls5 = None
            ls6 = None
            ls7 = None

            try:
                config.Model.set_connection_resolver(config.db_glb)
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][0]) != None:
                    ls1 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][0]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][1]) != None:
                    ls2 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][1]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][2]) != None:
                    ls3 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][2]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][3]) != None:
                    ls4 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][3]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][4]) != None:
                    ls5 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][4]).name.replace('\n', ' ')
                else:
                    ls5 = 'Link Skill'
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][5]) != None:
                    ls6 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][5]).name.replace('\n', ' ')
                else:
                    ls6 = 'Link Skill'
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][6]) != None:
                    ls7 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][6]).name.replace('\n', ' ')
                else:
                    ls7 = 'Link Skill'
            except:
                config.Model.set_connection_resolver(config.db_jp)
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][0]) != None:
                    ls1 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][0]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][1]) != None:
                    ls2 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][1]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][2]) != None:
                    ls3 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][2]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][3]) != None:
                    ls4 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][3]).name.replace('\n', ' ')
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][4]) != None:
                    ls5 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][4]).name.replace('\n', ' ')
                else:
                    ls5 = 'Link Skill'
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][5]) != None:
                    ls6 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][5]).name.replace('\n', ' ')
                else:
                    ls6 = 'Link Skill'
                if config.LinkSkills.find(cards[card_id]['link_skill_ids'][6]) != None:
                    ls7 = config.LinkSkills.find(cards[card_id]['link_skill_ids'][6]).name.replace('\n', ' ')
                else:
                    ls7 = 'Link Skill'

            # Push all details into the right-hand column.
            window.FindElement('NAME').Update(value=cards[card_id]['name'].replace('\n', ' '))
            window.FindElement('TYPE').Update(value='[' + cards[card_id]['type'] + ']', text_color=colour)
            window.FindElement('COST').Update(value='COST: ' + str(cards[card_id]['cost']))
            window.FindElement('LEADERSKILLNAME').Update(value=leader_skill_name)
            window.FindElement('LEADERSKILLDESC').Update(value=leader_skill_desc)
            window.FindElement('PASSIVESKILLNAME').Update(value=passive_skill_name)
            window.FindElement('PASSIVESKILLDESC').Update(value=passive_skill_desc)
            window.FindElement('LINKSKILL1').Update(value=ls1)
            window.FindElement('LINKSKILL2').Update(value=ls2)
            window.FindElement('LINKSKILL3').Update(value=ls3)
            window.FindElement('LINKSKILL4').Update(value=ls4)
            window.FindElement('LINKSKILL5').Update(value=ls5)
            window.FindElement('LINKSKILL6').Update(value=ls6)
            window.FindElement('LINKSKILL7').Update(value=ls7)
+
+
+####################################################################
def sell_medals():
    """Show the account's awakening medals in a window and sell the chosen ones.

    Fetches /awakening_items, lists each medal as "name [xN] | id", and
    exchanges the selected medal in batches of 99 (the server caps the
    quantity per exchange request), then refreshes the list in place.
    """
    # Get the current medal inventory.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/awakening_items'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        config.Model.set_connection_resolver(config.db_glb)
        url = 'https://ishin-global.aktsk.com/awakening_items'
    else:
        config.Model.set_connection_resolver(config.db_jp)
        url = 'http://ishin-production.aktsk.jp/awakening_items'
    r = requests.get(url, headers=headers)

    # Build the listbox entries, resolving names from the GLB db with a JP fallback.
    medal_list = []
    for medal in reversed(r.json()['awakening_items']):
        try:
            config.Model.set_connection_resolver(config.db_glb)
            item = config.Medal.find_or_fail(int(medal['awakening_item_id']))
        except:
            config.Model.set_connection_resolver(config.db_jp)
            item = config.Medal.find_or_fail(int(medal['awakening_item_id']))

        medal_list.append(item.name + ' [x' + str(medal['quantity']) + '] | ' + str(item.id))

    layout = [[sg.Text('Select a medal-')],
              [sg.Listbox(values=(medal_list), size=(30, 15), key='medal_tally', font=('', 15, 'bold'))],
              [sg.Text('Amount'), sg.Spin([i for i in range(1, 999)], initial_value=1, size=(5, None))],
              [sg.Button(button_text='Sell', key='Medal')]]

    window = sg.Window('Medal List', keep_on_top=True).Layout(layout)
    while True:
        event, values = window.Read()

        if event is None:  # window closed by the user
            window.Close()
            return 0

        # Check if a medal is selected and sell it.
        if event == 'Medal':
            if len(values['medal_tally']) == 0:
                print(Fore.RED + Style.BRIGHT + "You did not select a medal.")
                continue

            # Listbox entry is "name [xN] | id" — pull the id back out of it.
            value = values['medal_tally'][0]
            medal = value.split(' | ')
            medalo = medal[1]
            amount = values[0]

            headers = {
                'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                'Accept': '*/*',
                'Authorization': packet.mac('POST', '/awakening_items/exchange'),
                'Content-type': 'application/json',
                'X-Platform': config.platform,
                'X-AssetVersion': '////',
                'X-DatabaseVersion': '////',
                'X-ClientVersion': '////',
            }
            if config.client == 'global':
                url = 'https://ishin-global.aktsk.com/awakening_items/exchange'
            else:
                url = 'http://ishin-production.aktsk.jp/awakening_items/exchange'

            # Split the requested amount into 99-sized chunks plus a remainder.
            medal_id = int(medalo)
            chunk = int(amount) // 99
            remainder = int(amount) % 99

            window.Hide()
            window.Refresh()
            for i in range(chunk):
                data = {'awakening_item_id': medal_id, 'quantity': 99}
                r = requests.post(url, data=json.dumps(data), headers=headers)
                if 'error' in r.json():
                    # BUG FIX: r.json is a method; the old code printed its repr
                    # instead of the error payload.
                    print(Fore.RED + Style.BRIGHT + str(r.json()))
                else:
                    print(Fore.GREEN + Style.BRIGHT + 'Sold Medals x' + str(99))

            if remainder > 0:
                data = {'awakening_item_id': medal_id, 'quantity': remainder}
                r = requests.post(url, data=json.dumps(data), headers=headers)
                if 'error' in r.json():
                    # BUG FIX: same r.json vs r.json() fix as above.
                    print(Fore.RED + Style.BRIGHT + str(r.json()))
                else:
                    print(Fore.GREEN + Style.BRIGHT + 'Sold Medals x' + str(remainder))

            # Re-fetch the inventory so the listbox reflects the sale.
            headers = {
                'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                'Accept': '*/*',
                'Authorization': packet.mac('GET', '/awakening_items'),
                'Content-type': 'application/json',
                'X-Language': 'en',
                'X-Platform': config.platform,
                'X-AssetVersion': '////',
                'X-DatabaseVersion': '////',
                'X-ClientVersion': '////',
            }
            if config.client == 'global':
                url = 'https://ishin-global.aktsk.com/awakening_items'
            else:
                url = 'http://ishin-production.aktsk.jp/awakening_items'
            r = requests.get(url, headers=headers)

            medal_list[:] = []
            for medal in reversed(r.json()['awakening_items']):
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    item = config.Medal.find_or_fail(int(medal['awakening_item_id']))
                except:
                    config.Model.set_connection_resolver(config.db_jp)
                    item = config.Medal.find_or_fail(int(medal['awakening_item_id']))

                medal_list.append(item.name + ' [x' + str(medal['quantity']) + ']' + ' | ' + str(item.id))

            window.FindElement('medal_tally').Update(values=medal_list)
            window.UnHide()
            window.Refresh()
+
+
+####################################################################
def complete_zbattle_stage(kagi=False):
    """Pick a Z-Battle event in a window and auto-complete it repeatedly.

    Fetches the active Z-Battle events, shows a selection window, then for the
    chosen stage/level runs the start -> finish request pair the requested
    number of times, printing the rewards after each run.

    kagi -- when True, spend an event key item (eventkagi_item_id 5) on start.
    """
    # Fetch the current event list to discover active Z-Battles.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    events = r.json()

    # Resolve each Z-Battle's enemy name (GLB db first, JP db as fallback).
    zbattles_to_display = []
    for event in events['z_battle_stages']:
        try:
            config.Model.set_connection_resolver(config.db_glb)
            zbattle = config.ZBattles.where('z_battle_stage_id', '=', event['id']).first().enemy_name + ' | ' + str(
                event['id'])
        except:
            config.Model.set_connection_resolver(config.db_jp)
            zbattle = config.ZBattles.where('z_battle_stage_id', '=', event['id']).first().enemy_name + ' | ' + str(
                event['id'])
        zbattles_to_display.append(zbattle)

    col1 = [[sg.Text('Select a Z-Battle', font=('', 15, 'bold'))],
            [sg.Listbox(values=(zbattles_to_display), size=(30, 15), key='ZBATTLE', font=('', 15, 'bold'))]]

    col2 = [[sg.Text('Select Single Stage:'), sg.Combo(['5', '10', '15', '20', '25', '30'], size=(6, 3), key='LEVEL')],
            [sg.Text('Amount of times: '),
             sg.Spin([i for i in range(1, 999)], initial_value=1, size=(5, None), key='LOOP')],
            [sg.Button(button_text='Go!', key='GO')]]

    layout = [[sg.Column(col1), sg.Column(col2)]]
    window = sg.Window('Medal List').Layout(layout)

    while True:
        event, values = window.Read()
        if event is None:  # window closed by the user
            window.Close()
            return 0

        if event == 'GO':
            if len(values['ZBATTLE']) == 0:
                print(Fore.RED + Style.BRIGHT + "Select a Z-Battle!")
                continue

            for i in range(int(values['LOOP'])):
                window.Hide()
                window.Refresh()
                # Listbox entry is "enemy name | stage id".
                stage = values['ZBATTLE'][0].split(' | ')[1]
                level = values['LEVEL']

                # Get a supporter (friend) — required by the start request.
                headers = {
                    'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                    'Accept': '*/*',
                    'Authorization': packet.mac('GET', '/z_battles/' + str(stage) + '/supporters'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(stage) + '/supporters'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(stage) + '/supporters'
                r = requests.get(url, headers=headers)
                if 'supporters' in r.json():
                    supporter = r.json()['supporters'][0]['id']
                elif 'error' in r.json():
                    # BUG FIX: str + dict raised TypeError; stringify the payload.
                    print(Fore.RED + Style.BRIGHT + str(r.json()))
                    return 0
                else:
                    print(Fore.RED + Style.BRIGHT + 'Problem with ZBattle')
                    # BUG FIX: Response.raw is an attribute, not a callable;
                    # print the body text instead.
                    print(r.text)
                    return 0

                # Send the battle-start request (signed payload).
                headers = {
                    'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
                    'Accept': '*/*',
                    'Authorization': packet.mac('POST', '/z_battles/' + str(stage) + '/start'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }

                if kagi == True:
                    sign = json.dumps({
                        'friend_id': supporter,
                        'level': int(level),
                        'selected_team_num': config.deck,
                        'eventkagi_item_id': 5
                    })
                else:
                    sign = json.dumps({
                        'friend_id': supporter,
                        'level': int(level),
                        'selected_team_num': config.deck,
                    })

                enc_sign = packet.encrypt_sign(sign)
                data = {'sign': enc_sign}
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(stage) + '/start'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(stage) + '/start'
                r = requests.post(url, data=json.dumps(data), headers=headers)

                if 'sign' in r.json():
                    dec_sign = packet.decrypt_sign(r.json()['sign'])
                # Check if the error was due to lack of stamina.
                elif 'error' in r.json():
                    if r.json()['error']['code'] == 'act_is_not_enough':
                        # Check if allowed to refill stamina, then retry once.
                        if config.allow_stamina_refill == True:
                            refill_stamina()
                            r = requests.post(url, data=json.dumps(data),
                                              headers=headers)
                            # BUG FIX: the retry never re-parsed the sign, so
                            # dec_sign was unbound (NameError) further down.
                            if 'sign' in r.json():
                                dec_sign = packet.decrypt_sign(r.json()['sign'])
                            else:
                                print(r.json())
                                return 0
                        else:
                            print(r.json())
                            return 0
                    else:
                        # BUG FIX: any other error code previously fell through
                        # with dec_sign unbound.
                        print(Fore.RED + Style.BRIGHT + str(r.json()))
                        return 0
                else:
                    print(Fore.RED + Style.BRIGHT + 'Problem with ZBattle')
                    # BUG FIX: r.raw is not callable; print the body text.
                    print(r.text)
                    return 0

                # Fabricate plausible battle timings for the finish request.
                finish_time = int(round(time.time(), 0) + 2000)
                start_time = finish_time - randint(6200000, 8200000)

                data = {
                    'elapsed_time': finish_time - start_time,
                    'is_cleared': True,
                    'level': int(level),
                    's': 'rGAX18h84InCwFGbd/4zr1FvDNKfmo/TJ02pd6onclk=',
                    't': 'eyJzdW1tYXJ5Ijp7ImVuZW15X2F0dGFjayI6MTAwMzg2LCJlbmVteV9hdHRhY2tfY291bnQiOjUsImVuZW15X2hlYWxfY291bnRzIjpbMF0sImVuZW15X2hlYWxzIjpbMF0sImVuZW15X21heF9hdHRhY2siOjEwMDAwMCwiZW5lbXlfbWluX2F0dGFjayI6NTAwMDAsInBsYXllcl9hdHRhY2tfY291bnRzIjpbMTBdLCJwbGF5ZXJfYXR0YWNrcyI6WzMwNjYwNTJdLCJwbGF5ZXJfaGVhbCI6MCwicGxheWVyX2hlYWxfY291bnQiOjAsInBsYXllcl9tYXhfYXR0YWNrcyI6WzEyMzY4NTBdLCJwbGF5ZXJfbWluX2F0dGFja3MiOls0NzcxOThdLCJ0eXBlIjoic3VtbWFyeSJ9fQ==',
                    'token': dec_sign['token'],
                    'used_items': [],
                    'z_battle_finished_at_ms': finish_time,
                    'z_battle_started_at_ms': start_time,
                }

                headers = {
                    'User-Agent': 'Android',
                    'Accept': '*/*',
                    'Authorization': packet.mac('POST', '/z_battles/' + str(stage) + '/finish'),
                    'Content-type': 'application/json',
                    'X-Platform': config.platform,
                    'X-AssetVersion': '////',
                    'X-DatabaseVersion': '////',
                    'X-ClientVersion': '////',
                }
                if config.client == 'global':
                    url = 'https://ishin-global.aktsk.com/z_battles/' + str(stage) + '/finish'
                else:
                    url = 'http://ishin-production.aktsk.jp/z_battles/' + str(stage) + '/finish'

                r = requests.post(url, data=json.dumps(data), headers=headers)
                dec_sign = packet.decrypt_sign(r.json()['sign'])
                print('Level: ' + str(level))
                # Print out the rewards, resolving names from the database.
                if 'items' in dec_sign:
                    supportitems = []
                    awakeningitems = []
                    trainingitems = []
                    potentialitems = []
                    treasureitems = []
                    carditems = []
                    trainingfields = []
                    stones = 0
                    supportitemsset = set()
                    awakeningitemsset = set()
                    trainingitemsset = set()
                    potentialitemsset = set()
                    treasureitemsset = set()
                    carditemsset = set()
                    trainingfieldsset = set()
                    print('Items:')
                    print('-------------------------')
                    if 'quest_clear_rewards' in dec_sign:
                        for x in dec_sign['quest_clear_rewards']:
                            if x['item_type'] == 'Point::Stone':
                                stones += x['amount']
                    # Tally each drop by type; the sets hold the distinct ids.
                    for x in dec_sign['items']:
                        if x['item_type'] == 'SupportItem':
                            for i in range(x['quantity']):
                                supportitems.append(x['item_id'])
                            supportitemsset.add(x['item_id'])
                        elif x['item_type'] == 'PotentialItem':
                            for i in range(x['quantity']):
                                potentialitems.append(x['item_id'])
                            potentialitemsset.add(x['item_id'])
                        elif x['item_type'] == 'TrainingItem':
                            for i in range(x['quantity']):
                                trainingitems.append(x['item_id'])
                            trainingitemsset.add(x['item_id'])
                        elif x['item_type'] == 'AwakeningItem':
                            for i in range(x['quantity']):
                                awakeningitems.append(x['item_id'])
                            awakeningitemsset.add(x['item_id'])
                        elif x['item_type'] == 'TreasureItem':
                            for i in range(x['quantity']):
                                treasureitems.append(x['item_id'])
                            treasureitemsset.add(x['item_id'])
                        elif x['item_type'] == 'Card':
                            carditems.append(x['item_id'])
                            carditemsset.add(x['item_id'])
                        elif x['item_type'] == 'Point::Stone':
                            stones += 1
                        elif x['item_type'] == 'TrainingField':
                            for i in range(x['quantity']):
                                trainingfields.append(x['item_id'])
                            trainingfieldsset.add(x['item_id'])
                        else:
                            print(x['item_type'])

                    # Print items (GLB name lookup with JP fallback).
                    for x in supportitemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.SupportItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(Fore.CYAN + Style.BRIGHT + config.SupportItems.find(x).name + ' x' \
                              + str(supportitems.count(x)))
                    for x in awakeningitemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.AwakeningItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(Fore.MAGENTA + Style.BRIGHT + config.AwakeningItems.find(x).name + ' x' \
                              + str(awakeningitems.count(x)))
                    for x in trainingitemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TrainingItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(Fore.RED + Style.BRIGHT + config.TrainingItems.find(x).name + ' x' \
                              + str(trainingitems.count(x)))
                    for x in potentialitemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.PotentialItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(config.PotentialItems.find_or_fail(x).name + ' x' \
                              + str(potentialitems.count(x)))
                    for x in treasureitemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TreasureItems.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(Fore.GREEN + Style.BRIGHT + config.TreasureItems.find(x).name + ' x' \
                              + str(treasureitems.count(x)))
                    for x in trainingfieldsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.TrainingFields.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(config.TrainingFields.find(x).name + ' x' \
                              + str(trainingfields.count(x)))
                    for x in carditemsset:
                        try:
                            config.Model.set_connection_resolver(config.db_glb)
                            config.Cards.find_or_fail(x).name
                        except:
                            config.Model.set_connection_resolver(config.db_jp)

                        print(config.Cards.find(x).name + ' x' + str(carditems.count(x)))
                    print(Fore.YELLOW + Style.BRIGHT + 'Stones x' + str(stones))
                if 'gasha_point' in dec_sign:
                    print('Friend Points: ' + str(dec_sign['gasha_point']))

                print('--------------------------')
                print('##############################################')
            window.UnHide()
            window.Refresh()
+
+
+####################################################################
def bulk_daily_logins():
    """Run the selected daily tasks over a user-chosen set of save files."""
    # Ask which daily tasks to perform and which console command to run after.
    layout = [[sg.Text('Choose what gets completed!')],
              [sg.Checkbox('Daily Login', default=True)],
              [sg.Checkbox('Accept Gifts')],
              [sg.Checkbox('Complete Daily Events')],
              [sg.Text('Enter command to execute:')],
              [sg.Input(key='user_input')],
              [sg.Ok()]]

    window = sg.Window('Daily Logins', keep_on_top=True).Layout(layout)
    event, values = window.Read()
    window.Close()
    if event is None:
        return 0

    login, gift, daily_events = values[0], values[1], values[2]
    user_input = values['user_input']
    print(user_input)

    # Collect every save file found under Saves/.
    files = []
    for subdir, dirs, os_files in os.walk("Saves"):
        files.extend(subdir + os.sep + name for name in sorted(os_files))

    ### Window that moves saves between "available" and "chosen" lists.
    chosen_files = []
    column1 = [
        [sg.Listbox(values=(files), size=(30, None), bind_return_key=True, select_mode='multiple', key='select_save')],
        [sg.Button(button_text='Select All', key='all')]]
    column2 = [[sg.Listbox(values=(chosen_files), size=(30, None), bind_return_key=True, select_mode='multiple',
                           key='remove_save')],
               [sg.Button(button_text='Remove All', key='remove_all')]]
    layout = [[sg.Column(column1), sg.Column(column2)],
              [sg.Button(button_text='Start Grind!', key='Done')]]
    window = sg.Window('Saves', keep_on_top=True, font=('Helvetica', 15)).Layout(layout)

    while event != 'Done':
        event, value = window.Read()
        if event is None:
            print(Fore.RED + Style.BRIGHT + 'User terminated daily logins')
            return 0

        if event == 'select_save':
            for save in value['select_save']:
                chosen_files.append(save)
                files.remove(save)
        elif event == 'remove_save':
            for save in value['remove_save']:
                files.append(save)
                chosen_files.remove(save)
        elif event == 'all':
            chosen_files.extend(files)
            del files[:]
        elif event == 'remove_all':
            files.extend(chosen_files)
            del chosen_files[:]

        window.FindElement('select_save').Update(values=sorted(files))
        window.FindElement('remove_save').Update(values=sorted(chosen_files))

    window.Close()

    ### Run the grind once per chosen save.
    for file in chosen_files:
        bulk_daily_save_processor(file, login, gift, daily_events, user_input)
+
+
+####################################################################
def bulk_daily_save_processor(save, login, gift, daily_events, user_input):
    """Sign into one save file and run the selected daily tasks on it.

    save -- path to the save file (5 lines: identifier, AdId, UniqueId,
            platform, client).
    login / gift / daily_events -- booleans from the bulk-login checkboxes.
    user_input -- first console command to execute afterwards ('' to skip).
    """
    # with-block replaces open/close so the handle is released on error too.
    with open(os.path.join(save), 'r') as f:
        config.identifier = f.readline().rstrip()
        config.AdId = f.readline().rstrip()
        config.UniqueId = f.readline().rstrip()
        config.platform = f.readline().rstrip()
        config.client = f.readline().rstrip()

    try:
        refresh_client()
    except:
        # BUG FIX: `save` already contains the full filename; the old message
        # appended a stray ".txt" and had no separator.
        print('Sign in failed: ' + save)
        return 0

    # Run whichever daily tasks were requested.
    if login == True:
        daily_login()
    if gift == True:
        accept_gifts()
    if daily_events == True:
        complete_stage('130001', 0)
        complete_stage('131001', 0)
        complete_stage('132001', 0)
        complete_potential()
        accept_gifts()
        accept_missions()
    print('Completed Daily Grind')

    # Keep executing console commands until a short/empty line is entered.
    # BUG FIX: the original called input() twice per iteration, silently
    # discarding every other command the user typed.
    while len(user_input) > 1:
        user_command_executor(user_input)
        try:
            user_input = input()
        except:
            # stdin may have been redirected by a command; restore and stop.
            sys.stdin = sys.__stdin__
            break
+
+
+##############################################################################
def fresh_save_account():
    """Save the current fresh account's credentials to Saves/fresh/<platform>/.

    Creates the whole Saves directory tree on first use, then prompts for an
    alphanumeric file name and writes the 5-line credential file.
    """
    if not os.path.isdir("Saves"):
        try:
            os.mkdir('Saves')
            os.mkdir('Saves/ios')
            os.mkdir('Saves/android')
            os.mkdir('Saves/Jp')
            os.mkdir('Saves/Jp/ios')
            os.mkdir('Saves/Jp/android')
            os.mkdir('Saves/fresh')
            os.mkdir('Saves/fresh/ios')
            os.mkdir('Saves/fresh/android')
        except:
            print(Fore.RED + Style.BRIGHT + 'Unable to create saves file')
            return 0

    while True:
        save_name = input("What would you like to name the file?")
        # Only alphanumeric names are allowed (keeps paths safe).
        while save_name.isalnum() == 0:
            print(Fore.RED + Style.BRIGHT + "Name not allowed!")
            save_name = input('What would you like to name this save?: ')
        # BUG FIX: the duplicate check looked in Saves/<platform> while the
        # file is actually written to Saves/fresh/<platform> — check the real
        # target path (mirrors Jp_save_account's behavior).
        if os.path.exists('Saves/fresh' + os.sep + config.platform + os.sep + save_name + ".txt"):
            print(Fore.RED + Style.BRIGHT + "File by that name already exists.")
        else:
            try:
                # with-block guarantees the handle is closed even on error.
                with open(os.path.join('Saves/fresh' + os.sep + config.platform + os.sep + save_name + ".txt"),
                          'w') as f:
                    f.write(str(config.identifier) + '\n')
                    f.write(str(config.AdId) + '\n')
                    f.write(str(config.UniqueId) + '\n')
                    f.write(str(config.platform) + '\n')
                    f.write(str(config.client) + '\n')
                print('--------------------------------------------')
                print(Fore.CYAN + Style.BRIGHT + 'Written details to file: ' + save_name)
                print(Fore.RED + Style.BRIGHT + 'If ' + save_name + ' is deleted your account will be lost!')
                print('--------------------------------------------')
                break
            except Exception as e:
                print(e)
+
+
+######################################################################################
def fresh_load_account():
    """Load a fresh-account save (Saves/fresh/<platform>/<name>.txt) and sign in.

    Prompts for the platform and save name, reads the 5-line credential file
    into config, verifies the client version matches, then refreshes the
    client session.
    """
    # Pick the platform the save was created for.
    while 1 == 1:
        print(
            'Choose your operating system (' + Fore.YELLOW + Style.BRIGHT + 'Android: 1' + Style.RESET_ALL + ' or' + Fore.YELLOW + Style.BRIGHT + ' IOS: 2' + Style.RESET_ALL + ')', end='')
        platform = input('')
        # BUG FIX: platform[0] raised IndexError on empty input; slicing is safe.
        if platform[:1].lower() in ['1', '2']:
            if platform[:1].lower() == '1':
                config.platform = 'android'
            else:
                config.platform = 'ios'
            break
        else:
            print(Fore.RED + 'Could not identify correct operating system to use.')

    while 1 == 1:
        save_name = input("What save would you like to load?: ")
        if os.path.isfile('Saves/fresh' + os.sep + config.platform + os.sep + save_name + ".txt"):
            try:
                # BUG FIX: the handle was never closed; the with-block releases
                # it on every path (including the "wrong client" retry loop).
                with open(os.path.join('Saves/fresh', config.platform, save_name + ".txt"), 'r') as f:
                    config.identifier = f.readline().rstrip()
                    config.AdId = f.readline().rstrip()
                    config.UniqueId = f.readline().rstrip()
                    config.platform = f.readline().rstrip()
                    client = f.readline().rstrip()
                if config.client == client:
                    break
                else:
                    print(Fore.RED + Style.BRIGHT + 'Save does not match client version.')

            except Exception as e:
                print(e)

        else:
            print(Fore.RED + Style.BRIGHT + "Could not find " + save_name)
    refresh_client()
+
+
+##########################################################################################
def fresh_bulk_daily_logins():
    """Run the selected daily tasks over saves stored under Saves/fresh/."""
    # First window: which tasks to run, plus an optional console command.
    layout = [[sg.Text('Choose what gets completed!')],
              [sg.Checkbox('Daily Login', default=True)],
              [sg.Checkbox('Accept Gifts')],
              [sg.Checkbox('Complete Daily Events')],
              [sg.Text('Enter command to execute:')],
              [sg.Input(key='user_input')],
              [sg.Ok()]]

    window = sg.Window('Daily Logins', keep_on_top=True).Layout(layout)
    event, values = window.Read()
    window.Close()
    if event is None:
        return 0

    login, gift, daily_events = values[0], values[1], values[2]
    user_input = values['user_input']
    print(user_input)

    # Gather every save file beneath Saves/fresh.
    files = []
    for subdir, dirs, os_files in os.walk("Saves/fresh"):
        files.extend(subdir + os.sep + name for name in sorted(os_files))

    # Second window: shuttle saves between "available" and "chosen".
    chosen_files = []
    column1 = [
        [sg.Listbox(values=(files), size=(30, None), bind_return_key=True, select_mode='multiple', key='select_save')],
        [sg.Button(button_text='Select All', key='all')]]
    column2 = [[sg.Listbox(values=(chosen_files), size=(30, None), bind_return_key=True, select_mode='multiple',
                           key='remove_save')],
               [sg.Button(button_text='Remove All', key='remove_all')]]
    layout = [[sg.Column(column1), sg.Column(column2)],
              [sg.Button(button_text='Start Grind!', key='Done')]]
    window = sg.Window('Saves/fresh', keep_on_top=True, font=('Helvetica', 15)).Layout(layout)

    while event != 'Done':
        event, value = window.Read()
        if event is None:
            print(Fore.RED + Style.BRIGHT + 'User terminated daily logins')
            return 0

        if event == 'select_save':
            for save in value['select_save']:
                chosen_files.append(save)
                files.remove(save)
        elif event == 'remove_save':
            for save in value['remove_save']:
                files.append(save)
                chosen_files.remove(save)
        elif event == 'all':
            chosen_files.extend(files)
            del files[:]
        elif event == 'remove_all':
            files.extend(chosen_files)
            del chosen_files[:]

        window.FindElement('select_save').Update(values=sorted(files))
        window.FindElement('remove_save').Update(values=sorted(chosen_files))

    window.Close()

    for file in chosen_files:
        bulk_daily_save_processor(file, login, gift, daily_events, user_input)
+
+
+########################################################################################################################
def dokkan():
    """Auto-complete every stage of every currently-running event area.

    Fetches the active event list, then walks the user's area/stage maps and
    completes each stage belonging to an active event, refreshing the client
    session every 30 stages so it does not expire mid-grind.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/events'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/events'
    else:
        url = 'http://ishin-production.aktsk.jp/events'
    r = requests.get(url, headers=headers)
    events = r.json()
    # Sorted ids of all active events.
    event_ids = sorted(event['id'] for event in events['events'])
    # Area 135 is excluded (original behavior; reason not documented — TODO confirm).
    if 135 in event_ids:
        event_ids.remove(135)

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user_areas'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user_areas'
    else:
        url = 'http://ishin-production.aktsk.jp/user_areas'
    r = requests.get(url, headers=headers)
    areas = r.json()['user_areas']
    i = 1
    for area in areas:
        if area['area_id'] in event_ids:
            for stage in area['user_sugoroku_maps']:
                # sugoroku_map_id encodes the stage id with the difficulty as
                # its last digit — split it back apart for complete_stage.
                complete_stage(str(stage['sugoroku_map_id'])[:-1], str(stage['sugoroku_map_id'])[-1])
                i += 1
                # Re-authenticate periodically so the session stays valid.
                if i % 30 == 0:
                    refresh_client()
+
+
def make_name():
    """Return a random display name drawn from a fixed pool of names."""
    names = ['NOTE', 'POCO', 'FROZE', 'MIRIGO', 'NICO', 'FORTE', 'VIOLA', 'KABRA', 'BASAKU', 'Ethan', 'INDRA', 'KAGYU',
             'NIMU', 'ORI', 'ARMADILLO', 'GENOME', 'MIZORE', 'BEAT', 'ERITO']

    # BUG FIX: the chosen name was computed and then discarded (the function
    # implicitly returned None) — return it to the caller.
    return random.choice(names)
+
+
def complete_sbr():
    """Interactive menu to run one of the Super Battle Road stage groups."""
    print('------------------------------------')
    while True:
        print(Fore.CYAN + Style.BRIGHT + 'SBR ')
        print('---------------------------------')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + '1-10 :' + Fore.YELLOW + Style.BRIGHT + ' 0')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + '11-20 :' + Fore.YELLOW + Style.BRIGHT + ' 1')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + '21-30 :' + Fore.YELLOW + Style.BRIGHT + ' 2')
        command = input('Enter your choice: ')
        if command == '0':
            print(' ')
            extra.complete_sbr()
            break
        elif command == '1':
            print(' ')
            extra.sbr_next()
            break
        elif command == '2':
            print(' ')
            extra.sbr_new()
            # BUG FIX: the missing break left the menu looping forever after
            # option 2 finished (options 0 and 1 both break).
            break
+
+
+
def potara():
    """Clear the Potara mission groups, changing teams between the groups
    that require a specific link-skill or type line-up."""
    # Each entry: (banner message, pause for a team change first?, stages),
    # where each stage is a (stage_id, difficulty) pair.
    mission_groups = [
        ('Clear without any Support Items', False,
         [('10002', 2), ('11002', 2), ('12002', 2), ('13002', 2), ('14002', 2),
          ('16002', 2), ('17002', 2), ('18002', 2), ('19002', 3), ('20002', 3),
          ('22002', 3), ('23002', 3), ('24002', 3), ('25002', 3), ('26002', 3)]),
        ('6 Characters with Link Skill: Z-Fighters', True,
         [('10004', 2), ('10006', 2), ('10008', 2), ('11004', 2), ('11006', 2), ('11008', 2)]),
        ('6 Characters with Link Skill: Prodigies', True,
         [('12004', 2), ('12006', 2), ('12008', 2), ('13004', 2), ('13006', 2), ('13008', 2)]),
        ('6 Characters with Link Skill: Saiyan Warrior Race', True,
         [('14004', 2), ('14006', 2), ('14008', 2)]),
        ('Clear with a Vegito on your team', True,
         [('15002', 2), ('21002', 3), ('27002', 3)]),
        ('6 Characters with Link Skill: Golden Warrior', True,
         [('16004', 2), ('16006', 2), ('16008', 2), ('17004', 2), ('17006', 2), ('17008', 2)]),
        ('6 Characters with Link Skill: Shocking Speed', True,
         [('18004', 2), ('18006', 2), ('18008', 2), ('19004', 3), ('19006', 3), ('19008', 3)]),
        ('6 Characters with Link Skill: Fused Fighter', True,
         [('20004', 3), ('20006', 3), ('20008', 3)]),
        ('5 Characters with Link Skill: Fierce Battle', True,
         [('22006', 3), ('23006', 3), ('24006', 3), ('25006', 3), ('26006', 3)]),
        ('AGL Type with Link Skill: Fierce Battle', True,
         [('22004', 3), ('22008', 3)]),
        ('TEQ Type with Link Skill: Fierce Battle', True,
         [('23004', 3), ('23008', 3)]),
        ('INT Type with Link Skill: Fierce Battle', True,
         [('24004', 3), ('24008', 3)]),
        ('STR Type with Link Skill: Fierce Battle', True,
         [('25004', 3), ('25008', 3)]),
        ('PHY Type with Link Skill: Fierce Battle', True,
         [('26004', 3), ('26008', 3)]),
    ]

    for message, needs_team_change, stages in mission_groups:
        print('---------------------------------')
        print(Fore.YELLOW + Style.BRIGHT + message)
        if needs_team_change:
            change_team()
        for stage_id, difficulty in stages:
            complete_stage(stage_id, difficulty)
+
+
def new_transfer():
    """Print a summary of the signed-in account, then issue a transfer code."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/user'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/user'
    else:
        url = 'http://ishin-production.aktsk.jp/user'

    # Fetch and unpack the account record.
    user = requests.get(url, headers=headers).json()
    account = user['user']
    print('Name: ' + str(account['name']))
    print('Rank: ' + str(account['rank']))
    print('Account OS: ' + config.platform.upper())
    print('User ID: ' + str(account['id']))
    get_transfer_code()
+
+
+
def rankup():
    """Grind stage 27003 until the account reaches a user-chosen rank.

    Reads the target rank and current EXP multiplier from stdin, converts
    ranks to total-EXP via the Ranks table, then repeats the stage the
    computed number of times.
    """
    import math

    # Gets current level and level to reach.
    reachlv = input('\n\nAt what level are you trying to reach?\n')
    ranklv = get_user()['user']['rank']
    multiplier = input('\nWhat is the current multiplier? ')
    # Converts ranks into exp by getting table info.
    exp_curr = config.Ranks.find(ranklv).exp_total
    exp_reach = config.Ranks.find(reachlv).exp_total
    exp_needed = exp_reach - exp_curr
    # 27600 is the base EXP of the farmed stage; ttc = times to complete.
    # Bug fix: the old round(ttc + 0.999) added a spurious extra run whenever
    # the division came out exact; math.ceil is the correct rounding.
    ttc = math.ceil(exp_needed / (27600.0 * float(multiplier)))
    print('Exp needed= ' + str(exp_needed))
    # Bug fix: added the missing space before the count in the message.
    print('The last stage will be done ' + str(ttc) + ' times.')
    for i in range(int(ttc)):
        complete_stage('27003', '3', kagi=None)
    print(Fore.GREEN + 'Account Rank:' + str(get_user()['user']['rank']))
+
+
+#######################################################################################################################
def Jp_save_account():
    """Persist the current JP account credentials to Saves/Jp/<platform>/<name>.txt.

    Prompts for an alphanumeric save name, refuses to overwrite an existing
    file, and writes the identifier / ad id / unique id / platform / client
    lines the loader expects (one value per line, in that order).
    """
    # Ensure the full save-directory tree exists. The original only created
    # directories when "Saves" itself was missing, so a partially-present
    # tree made every write fail; makedirs(exist_ok=True) handles both cases.
    try:
        for d in ('Saves/ios', 'Saves/android',
                  'Saves/Jp/ios', 'Saves/Jp/android',
                  'Saves/fresh/ios', 'Saves/fresh/android'):
            os.makedirs(d, exist_ok=True)
    except OSError:
        print(Fore.RED + Style.BRIGHT + 'Unable to create saves file')
        return 0

    while True:
        save_name = input("What would you like to name the file?")
        # Restrict to alphanumeric names so the path stays well-formed.
        while not save_name.isalnum():
            print(Fore.RED + Style.BRIGHT + "Name not allowed!")
            save_name = input('What would you like to name this save?: ')
        path = os.path.join('Saves/Jp', config.platform, save_name + ".txt")
        if os.path.exists(path):
            print(Fore.RED + Style.BRIGHT + "File by that name already exists.")
        else:
            try:
                # "with" guarantees the handle is closed even on write errors
                # (the original leaked it when f.write raised).
                with open(path, 'w') as f:
                    f.write(str(config.identifier) + '\n')
                    f.write(str(config.AdId) + '\n')
                    f.write(str(config.UniqueId) + '\n')
                    f.write(str(config.platform) + '\n')
                    f.write(str(config.client) + '\n')
                print('--------------------------------------------')
                print(Fore.CYAN + Style.BRIGHT + 'Written details to file: ' + save_name + ".txt")
                print(Fore.RED + Style.BRIGHT + 'If ' + save_name + ' is deleted your account will be lost!')
                print('--------------------------------------------')
                break
            except Exception as e:
                print(e)
+
+
def Jp_load_account():
    """Load a saved JP account from Saves/Jp/<platform>/<name>.txt and refresh the client.

    The save file layout is one value per line: identifier, AdId, UniqueId,
    platform, client (must match the running client version).
    """
    # Pick the platform first; save files are segregated by OS folder.
    while True:
        print(
            'Choose your operating system (' + Fore.YELLOW + Style.BRIGHT + 'Android: 1' + Style.RESET_ALL + ' or' + Fore.YELLOW + Style.BRIGHT + ' IOS: 2' + Style.RESET_ALL + ')', end='')
        platform = input('')
        # Bug fix: guard against empty input before indexing platform[0]
        # (the original raised IndexError on a bare Enter).
        if platform and platform[0] in ('1', '2'):
            config.platform = 'android' if platform[0] == '1' else 'ios'
            break
        print(Fore.RED + 'Could not identify correct operating system to use.')

    while True:
        save_name = input("What save would you like to load?: ")
        path = os.path.join('Saves/Jp', config.platform, save_name + ".txt")
        if os.path.isfile(path):
            try:
                # "with" closes the handle even if a readline fails.
                with open(path, 'r') as f:
                    config.identifier = f.readline().rstrip()
                    config.AdId = f.readline().rstrip()
                    config.UniqueId = f.readline().rstrip()
                    config.platform = f.readline().rstrip()
                    client = f.readline().rstrip()
                if config.client == client:
                    break
                print(Fore.RED + Style.BRIGHT + 'Save does not match client version.')
            except Exception as e:
                print(e)
        else:
            print(Fore.RED + Style.BRIGHT + "Could not find " + save_name)
    refresh_client()
+
+
+##########################################################################################
def Jp_bulk_daily_logins():
    """Run daily-login chores across many saved JP accounts.

    Flow: (1) an options dialog picks which chores to run plus a free-form
    command; (2) a two-list picker selects save files found under Saves/Jp;
    (3) every chosen save is handed to bulk_daily_save_processor().
    Returns 0 when the user closes either window.
    """
    # Options dialog. The three checkboxes have no explicit keys, so they are
    # read back positionally as values[0..2] below.
    layout = [[sg.Text('Choose what gets completed!')],
              [sg.Checkbox('Daily Login', default=True)],
              [sg.Checkbox('Accept Gifts')],
              [sg.Checkbox('Complete Daily Events')],
              [sg.Text('Enter command to execute:')],
              [sg.Input(key='user_input')],
              [sg.Ok()]]

    window = sg.Window('Daily Logins', keep_on_top=True).Layout(layout)
    event, values = window.Read()
    window.Close()
    # Window closed via the X button -> abort.
    if event == None:
        return 0

    login = values[0]          # Checkbox: perform daily login
    gift = values[1]           # Checkbox: accept gifts
    daily_events = values[2]   # Checkbox: complete daily events
    user_input = values['user_input']
    print(user_input)

    # Fetch saves to choose from (recursively walks both platform subfolders).
    files = []
    for subdir, dirs, os_files in os.walk("Saves/Jp"):
        for file in sorted(os_files):
            files.append(subdir + os.sep + file)

    ### Create window that manages saves selections
    # Layout: left list = available saves, right list = chosen saves.
    # Clicking an entry fires its key thanks to bind_return_key=True.
    chosen_files = []
    column1 = [
        [sg.Listbox(values=(files), size=(30, None), bind_return_key=True, select_mode='multiple', key='select_save')],
        [sg.Button(button_text='Select All', key='all')]]
    column2 = [[sg.Listbox(values=(chosen_files), size=(30, None), bind_return_key=True, select_mode='multiple',
                           key='remove_save')],
               [sg.Button(button_text='Remove All', key='remove_all')]]
    layout = [[sg.Column(column1), sg.Column(column2)],
              [sg.Button(button_text='Start Grind!', key='Done')]]
    window = sg.Window('Saves/fresh', keep_on_top=True, font=('Helvetica', 15)).Layout(layout)

    # Event loop: moves entries between the two lists until 'Done'.
    # NOTE(review): 'event' still holds the first dialog's result on entry,
    # which is never 'Done', so the loop always runs at least once.
    while event != 'Done':
        event, value = window.Read()
        if event == 'select_save':
            # Move the clicked save(s) from "available" to "chosen".
            chosen_files.extend(value['select_save'])
            for save in value['select_save']:
                files.remove(save)

        if event == 'remove_save':
            # Move the clicked save(s) back to "available".
            files.extend(value['remove_save'])
            for save in value['remove_save']:
                chosen_files.remove(save)

        if event == 'all':
            chosen_files.extend(files)
            files[:] = []

        if event == 'remove_all':
            files.extend(chosen_files)
            chosen_files[:] = []

        if event == None:
            # Window closed mid-selection -> abort without processing.
            print(Fore.RED + Style.BRIGHT + 'User terminated daily logins')
            return 0

        # Redraw both listboxes after every mutation.
        window.FindElement('select_save').Update(values=sorted(files))
        window.FindElement('remove_save').Update(values=sorted(chosen_files))

    window.Close()

    # Process every chosen save with the options gathered above.
    for file in chosen_files:
        bulk_daily_save_processor(file, login, gift, daily_events, user_input)
+
+
def Jp_transfer_account():
    """Claim an account via a transfer (link) code, then save and reload it.

    PUTs the device description to /auth/link_codes/<code>; on success the
    response carries base64-encoded identifiers which become the new session
    identity, persisted via Jp_save_account().
    """
    # Determine correct platform to use.
    set_platform()

    transfercode = input('Enter your transfer code: ')

    # Fresh device identifiers for the claiming device.
    config.AdId = packet.guid()['AdId']
    config.UniqueId = packet.guid()['UniqueId']
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    data = {'eternal': True, 'old_user_id': '', 'user_account': {
        'device': 'samsung',
        'device_model': 'SM-G930V',
        'os_version': '6.0',
        'platform': config.platform,
        'unique_id': config.UniqueId,
    }}
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/auth/link_codes/' \
            + str(transfercode)
    else:
        url = 'http://ishin-production.aktsk.jp/auth/link_codes/' \
            + str(transfercode)
    print('URL: ' + url)
    r = requests.put(url, data=json.dumps(data), headers=headers)
    body = r.json()  # parse once instead of re-parsing per access
    if 'error' in body:
        # Bug fix: the original printed the error and then fell through to
        # body['identifiers'], crashing with KeyError. Stop here instead.
        print(body)
        return
    identifier = base64.b64decode(body['identifiers']).decode('utf-8')
    print(identifier)
    config.identifier = identifier

    Jp_save_account()
    refresh_client()
+
+
+########################################################################################################################
def Gt():
    """Clear the GT event stage list (areas 173xxx and 172xxx).

    Runs through a fixed script: plain stages are completed directly, while
    restricted levels first print the entry requirement and pause for a team
    change via change_team().
    """
    # Each step is (op, arg): 'stage' completes a stage at difficulty 1,
    # 'msg' prints a separator, 'req' prints a requirement then swaps teams.
    script = (
        ('stage', '173001'),
        ('stage', '173002'),
        ('msg', '---------------------------------'),
        ('req', '1 Characters with Link Skill:Turtle School '),
        ('stage', '173003'),
        ('req', '1 Characters with Link Skill:Pure Saiyan '),
        ('stage', '173004'),
        ('req', '1 Characters with Link Skill:Super Saiyan '),
        ('stage', '173005'),
        ('req', ' Use a team made up of only Super characters of each type in level 6 '),
        ('stage', '173006'),
        ('req', 'Use a team made up of only characters from the "Buu Saga" category in level 7'),
        ('stage', '173007'),
        ('req', 'Only teams that include "Giru" can enter this stage.'),
        ('stage', '172001'),
        ('stage', '172002'),
        ('stage', '172003'),
        ('req', 'Teams that include the following characters cannot enter this stage. - Pan (GT) - Pan (GT) (Honey) - Goku (GT) & Pan (GT) & Trunks (GT)'),
        ('stage', '172004'),
        ('stage', '172005'),
        ('stage', '172006'),
        ('stage', '172007'),
    )
    for op, arg in script:
        if op == 'stage':
            complete_stage(arg, 1)
        elif op == 'msg':
            print(arg)
        else:
            print(Fore.YELLOW + Style.BRIGHT + arg)
            change_team()
+
+
def get_kagi():
    """Spend event keys (kagi) to complete a user-chosen stage.

    Resolves the key item id for the stage's area tab, checks the player owns
    at least one key, then runs the stage the requested number of times with
    that key attached.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 6.0; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/eventkagi_items'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/eventkagi_items'
    else:
        url = 'http://ishin-production.aktsk.jp/eventkagi_items'
    r = requests.get(url, headers=headers)
    stageid = input('\nWhich Stage do you want to complete?: ')
    kagi_items = r.json()['eventkagi_items']

    # Map stage -> area -> category -> area tab; the tab id is the kagi item id.
    area_id = str(config.Quests.find(stageid).area_id)
    area_category = config.Area.find(area_id).category
    kagi_id = None  # bug fix: was left unbound (NameError) when no tab matched
    for tab in config.AreaTabs.all():
        j = json.loads(tab.area_category_ids)
        if area_category in j['area_category_ids']:
            kagi_id = int(tab.id)
            print('Kagi ID: ' + str(tab.id))
            break
    if kagi_id is None:
        print(Fore.RED + '\nThere are not enough keys or that stage does not exist for the keys')
        return None

    for kagi in kagi_items:
        if kagi['eventkagi_item_id'] == kagi_id:
            if kagi['quantity'] > 0:
                print('Available')
                diff = input('\nstage difficulty?: \n')
                times = input('\nhow many times do you want to complete?: \n')
                for i in range(int(times)):
                    complete_stage(stageid, diff, kagi=kagi_id)
            else:
                print(Fore.RED + '\nThere are not enough keys or that stage does not exist for the keys')
    return None
+
+
def limit_stage():
    """Clear both restricted event paths: Super units (319xxx) and Extreme units (320xxx).

    Bug fix: colorama's Fore has no PURPLE attribute, so the two Fore.PURPLE
    lines in the original raised AttributeError at runtime; Fore.MAGENTA is
    the closest real constant.
    """
    print(Fore.CYAN + Style.BRIGHT + 'Clear all stages with SUPER UNITS ONLY')
    change_team()
    for stage in ('319001', '319002', '319003', '319004',
                  '319005', '319006', '319007', '319008'):
        complete_stage(stage, 1)
    # Each type-restricted stage needs a fresh team before entering.
    print(Fore.MAGENTA + Style.BRIGHT + 'Clear without PHY character')
    change_team()
    complete_stage('319009', 1)
    print(Fore.YELLOW + Style.BRIGHT + 'Clear without AGL character')
    change_team()
    complete_stage('319010', 1)
    print(Fore.RED + Style.BRIGHT + 'Clear without INT character')
    change_team()
    complete_stage('319011', 1)
    print(Fore.GREEN + Style.BRIGHT + 'Clear without TEQ character')
    change_team()
    complete_stage('319012', 1)
    print(Fore.CYAN + Style.BRIGHT + 'Clear without STR character')
    change_team()
    complete_stage('319013', 1)
    print(Fore.WHITE + Style.BRIGHT + 'Clear with A CHARACTER OF EACH TYPE')
    change_team()
    for stage in ('319014', '319015', '319016', '319017', '319018', '319019',
                  '319020', '319021', '319022', '319023', '319024'):
        complete_stage(stage, 2)

    print(Fore.CYAN + Style.BRIGHT + 'Clear all stages with EXTREME UNITS ONLY')
    change_team()
    for stage in ('320001', '320002', '320003', '320004',
                  '320005', '320006', '320007', '320008'):
        complete_stage(stage, 1)
    print(Fore.MAGENTA + Style.BRIGHT + 'Clear without INT character')
    change_team()
    complete_stage('320009', 1)
    print(Fore.YELLOW + Style.BRIGHT + 'Clear without PHY character')
    change_team()
    complete_stage('320010', 1)
    print(Fore.RED + Style.BRIGHT + 'Clear without STR character')
    change_team()
    complete_stage('320011', 1)
    print(Fore.GREEN + Style.BRIGHT + 'Clear without TEQ character')
    change_team()
    complete_stage('320012', 1)
    print(Fore.CYAN + Style.BRIGHT + 'Clear without AGL character')
    change_team()
    complete_stage('320013', 1)
    print(Fore.WHITE + Style.BRIGHT + 'Clear with A CHARACTER OF EACH TYPE')
    change_team()
    for stage in ('320014', '320015', '320016', '320017', '320018', '320019',
                  '320020', '320021', '320022', '320023', '320024'):
        complete_stage(stage, 2)
+
+
def change_supporter():
    """Interactively pick a support leader card and upload the choice to the server.

    Fetches the user's card box, enriches each card with rarity / type /
    category / link-skill data from the local glb+jp databases, presents a
    filterable PySimpleGUI picker (filter by category and link skill), then
    PUTs the selected unique card id(s) to /support_leaders.
    """
    # Needs to have translation properly implemented!

    ### Get user cards
    print(Fore.CYAN + Style.BRIGHT + 'Fetching user cards...')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 4.4; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/cards'),
        'Content-type': 'application/json',
        'X-Language': 'en',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/cards'
    else:
        url = 'http://ishin-production.aktsk.jp/cards'
    r = requests.get(url, headers=headers)
    master_cards = r.json()['cards']
    print(Fore.GREEN + Style.BRIGHT + 'Done...')

    # Lookup tables replace the old if/elif chains. Using .get() with a
    # fallback also fixes the original's NameError when a card had an
    # unexpected rarity/element value (the variables were left unbound).
    rarities = {0: 'N', 1: 'R', 2: 'SR', 3: 'SSR', 4: 'UR', 5: 'LR'}
    elements = {'0': '[AGL] ', '1': '[TEQ] ', '2': '[INT] ',
                '3': '[STR] ', '4': '[PHY] '}

    ### Sort user cards into a list of dictionaries with attributes
    print(Fore.CYAN + Style.BRIGHT + 'Fetching card attributes...')
    card_list = []
    for card in master_cards:
        # Card rows may live in either the global or the JP database; try
        # global first and fall back to JP.
        try:
            config.Model.set_connection_resolver(config.db_glb)
            db_card = config.Cards.find_or_fail(card['card_id'])
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            db_card = config.Cards.find_or_fail(card['card_id'])

        rarity = rarities.get(db_card.rarity, '?')
        # Last digit of the element code encodes the card's type.
        card_type = elements.get(str(db_card.element)[-1], '[?] ')

        ### Get card categories list
        categories = []
        # Get category ids given card id.
        card_card_categories = config.CardCardCategories.where(
            'card_id', '=', db_card.id).get()
        try:
            for category in card_card_categories:
                try:
                    config.Model.set_connection_resolver(config.db_glb)
                    categories.append(config.CardCategories.find(
                        category.card_category_id).name)
                except Exception:
                    config.Model.set_connection_resolver(config.db_jp)
                    categories.append(config.CardCategories.find(
                        category.card_category_id).name)
        except Exception:
            pass

        ### Get card link_skills list.
        # One loop replaces seven copy-pasted try/except blocks for
        # link_skill1_id .. link_skill7_id.
        link_skills = []
        for slot in range(1, 8):
            skill_id = getattr(db_card, 'link_skill{}_id'.format(slot), None)
            try:
                config.Model.set_connection_resolver(config.db_glb)
                link_skills.append(config.LinkSkills.find(skill_id).name)
            except AttributeError:
                # .find() returned None (id absent in glb db) -> retry on JP.
                try:
                    config.Model.set_connection_resolver(config.db_jp)
                    link_skills.append(config.LinkSkills.find(skill_id).name)
                except Exception:
                    pass
            except Exception:
                pass

        card_list.append({
            'ID': db_card.id,
            'Rarity': rarity,
            'Name': db_card.name,
            'Type': card_type,
            'Cost': db_card.cost,
            'Hercule': db_card.is_selling_only,
            'HP': db_card.hp_init,
            'Categories': categories,
            'Links': link_skills,
            'UniqueID': card['id']
        })
    print(Fore.GREEN + Style.BRIGHT + "Done...")

    ### Sort cards (stable sorts: final order is Cost, then Rarity, then Name)
    print(Fore.CYAN + Style.BRIGHT + "Sorting cards...")
    card_list = sorted(card_list, key=lambda k: k['Name'])
    card_list = sorted(card_list, key=lambda k: k['Rarity'])
    card_list = sorted(card_list, key=lambda k: k['Cost'])
    print(Fore.GREEN + Style.BRIGHT + "Done...")

    ### Define cards to display
    cards_to_display_dicts = []
    cards_to_display = []
    # Skip Hercule statues and very-low-HP filler (training items / kais?).
    for char in card_list:
        if char['Hercule'] != 1 and char['HP'] > 5:
            cards_to_display_dicts.append(char)
            cards_to_display.append(char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(char['UniqueID']))

    ### Define links to display (union of JP names and glb/jp lookups)
    links_master = []
    config.Model.set_connection_resolver(config.db_jp)
    for link in config.LinkSkills.all():
        links_master.append(link.name)
        try:
            config.Model.set_connection_resolver(config.db_glb)
            links_master.append(config.LinkSkills.find_or_fail(link.id).name)
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            links_master.append(config.LinkSkills.find_or_fail(link.id).name)

    links_to_display = sorted(links_master)

    ### Define categories to display
    categories_master = []
    config.Model.set_connection_resolver(config.db_jp)
    for category in config.CardCategories.all():
        try:
            config.Model.set_connection_resolver(config.db_glb)
            categories_master.append(config.CardCategories.find_or_fail(category.id).name)
        except Exception:
            config.Model.set_connection_resolver(config.db_jp)
            categories_master.append(config.CardCategories.find_or_fail(category.id).name)

    categories_to_display = sorted(categories_master)

    ### Define window layout
    col1 = [[sg.Listbox(values=(cards_to_display), size=(30, 20), key='CARDS')],
            [sg.Listbox(values=([]), size=(30, 6), key='CARDS_CHOSEN')],
            [sg.Button(button_text='Set as Supporter', key='choose_card')]]

    col2 = [[sg.Listbox(values=(sorted(categories_to_display)), size=(25, 20), key='CATEGORIES')],
            [sg.Listbox(values=([]), size=(25, 6), key='CATEGORIES_CHOSEN')],
            [sg.Button(button_text='Choose Categories', key='choose_categories'),
             sg.Button(button_text='Clear Categories', key='clear_categories')]]

    col3 = [[sg.Listbox(values=(sorted(links_to_display)), size=(25, 20), key='LINKS')],
            [sg.Listbox(values=([]), size=(25, 6), key='LINKS_CHOSEN')],
            [sg.Button(button_text='Choose Links', key='choose_links'),
             sg.Button(button_text='Clear Links', key='clear_links')]]

    layout = [[sg.Column(col1), sg.Column(col2), sg.Column(col3)]]
    window = sg.Window('Supporter Update', grab_anywhere=True, keep_on_top=True).Layout(layout)

    ### Begin window loop
    chosen_links = []
    chosen_categories = []

    chosen_cards_ids = []
    chosen_cards_unique_ids = []
    chosen_cards_names = []
    chosen_cards_to_display = []

    # Loop until a card is chosen (or the window is closed).
    while len(chosen_cards_ids) < 1:
        event, values = window.Read()

        if event is None:
            return 0

        if event == 'choose_card':
            if len(values['CARDS']) < 1:
                continue
            # The display line encodes "name | card id | unique id".
            chosen_line = values['CARDS'][0]
            char_name, char_id, char_unique_id = chosen_line.split(' | ')
            chosen_cards_ids.append(int(char_id))
            chosen_cards_unique_ids.append(int(char_unique_id))
            try:
                config.Model.set_connection_resolver(config.db_glb)
                chosen_cards_names.append(config.Cards.find(char_id).name)
            except Exception:
                config.Model.set_connection_resolver(config.db_jp)
                chosen_cards_names.append(config.Cards.find(char_id).name)

            # Chosen cards to display in lower box.
            chosen_cards_to_display.append(chosen_line)

        if event == 'choose_categories':
            for category in values['CATEGORIES']:
                chosen_categories.append(category)
                categories_to_display.remove(category)

        if event == 'clear_categories':
            categories_to_display.extend(chosen_categories)
            chosen_categories[:] = []
            categories_to_display = sorted(categories_to_display)

        if event == 'choose_links':
            for link in values['LINKS']:
                chosen_links.append(link)
                links_to_display.remove(link)

        if event == 'clear_links':
            links_to_display.extend(chosen_links)
            chosen_links[:] = []
            links_to_display = sorted(links_to_display)
            # Bug fix: the original had a stray `break` here, which exited
            # the loop with no card chosen and PUT an empty supporter list.

        ### Re-populate cards to display, checking filter criteria
        cards_to_display[:] = []
        for char in cards_to_display_dicts:
            if char['Name'] in chosen_cards_names:
                continue

            # Keep only cards that carry every chosen link...
            if len(set(chosen_links) & set(char['Links'])) != len(chosen_links):
                continue

            # ...and belong to every chosen category.
            if len(set(chosen_categories) & set(char['Categories'])) != len(chosen_categories):
                continue

            cards_to_display.append(char['Type'] + char['Rarity'] + ' ' + char['Name'] + ' | ' + str(char['ID']) + ' | ' + str(char['UniqueID']))

        ### Update window elements
        window.FindElement('CARDS').Update(values=cards_to_display)
        window.FindElement('CARDS_CHOSEN').Update(values=chosen_cards_to_display)
        window.FindElement('CATEGORIES').Update(values=categories_to_display)
        window.FindElement('CATEGORIES_CHOSEN').Update(values=chosen_categories)
        window.FindElement('LINKS').Update(values=links_to_display)
        window.FindElement('LINKS_CHOSEN').Update(values=chosen_links)

    window.Close()

    ### Send selected supporter to bandai
    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 4.4; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('PUT', '/support_leaders'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/support_leaders'
    else:
        url = 'http://ishin-production.aktsk.jp/support_leaders'
    data = {'support_leader_ids': chosen_cards_unique_ids}
    r = requests.put(url, data=json.dumps(data), headers=headers)
    if 'error' in r.json():
        print(Fore.RED + Style.BRIGHT + str(r.json()))
    else:
        print(chosen_cards_names)
        print(Fore.GREEN + Style.BRIGHT + "Supporter updated!")
+
+
def lr_farm():
    """Menu for farming LR/TUR/UR awakening medals.

    Prints the choices, reads one from stdin, and dispatches to the matching
    farming routine. Unknown input redraws the menu.
    """
    # choice -> farming routine; replaces a 16-arm if/elif chain.
    actions = {
        '0': aa.ss,
        '1': Lrfarm.mv,
        '2': Lrfarm.gv,
        '3': Lrfarm.b,
        '4': Lrfarm.c,
        '5': Lrfarm.tm,
        '6': Lrfarm.bw,
        '7': Lrfarm.gg,
        '8': Lrfarm.vg,
        '9': Lrfarm.tg,
        '10': Lrfarm.gh,
        '11': Lrfarm.ggg,
        '12': Lrfarm.bd,
        '13': Lrfarm.gb,
        '14': Lrfarm.gf,
        '15': Lrfarm.by,
    }

    print('------------------------------------')
    while True:
        print(Fore.CYAN + Style.BRIGHT + 'Farm Lr,Tur,Ur Awakening Medals ')
        print('---------------------------------')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'TEQ Super Saiyan God SS Vegito :' + Fore.YELLOW + Style.BRIGHT + ' 0')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Majin Vegeta :' + Fore.YELLOW + Style.BRIGHT + ' 1')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Super Saiyan Goku & Super Saiyan Vegeta :' + Fore.YELLOW + Style.BRIGHT + ' 2')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Full Power Boujack (Galactic Warrior) :' + Fore.YELLOW + Style.BRIGHT + ' 3')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Cell (Perfect Form) & Cell Jr :' + Fore.YELLOW + Style.BRIGHT + ' 4')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Trunks (Teen) (Future) & Mai (Future) :' + Fore.YELLOW + Style.BRIGHT + ' 5')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Beerus & Whis :' + Fore.YELLOW + Style.BRIGHT + ' 6')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Super Saiyan Gohan (Teen) & Super Saiyan Goten (Kid) :' + Fore.YELLOW + Style.BRIGHT + ' 7')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Super Saiyan Goku (Angel) & Super Saiyan Vegeta (Angel) :' + Fore.YELLOW + Style.BRIGHT + ' 8')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Trunks (Kid) & Goten (Kid) :' + Fore.YELLOW + Style.BRIGHT + ' 9')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Super Saiyan 2 Gohan :' + Fore.YELLOW + Style.BRIGHT + ' 10')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Super Saiyan 3 Goku :' + Fore.YELLOW + Style.BRIGHT + ' 11')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Bardock :' + Fore.YELLOW + Style.BRIGHT + ' 12')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Goku Black (Super Saiyan Rosé) & Zamasu :' + Fore.YELLOW + Style.BRIGHT + ' 13')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Goku & Frieza (Final Form) (Angel):' + Fore.YELLOW + Style.BRIGHT + ' 14')
        print('---------------------------------')
        print(Fore.CYAN + Style.BRIGHT + 'Lr Legendary Super Saiyan Broly :' + Fore.YELLOW + Style.BRIGHT + ' 15')
        print('---------------------------------')

        command = input('Enter your choice: ')
        if command in actions:
            print(' ')
            actions[command]()
            # Bug fix: option '15' previously fell through without a break and
            # re-displayed the menu after the run finished.
            break
        # Unknown input: loop and redraw the menu.
+
+def ur_farm():
+
+ print('------------------------------------')
+ while True:
+ print(Fore.CYAN + Style.BRIGHT + 'Farm Tur,Ur Awakening Medals ')
+ print('---------------------------------')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ Super Saiyan God SS Vegito:' + Fore.YELLOW + Style.BRIGHT + ' 0')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY Super Saiyan Broly :' + Fore.YELLOW + Style.BRIGHT + ' 1')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR Super Gogeta :' + Fore.YELLOW + Style.BRIGHT + ' 2')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL Super Saiyan Gogeta :' + Fore.YELLOW + Style.BRIGHT + ' 3')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT SSJ3 Bardock:' + Fore.YELLOW + Style.BRIGHT + ' 4')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR SSJ4 Goku :' + Fore.YELLOW + Style.BRIGHT + ' 5')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT UI Goku :' + Fore.YELLOW + Style.BRIGHT + ' 6')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL SSJ4 Vegeta) :' + Fore.YELLOW + Style.BRIGHT + ' 7')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY FP Frieza :' + Fore.YELLOW + Style.BRIGHT + ' 8')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ Golden Frieza :' + Fore.YELLOW + Style.BRIGHT + ' 9')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL SSJ3 Goku :' + Fore.YELLOW + Style.BRIGHT + ' 10')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ SSJ4 Gogeta :' + Fore.YELLOW + Style.BRIGHT + ' 11')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT Super Gogeta :' + Fore.YELLOW + Style.BRIGHT + ' 12')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'SSJ3 Gotenks :' + Fore.YELLOW + Style.BRIGHT + ' 13')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ FP SSJ4 Goku:' + Fore.YELLOW + Style.BRIGHT + ' 14')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR Jiren :' + Fore.YELLOW + Style.BRIGHT + ' 15')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT Golden Frieza:' + Fore.YELLOW + Style.BRIGHT + ' 16')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY Android 17 :' + Fore.YELLOW + Style.BRIGHT + ' 17')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ Hit:' + Fore.YELLOW + Style.BRIGHT + ' 18')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL SSBE Vegeta :' + Fore.YELLOW + Style.BRIGHT + ' 19')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY Kid Buu:' + Fore.YELLOW + Style.BRIGHT + ' 20')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT Kid Buu:' + Fore.YELLOW + Style.BRIGHT + ' 21')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ SSJ3 Goku (Angel):' + Fore.YELLOW + Style.BRIGHT + ' 22')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY Goku Black :' + Fore.YELLOW + Style.BRIGHT + ' 23')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT Goku Black:' + Fore.YELLOW + Style.BRIGHT + ' 24')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ SSG Goku:' + Fore.YELLOW + Style.BRIGHT + ' 25')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR SSG Vegeta' + Fore.YELLOW + Style.BRIGHT + ' 26')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL SSGSS Goku :' + Fore.YELLOW + Style.BRIGHT + ' 27')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR Toppo:' + Fore.YELLOW + Style.BRIGHT + ' 28')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR Rose Goku Black' + Fore.YELLOW + Style.BRIGHT + ' 29')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY SSGSS Vegito' + Fore.YELLOW + Style.BRIGHT + ' 30')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR SSJ3 Goku :' + Fore.YELLOW + Style.BRIGHT + ' 31')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ SSJ3 Broly' + Fore.YELLOW + Style.BRIGHT + ' 32')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL Transgoku :' + Fore.YELLOW + Style.BRIGHT + ' 33')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR SSJ3 Vegeta:' + Fore.YELLOW + Style.BRIGHT + ' 34')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'PHY SSJ3 Gotenks' + Fore.YELLOW + Style.BRIGHT + ' 35')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL Turles' + Fore.YELLOW + Style.BRIGHT + ' 36')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'STR Janemba :' + Fore.YELLOW + Style.BRIGHT + ' 37')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'INT Janemba:' + Fore.YELLOW + Style.BRIGHT + ' 38')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'TEQ TransFrieza' + Fore.YELLOW + Style.BRIGHT + ' 39')
+ print('---------------------------------')
+ print(Fore.CYAN + Style.BRIGHT + 'AGL Broly' + Fore.YELLOW + Style.BRIGHT + ' 40')
+ print('---------------------------------')
+ print('---------------------------------')
+
+ command = input('Enter your choice: ')
+ if command == '0':
+ print(' ')
+ aa.ss()
+ break
+ elif command == '1':
+ print(' ')
+ aa.sss()
+ break
+ elif command == '2':
+ print(' ')
+ aa.ssss()
+ break
+ elif command == '3':
+ print(' ')
+ aa.s()
+ break
+ elif command == '4':
+ print(' ')
+ aa.a()
+ break
+ elif command == '5':
+ print(' ')
+ aa.aa()
+ break
+ elif command == '6':
+ print(' ')
+ aa.aaa()
+ break
+ elif command == '7':
+ print(' ')
+ aa.aaaa()
+ break
+ elif command == '8':
+ print(' ')
+ aa.b()
+ break
+ elif command == '9':
+ print(' ')
+ aa.bb()
+ break
+ elif command == '10':
+ print(' ')
+ aa.bbb()
+ break
+ elif command == '11':
+ print(' ')
+ aa.bbbb()
+ break
+ elif command == '12':
+ print(' ')
+ aa.c()
+ break
+ elif command == '13':
+ print(' ')
+ aa.cc()
+ break
+ elif command == '14':
+ print(' ')
+ aa.ccc()
+ break
+ elif command == '15':
+ print(' ')
+ aa.cccc()
+ break
+ elif command == '16':
+ print(' ')
+ aa.d()
+ break
+ elif command == '17':
+ print(' ')
+ aa.dd()
+ break
+ elif command == '18':
+ print(' ')
+ aa.ddd()
+ break
+ elif command == '19':
+ print(' ')
+ aa.dddd()
+ break
+ elif command == '20':
+ print(' ')
+ aa.e()
+ break
+ elif command == '21':
+ print(' ')
+ aa.ee()
+ break
+ elif command == '22':
+ print(' ')
+ aa.eee()
+ break
+ elif command == '23':
+ print(' ')
+ aa.eeee()
+ break
+ elif command == '24':
+ print(' ')
+ aa.f()
+ break
+ elif command == '25':
+ print(' ')
+ aa.ff()
+ break
+ elif command == '26':
+ print(' ')
+ aa.ffx()
+ break
+ elif command == '27':
+ print(' ')
+ aa.fff()
+ break
+ elif command == '28':
+ print(' ')
+ aa.ffff()
+ break
+ elif command == '29':
+ print(' ')
+ aa.g()
+ break
+ elif command == '30':
+ print(' ')
+ aa.gg()
+ break
+ elif command == '31':
+ print(' ')
+ aa.ggg()
+ break
+ elif command == '32':
+ print(' ')
+ aa.gggx()
+ break
+ elif command == '33':
+ print(' ')
+ aa.gggg()
+ break
+ elif command == '34':
+ print(' ')
+ aa.h()
+ break
+ elif command == '35':
+ print(' ')
+ aa.hh()
+ break
+ elif command == '36':
+ print(' ')
+ aa.hhh()
+ break
+ elif command == '37':
+ print(' ')
+ aa.hhhh()
+ break
+ elif command == '38':
+ print(' ')
+ aa.j()
+ break
+ elif command == '39':
+ print(' ')
+ aa.jj()
+ break
+ elif command == '40':
+ print(' ')
+ aa.jjj()
+ break
+
+
def swap():
    """Load the stored account, then run the daily collection routine
    (daily login reward, gift box, mission rewards), in that order."""
    daily_steps = (load_account, daily_login, accept_gifts, accept_missions)
    for step in daily_steps:
        step()
+
def swapjp():
    """JP variant of swap(): load the JP account, then run the daily
    collection routine in the same order."""
    daily_steps = (Jp_load_account, daily_login, accept_gifts, accept_missions)
    for step in daily_steps:
        step()
+
+
def db_downloadjp():
    """Check the JP game database version and refresh it if stale.

    Compares the version recorded on line 2 of help.txt against the
    server's reported version; when they differ, downloads the encrypted
    DB to dataenc_jp.db, decrypts it (decryptor.main with the JP key)
    and records the new version back into help.txt.

    Side effects: may create/rewrite help.txt, writes dataenc_jp.db and
    (via decryptor) jp.db, and temporarily switches config.client to
    'japan' while talking to the server.
    """
    jp_out_of_date = False
    jp_current = None

    # Read the locally recorded DB versions (line 1: GLB, line 2: JP),
    # creating a placeholder file on first run and retrying the read.
    while True:
        if os.path.isfile('help.txt'):
            with open('help.txt', 'r') as f:
                local_version_glb = f.readline().rstrip()  # kept for line position
                local_version_jp = f.readline().rstrip()
            break
        with open('help.txt', 'w') as f:
            f.write('111\n')
            f.write('111\n')

    # Temporarily act as a JP client for the version query.
    original_client = config.client
    config.client = 'japan'
    config.identifier = signup()
    config.access_token, config.secret = signin(config.identifier)

    headers = {
        'User-Agent': 'Mozilla/5.0 (Android 4.4; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0',
        'Accept': '*/*',
        'Authorization': packet.mac('GET', '/client_assets/database'),
        'Content-type': 'application/json',
        'X-Platform': config.platform,
        'X-AssetVersion': '////',
        'X-DatabaseVersion': '////',
        'X-ClientVersion': '////',
        'X-Language': 'en',
    }
    if config.client == 'global':
        url = 'https://ishin-global.aktsk.com/client_assets/database'
    else:
        url = 'http://ishin-production.aktsk.jp/client_assets/database'

    r = requests.get(url, allow_redirects=True, headers=headers)
    info = r.json()  # parse the response once instead of three times
    if local_version_jp != str(info['version']):
        jp_out_of_date = True
        jp_current = info['version']

        print(Fore.RED + Style.BRIGHT + 'JP DB out of date...')
        print(Fore.RED + Style.BRIGHT + 'Downloading...')
        db = requests.get(info['url'], allow_redirects=True)
        # was: open(...).write(...) — leaked the file handle
        with open('dataenc_jp.db', 'wb') as out:
            out.write(db.content)

    # Revert client to original
    config.client = original_client

    print(Fore.RED + Style.BRIGHT \
        + 'Decrypting Latest Databases... This can take a few minutes...')

    if jp_out_of_date:
        print('Decrypting JP Database')
        decryptor.main(p = '2db857e837e0a81706e86ea66e2d1633')
        with open('help.txt', 'r') as file:
            data = file.readlines()
        data[1] = str(jp_current) + '\n'
        with open('help.txt', 'w') as file:
            file.writelines(data)

    print(Fore.GREEN + Style.BRIGHT + 'Database update complete.')
# Invite link for the project's Discord server.
discordurl = 'https://discord.gg/GFExDwX'
+
+
+
diff --git a/BotZone2.8v1 Android/config.py b/BotZone2.8v1 Android/config.py
new file mode 100644
index 0000000..29596aa
--- /dev/null
+++ b/BotZone2.8v1 Android/config.py
@@ -0,0 +1,121 @@
from orator import DatabaseManager, Model

# Runtime session state — populated elsewhere at signup/signin time
# (e.g. db_downloadjp assigns identifier/access_token/secret; packet.mac
# reads AdId/secret/access_token).
AdId = None
UniqueId = None
identifier = None
access_token = None
secret = None
client = 'japan'      # which game server to talk to: 'japan' or 'global'
platform = 'android'

deck = 1                      # presumably the team slot used for stages — TODO confirm
allow_stamina_refill = True   # presumably permits auto stamina refills — TODO confirm


### Database Config
# NOTE: the connection key is named 'mysql' but the driver is sqlite.
jp_config = {'mysql': {'driver': 'sqlite', 'database': 'jp.db'}}
glb_config = {'mysql': {'driver': 'sqlite', 'database': 'glb.db'}}
db_glb = DatabaseManager(glb_config)
db_jp = DatabaseManager(jp_config)
# All Model subclasses below resolve against the GLB database by default.
Model.set_connection_resolver(db_glb)
+
# Orator ORM models: one thin class per game-database table. The class
# bodies only pin the table name; all query behavior comes from Model.

class LeaderSkills(Model):

    __table__ = 'leader_skills'

class LinkSkills(Model):

    __table__ = 'link_skills'

class AreaTabs(Model):

    __table__ = 'area_tabs'

class CardSpecials(Model):

    __table__ = 'card_specials'

# "Passives" maps to passive_skill_sets (name differs from the table).
class Passives(Model):

    __table__ = 'passive_skill_sets'

# "Supers" maps to the specials table.
class Supers(Model):

    __table__ = 'specials'

class ZBattles(Model):

    __table__ = 'z_battle_stage_views'

class CardCategories(Model):

    __table__ = 'card_categories'

class CardCardCategories(Model):

    __table__ = 'card_card_categories'

class TreasureItems(Model):

    __table__ = 'treasure_items'


class AwakeningItems(Model):

    __table__ = 'awakening_items'


class SupportItems(Model):

    __table__ = 'support_items'


class PotentialItems(Model):

    __table__ = 'potential_items'

class SpecialItems(Model):

    __table__ = 'special_items'


class TrainingItems(Model):

    __table__ = 'training_items'


class Cards(Model):

    __table__ = 'cards'


class Quests(Model):

    __table__ = 'quests'

class Ranks(Model):

    __table__ = 'rank_statuses'


class TrainingFields(Model):

    __table__ = 'training_fields'


class Sugoroku(Model):

    __table__ = 'sugoroku_maps'


class Area(Model):

    __table__ = 'areas'


# NOTE(review): Medal points at the same table as AwakeningItems
# ('awakening_items') — appears intentional (two views of one table),
# but confirm.
class Medal(Model):

    __table__ = 'awakening_items'
diff --git a/BotZone2.8v1 Android/decrypt.py b/BotZone2.8v1 Android/decrypt.py
new file mode 100644
index 0000000..96236d3
--- /dev/null
+++ b/BotZone2.8v1 Android/decrypt.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : decrypt.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+import sys
+from pysqlsimplecipher import decryptor
+
+
def usage():
    """Print the command-line synopsis for this script."""
    synopsis = 'Usage: python decrypt.py encrypted.db password output.db'
    print(synopsis)
+
def main():
    """CLI entry point: decrypt argv[1] with password argv[2] into argv[3]."""
    args = sys.argv
    if len(args) != 4:
        usage()
        return
    src, pw, dst = args[1], args[2], args[3]
    decryptor.decrypt_file(src, bytearray(pw.encode('utf8')), dst)


if __name__ == '__main__':
    main()
diff --git a/BotZone2.8v1 Android/decryptor.py b/BotZone2.8v1 Android/decryptor.py
new file mode 100644
index 0000000..89ac460
--- /dev/null
+++ b/BotZone2.8v1 Android/decryptor.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+# Module : decrypt.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+import sys
+from pysqlsimplecipher import decryptor
+
+
def usage():
    """Print the command-line synopsis (inherited from the upstream CLI tool)."""
    synopsis = 'Usage: python decrypt.py encrypted.db password output.db'
    print(synopsis)
+
def main(p = '9bf9c6ed9d537c399a6c4513e92ab24717e1a488381e3338593abd923fc8a13b'):
    # Decrypt one of the two encrypted game databases. The password
    # doubles as a selector: the default (GLB) key decrypts
    # dataenc_glb.db -> glb.db; any other value is treated as the JP key
    # and decrypts dataenc_jp.db -> jp.db.
    password = bytearray(p.encode('utf8'))
    if p == '9bf9c6ed9d537c399a6c4513e92ab24717e1a488381e3338593abd923fc8a13b':
        filename_in = 'dataenc_glb.db'
        filename_out = 'glb.db'
    else:
        filename_in = 'dataenc_jp.db'
        filename_out = 'jp.db'

    decryptor.decrypt_file(filename_in, password, filename_out)


if __name__ == '__main__':
    main()
diff --git a/BotZone2.8v1 Android/encrypt.py b/BotZone2.8v1 Android/encrypt.py
new file mode 100644
index 0000000..a43a7e2
--- /dev/null
+++ b/BotZone2.8v1 Android/encrypt.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : encrypt.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+import sys
+from pysqlsimplecipher import encryptor
+
+
def usage():
    """Print the command-line synopsis for this script."""
    synopsis = 'Usage: python encrypt.py plain.db password output.db'
    print(synopsis)
+
def main():
    """CLI entry point: encrypt argv[1] with password argv[2] into argv[3]."""
    args = sys.argv
    if len(args) != 4:
        usage()
        return
    src, pw, dst = args[1], args[2], args[3]
    encryptor.encrypt_file(src, bytearray(pw.encode('utf8')), dst)


if __name__ == '__main__':
    main()
diff --git a/BotZone2.8v1 Android/extra.py b/BotZone2.8v1 Android/extra.py
new file mode 100644
index 0000000..c9e0cc0
--- /dev/null
+++ b/BotZone2.8v1 Android/extra.py
@@ -0,0 +1,188 @@
+import commands
+from colorama import init, Fore, Back, Style
+# Coloroma autoreset
+init(autoreset=True)
+
+
def sbr_new():
    """Run the newer Super Battle Road stages (710021-710030) on Z-Hard,
    prompting the user to pick a matching category team before each.

    Fixes over the original: the ten copy-pasted stanzas are now a
    data-driven loop, and two message typos are corrected
    ("Sagaa" -> "Saga", stray '"' after "Super Saiyan 3").
    """
    print('---------------------------------')
    # (stage id, prompt shown before the team change), in original order.
    stages = [
        ('710021', 'Clear with Universe Survival Saga Category Team'),
        ('710022', 'Clear with Super Saiyan 3 Category Team'),
        ('710023', 'Clear with Giant Form Category Team'),
        ('710024', 'Clear with Transformation Boost Category Team'),
        ('710025', 'Clear with Ginyu Force Category Team'),
        ('710026', 'Clear with Movie Bosses Category Team'),
        ('710027', 'Clear with Pure Saiyans Category Team'),
        ('710028', 'Clear with Future Saga Category Team'),
        ('710029', 'Clear with Full Power Category Team'),
        ('710030', 'Clear with Androids Category Team'),
    ]
    for stage_id, message in stages:
        print(Fore.YELLOW + Style.BRIGHT + message)
        commands.change_team()
        commands.complete_stage(stage_id, 5)
+
+
def sbr_next():
    """Run Super Battle Road stages 710011-710020 on Z-Hard, prompting
    the user to pick a matching team before each.

    Refactor: the ten copy-pasted print/change_team/complete_stage
    stanzas are collapsed into one data-driven loop; all user-facing
    strings and the call order are unchanged.
    """
    # (stage id, prompt shown before the team change), in original order.
    stages = [
        ('710011', 'Clear with Super Class Team'),
        ('710012', 'Clear with Extreme Class Team'),
        ('710013', 'Clear with Fusion Category Team'),
        ('710014', 'Clear with Shadow Dragons Category Team'),
        ('710015', 'Clear with Peppy Gals Category Team'),
        ('710016', 'Clear with Hybrid Saiyans Category Team'),
        ('710017', 'Clear with Resurrected Warriors Category Team'),
        ('710018', 'Clear with Realm of Gods Category Team'),
        ('710019', 'Clear with Majin Buu Saga Category Team'),
        ('710020', 'Clear with Potara Category Team'),
    ]
    for stage_id, message in stages:
        print(Fore.YELLOW + Style.BRIGHT + message)
        commands.change_team()
        commands.complete_stage(stage_id, 5)

    print('---------------------------------')
+
+
def complete_sbr():
    """Run the original Super Battle Road stages (710001-710010) on
    Z-Hard, prompting for a matching type team before each.

    Bug fix: the Super PHY stage (710005) was the only one that did not
    call commands.change_team() before completing the stage — clearly an
    accidental omission; it now matches the other nine.
    """
    print('------------------------------------')
    # (stage id, prompt shown before the team change), in original order.
    stages = [
        ('710001', 'Clear with Super TEQ Team'),
        ('710002', 'Clear with Extreme TEQ Team'),
        ('710003', 'Clear with Super INT Team'),
        ('710004', 'Clear with Extreme INT Team'),
        ('710005', 'Clear with Super PHY Team'),
        ('710006', 'Clear with Extreme PHY Team'),
        ('710007', 'Clear with Super AGL Team'),
        ('710008', 'Clear with Extreme AGL Team'),
        ('710009', 'Clear with Super STR Team'),
        ('710010', 'Clear with Extreme STR Team'),
    ]
    for stage_id, message in stages:
        print(Fore.YELLOW + Style.BRIGHT + message)
        commands.change_team()
        commands.complete_stage(stage_id, 5)
+
+
+print('---------------------------------')
+
diff --git a/BotZone2.8v1 Android/packet.py b/BotZone2.8v1 Android/packet.py
new file mode 100644
index 0000000..9bf6a76
--- /dev/null
+++ b/BotZone2.8v1 Android/packet.py
@@ -0,0 +1,139 @@
+### packet.py contains functions critical to sending requests to the server
+
+import base64
+import binascii
+import config
+from Crypto.Cipher import AES
+import hashlib
+import hmac
+import json
+import os
+import time
+import uuid
+
+# Padding for the input string --not
+# related to encryption itself.
+
BLOCK_SIZE = 16  # AES block size, in bytes


def pad(s):
    """PKCS#7-style pad string s up to a multiple of BLOCK_SIZE.

    (PEP 8: was a named lambda; a def is the idiomatic form.)
    """
    n = BLOCK_SIZE - len(s) % BLOCK_SIZE
    return s + n * chr(n)


def unpad(s):
    """Strip the padding added by pad(); works on str or bytes."""
    return s[:-ord(s[len(s) - 1:])]
+
+####################################################################
+
def guid():
    """Generate fresh device identifiers accepted by Bandai's servers.

    Returns a dict with:
      'AdId'     — a random UUID4 string (36 chars)
      'UniqueId' — a UUID4 string, ':', then the first 8 chars of a
                   second UUID4 (45 chars total)
    """
    suffix_source = str(uuid.uuid4())
    unique = '{}:{}'.format(str(uuid.uuid4()), suffix_source[:8])
    return {'AdId': str(uuid.uuid4()), 'UniqueId': unique}
+
+####################################################################
+
def mac(method, action):

    # Build the MAC-token Authorization header value for a request.
    #   method: HTTP verb, e.g. 'GET'; action: request path.
    # Returns: 'MAC id="<token>" nonce="<ts:AdId>" ts="<ts>" mac="<b64 hmac>"'
    # Reads config.AdId / config.secret / config.access_token / config.client.

    # Unix timestamp (seconds); also reused inside the nonce.
    ts = str(int(round(time.time(), 0)))
    nonce = ts + ':' + config.AdId
    # The signed string is ts, nonce, verb, path, host, port, each on its
    # own line, followed by a blank "ext" line. The trailing triple-quoted
    # literal contributes the final "\n\n" — do not reformat it.
    if config.client == 'global':
        value = ts + '\n' + nonce + '\n' + method + '\n' + action + '\n' \
            + 'ishin-global.aktsk.com' + '\n' + '3001' + '''

'''
    else:
        value = ts + '\n' + nonce + '\n' + method + '\n' + action + '\n' \
            + 'ishin-production.aktsk.jp' + '\n' + '3001' + '''

'''

    # HMAC-SHA256 over the normalized string, keyed with the session secret.
    hmac_hex_bin = hmac.new(config.secret.encode('utf-8'), value.encode('utf-8'
                            ), hashlib.sha256).digest()
    mac = base64.b64encode(hmac_hex_bin).decode()
    final = 'MAC ' + 'id=' + '"' + config.access_token + '"' + ' nonce=' + '"' \
        + nonce + '"' + ' ts=' + '"' + ts + '"' + ' mac=' + '"' + mac \
        + '"'
    return final
+
+####################################################################
+# ================================================================
+# get_key_and_iv
+# ================================================================
+
def get_key_and_iv(
    password,
    salt,
    klen=32,
    ilen=16,
    msgdgst='md5',
    ):
    '''
    Derive the key and the IV from the given password and salt,
    compatible with OpenSSL's EVP_BytesToKey (default MD5 digest).

    CITATION: http://stackoverflow.com/questions/13907841/implement-openssl-aes-encryption-in-python

    @param password The password to use as the seed (str; non-ASCII dropped).
    @param salt The salt (bytes).
    @param klen The key length.
    @param ilen The initialization vector length.
    @param msgdgst The message digest algorithm to use.
    '''

    # Resolve e.g. hashlib.md5 / hashlib.sha512 by name.
    mdf = getattr(__import__('hashlib', fromlist=[msgdgst]), msgdgst)
    password = password.encode('ascii', 'ignore')  # convert to ASCII

    try:
        maxlen = klen + ilen
        # Chain digests until we have klen+ilen BYTES of key material.
        # (Perf fix: the original loop compared the number of digest
        # chunks to the byte count, running 48 MD5 rounds where 3
        # suffice; the derived key/IV are identical.)
        block = mdf(password + salt).digest()
        keyiv = block
        while len(keyiv) < maxlen:
            block = mdf(block + password + salt).digest()
            keyiv += block
        key = keyiv[:klen]
        iv = keyiv[klen:klen + ilen]
        return (key, iv)
    except UnicodeDecodeError:
        return (None, None)
+
+
+####################################################################
def encrypt_sign(data):
    """Encrypt a payload for the server's "sign" field.

    Pads the string, derives an AES-256-CBC key/IV from a fixed
    passphrase plus a fresh 8-byte random salt, and returns
    base64(salt + ciphertext).
    """
    plain = pad(data).encode()
    passphrase = \
        'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzJ9JaHioVi6rr0TAfr6j'
    salt = os.urandom(8)
    key, iv = get_key_and_iv(passphrase, salt, klen=32, ilen=16,
                             msgdgst='md5')
    sealed = salt + AES.new(key, AES.MODE_CBC, iv).encrypt(plain)
    return base64.b64encode(sealed).decode()
+
+
+####################################################################
def decrypt_sign(sign):
    """Reverse encrypt_sign: base64-decode, split off the 8-byte salt,
    re-derive the AES key/IV, decrypt, unpad and parse the JSON payload.

    Fix: removed an unused local (`buffer_encoded`) that re-encoded the
    decoded buffer for no reason.
    """
    buffer = base64.b64decode(sign)
    password = \
        'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzJ9JaHioVi6rr0TAfr6j'
    salt = buffer[0:8]
    (key, iv) = get_key_and_iv(password, salt, klen=32, ilen=16,
                               msgdgst='md5')
    data = buffer[8:len(buffer)]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    a = unpad(cipher.decrypt(data)).decode('utf8')
    return json.loads(a)
+####################################################################
diff --git a/BotZone2.8v1 Android/pysqlsimplecipher/__init__.py b/BotZone2.8v1 Android/pysqlsimplecipher/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/BotZone2.8v1 Android/pysqlsimplecipher/config.py b/BotZone2.8v1 Android/pysqlsimplecipher/config.py
new file mode 100644
index 0000000..ace3f5e
--- /dev/null
+++ b/BotZone2.8v1 Android/pysqlsimplecipher/config.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : config.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
# Default cipher parameters (SQLCipher-v1-style scheme — confirm against
# the upstream pysqlsimplecipher project).
salt_mask = 0x3a       # XORed over the salt to derive the HMAC salt
key_sz = 32            # AES key size, bytes
key_iter = 64000       # PBKDF2 iterations for the encryption key
hmac_key_sz = 32       # HMAC key size, bytes
hmac_key_iter = 2      # PBKDF2 iterations for the HMAC key
page_sz = 1024         # default database page size, bytes
iv_sz = 16             # per-page IV size, bytes
reserve_sz = 48        # reserved bytes at the end of each page
hmac_sz = 20           # HMAC-SHA1 digest size, bytes
diff --git a/BotZone2.8v1 Android/pysqlsimplecipher/decryptor.py b/BotZone2.8v1 Android/pysqlsimplecipher/decryptor.py
new file mode 100644
index 0000000..f3216d9
--- /dev/null
+++ b/BotZone2.8v1 Android/pysqlsimplecipher/decryptor.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : decryptor.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+from pysqlsimplecipher import config
+from pysqlsimplecipher import util
+
+
def decrypt_file(filename_in, password, filename_out):
    """Decrypt the encrypted database at filename_in into a plain SQLite
    file at filename_out, using the module's default cipher parameters."""
    if not isinstance(filename_in, str):
        raise RuntimeError('filename_in must be a str.')
    if not isinstance(password, bytearray):
        raise RuntimeError('password must be a bytearray.')
    if not isinstance(filename_out, str):
        raise RuntimeError('filename_out must be a str.')

    with open(filename_in, 'rb') as src:
        encrypted = src.read()

    plain = decrypt_default(encrypted, password)

    with open(filename_out, 'wb') as dst:
        dst.write(plain)
+
+
def decrypt_default(raw, password):
    """Decrypt raw with every tunable taken from pysqlsimplecipher.config."""
    return decrypt(
        raw,
        password,
        config.salt_mask,
        config.key_sz,
        config.key_iter,
        config.hmac_key_sz,
        config.hmac_key_iter,
        config.page_sz,
        config.iv_sz,
        config.reserve_sz,
        config.hmac_sz,
    )
+
+
def decrypt(raw, password, salt_mask, key_sz, key_iter, hmac_key_sz, hmac_key_iter, page_sz, iv_sz, reserve_sz, hmac_sz):
    """Decrypt a whole encrypted database image.

    Derives the keys from the 16-byte file salt, probes the first page to
    settle the real page/reserve sizes, verifies each page's HMAC, and
    rebuilds the plain SQLite image (magic header + decrypted pages with
    random filler in the reserve region).

    Raises RuntimeError if the page/reserve size cannot be determined or
    an HMAC check fails.
    """
    dec = b'SQLite format 3\0'

    # derive key — the first 16 bytes of the file are the KDF salt
    salt_sz = 16
    salt = raw[:salt_sz]
    key, hmac_key = util.key_derive(salt, password, salt_mask, key_sz, key_iter, hmac_key_sz, hmac_key_iter)

    # decrypt file header, try with default page size
    page_sz, reserve_sz = decrypt_page_header(raw, key, salt_sz, page_sz, iv_sz, reserve_sz)
    if page_sz < 0 or reserve_sz < 0:
        raise RuntimeError('failed to decide page size or reserve size.')

    # decrypt pages
    # BUG FIX: iterate len(raw) // page_sz pages rather than assuming
    # 1024-byte pages — the old count was wrong whenever the detected
    # page size differed from 1024.
    for i in range(len(raw) // page_sz):
        page = util.get_page(raw, page_sz, i + 1)
        if i == 0:
            # skip salt
            page = page[salt_sz:]
        page_content = page[:-reserve_sz]
        reserve = page[-reserve_sz:]
        iv = reserve[:iv_sz]
        # check hmac
        hmac_old = reserve[iv_sz:iv_sz+hmac_sz]
        hmac_new = util.generate_hmac(hmac_key, page_content + iv, i + 1)
        if not hmac_old == hmac_new:
            raise RuntimeError('hmac check failed in page %d.' % (i+1))
        # decrypt content
        page_dec = util.decrypt(page_content, key, iv)
        dec += page_dec + util.random_bytes(reserve_sz)

    return dec
+
+
def decrypt_page_header(raw, key, salt_sz, page_sz, iv_sz, reserve_sz):
    """Determine the real page and reserve sizes by trial decryption.

    Tries the caller-supplied (default) page size first, then sweeps
    every legal SQLite page size (powers of two, 512..65536).
    When succeed, return page_sz, reserve_sz.
    If fail, return -1, -1.
    """

    # Fall back to the minimum legal page size if the default is bogus.
    if not util.is_valid_page_size(page_sz):
        page_sz = 512

    new_reserve_sz = try_get_reserve_size_for_specified_page_size(raw, key, salt_sz, page_sz, iv_sz, reserve_sz)
    if new_reserve_sz > 0:  # default page_sz is ok
        return page_sz, new_reserve_sz

    # Default failed — sweep all power-of-two page sizes.
    page_sz = 512
    while page_sz <= 65536:
        new_reserve_sz = try_get_reserve_size_for_specified_page_size(raw, key, salt_sz, page_sz, iv_sz, reserve_sz)
        if new_reserve_sz > 0:
            return page_sz, new_reserve_sz
        page_sz <<= 1

    return -1, -1  # fail
+
+
def try_get_reserve_size_for_specified_page_size(raw, key, salt_sz, page_sz, iv_sz, reserve_sz):
    """Try to decrypt first page with specified page size.

    A candidate reserve size is accepted only when the decrypted header
    looks valid AND the page/reserve sizes recorded inside that header
    agree with the candidate values.
    If default reserve size fail, change reserve size.
    When succeed, return reserve size.
    If always fail, return -1.
    """

    first_page_content = util.get_page(raw, page_sz, 1)[salt_sz:]

    # First try the caller-supplied (default) reserve size.
    if reserve_sz >= iv_sz:
        first_page_dec = decrypt_by_reserve_size(first_page_content, key, iv_sz, reserve_sz)
        # default reserve_sz is ok
        if util.is_valid_decrypted_header(first_page_dec) \
                and page_sz == util.get_page_size_from_database_header(raw[:salt_sz] + first_page_dec) \
                and reserve_sz == util.get_reserved_size_from_database_header(raw[:salt_sz] + first_page_dec):
            return reserve_sz

    # try every possible reserve size.
    # the usable size of a page is at least 480.
    for reserve_sz in range(iv_sz, page_sz - 480):
        first_page_dec = decrypt_by_reserve_size(first_page_content, key, iv_sz, reserve_sz)
        if util.is_valid_decrypted_header(first_page_dec) \
                and page_sz == util.get_page_size_from_database_header(raw[:salt_sz] + first_page_dec) \
                and reserve_sz == util.get_reserved_size_from_database_header(raw[:salt_sz] + first_page_dec):
            return reserve_sz

    return -1  # fail
+
+
def decrypt_by_reserve_size(first_page_without_salt, key, iv_sz, reserve_sz):
    """Decrypt page content using specified reserve size"""
    # The IV sits at the start of the reserve region at the page's end.
    reserve = first_page_without_salt[-reserve_sz:]
    iv = reserve[:iv_sz]
    # NOTE(review): this decrypts the whole page (including the reserve
    # bytes) rather than page[:-reserve_sz]; callers only inspect the
    # leading header bytes of the result, so the trailing garbage seems
    # harmless for this probing path.
    return util.decrypt(first_page_without_salt, key, iv)
diff --git a/BotZone2.8v1 Android/pysqlsimplecipher/encryptor.py b/BotZone2.8v1 Android/pysqlsimplecipher/encryptor.py
new file mode 100644
index 0000000..ef86956
--- /dev/null
+++ b/BotZone2.8v1 Android/pysqlsimplecipher/encryptor.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : encryptor.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+import hashlib
+import hmac
+from pysqlsimplecipher import config
+from pysqlsimplecipher import util
+
+
def check_database_header(header):
    """Validate a plain SQLite header and extract (page_sz, reserve_sz).

    Raises RuntimeError for a bad magic/header, an illegal page size, or
    a zero reserve size (the cipher needs reserved space per page).
    """
    if not util.is_valid_database_header(header):
        raise RuntimeError('invalid database header.')

    page_sz = util.get_page_size_from_database_header(header)
    reserve_sz = util.get_reserved_size_from_database_header(header)

    if not util.is_valid_page_size(page_sz):
        raise RuntimeError('invalid page size %d.' % page_sz)
    if reserve_sz == 0:
        raise RuntimeError('needs reserved space at the end of each page.')

    return page_sz, reserve_sz
+
+
def encrypt_file(filename_in, password, filename_out):
    """Encrypt the plain SQLite database at filename_in into an
    encrypted image at filename_out."""
    if not isinstance(filename_in, str):
        raise RuntimeError('filename_in must be a str.')
    if not isinstance(password, bytearray):
        raise RuntimeError('password must be a bytearray.')
    if not isinstance(filename_out, str):
        raise RuntimeError('filename_out must be a str.')

    with open(filename_in, 'rb') as src:
        plain = src.read()

    # The first 100 bytes of a SQLite file form its header.
    page_sz, reserve_sz = check_database_header(plain[:100])

    with open(filename_out, 'wb') as dst:
        dst.write(encrypt_default(plain, password, page_sz, reserve_sz))
+
+
def encrypt_default(raw, password, page_sz, reserve_sz):
    """Encrypt raw using the module defaults for every KDF/HMAC tunable."""
    needed = config.iv_sz + config.hmac_sz
    if reserve_sz < needed:
        raise RuntimeError('reserved space at the end of each page is %d, needs %d.' % (reserve_sz, needed))

    return encrypt(raw, password, config.salt_mask, config.key_sz,
                   config.key_iter, config.hmac_key_sz,
                   config.hmac_key_iter, page_sz, config.iv_sz,
                   reserve_sz, config.hmac_sz)
+
+
def encrypt(raw, password, salt_mask, key_sz, key_iter, hmac_key_sz, hmac_key_iter, page_sz, iv_sz, reserve_sz, hmac_sz):
    """Encrypt a plain SQLite image page by page.

    Output layout: a 16-byte random salt (replacing the magic header
    string on page 1), then for each page its ciphertext, IV, HMAC, and
    random filler up to reserve_sz.
    """
    salt_sz = 16
    salt = util.random_bytes(salt_sz)
    enc = salt

    # derive key
    key, hmac_key = util.key_derive(salt, password, salt_mask, key_sz, key_iter, hmac_key_sz, hmac_key_iter)

    # encrypt pages
    # BUG FIX: iterate len(raw) // page_sz pages rather than assuming
    # 1024-byte pages — the old count was wrong for any other page size.
    for i in range(len(raw) // page_sz):
        page = util.get_page(raw, page_sz, i + 1)
        if i == 0:
            # skip header string
            page = page[salt_sz:]
        page_content = page[:-reserve_sz]
        iv = util.random_bytes(iv_sz)
        # encrypt content
        page_enc = util.encrypt(page_content, key, iv)
        # generate hmac over ciphertext + iv
        hmac_new = util.generate_hmac(hmac_key, page_enc + iv, i + 1)
        enc += page_enc + iv + hmac_new
        if reserve_sz > iv_sz + hmac_sz:
            enc += util.random_bytes(reserve_sz - iv_sz - hmac_sz)

    return enc
diff --git a/BotZone2.8v1 Android/pysqlsimplecipher/util.py b/BotZone2.8v1 Android/pysqlsimplecipher/util.py
new file mode 100644
index 0000000..396bc49
--- /dev/null
+++ b/BotZone2.8v1 Android/pysqlsimplecipher/util.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Module : util.py
+# Author : bssthu
+# Project : pysqlsimplecipher
+# Creation date : 2016-06-03
+# Description :
+#
+
+
+import os
+import math
+import hashlib
+import hmac
+import struct
+from Crypto.Cipher import AES
+
+
def encrypt(raw, key, iv):
    """AES-CBC-encrypt *raw* (length must be a multiple of the AES block
    size) under *key* with initialization vector *iv*."""
    return AES.new(key, AES.MODE_CBC, iv).encrypt(raw)
+
+
def decrypt(raw, key, iv):
    """AES-CBC-decrypt *raw* under *key* with initialization vector *iv*."""
    return AES.new(key, AES.MODE_CBC, iv).decrypt(raw)
+
+
def is_valid_database_header(header):
    """Return True when *header* starts with the SQLite magic string and
    the fixed format bytes after it are valid."""
    has_magic = header[:16] == b'SQLite format 3\0'
    return has_magic and is_valid_decrypted_header(header[16:])
+
+
def is_valid_decrypted_header(header):
    """Validate a decrypted header with its first 16 bytes already removed.

    Checks offsets 21, 22 and 23 of the full header (5, 6, 7 here): the
    max/min/leaf payload fractions, which are fixed at 64, 32 and 32.
    """
    if isinstance(header, str):  # python2 passes a str of raw characters
        header = [ord(ch) for ch in header]
    # offsets 21..23 of the full header, minus the 16 bytes the caller skipped
    return header[5] == 64 and header[6] == 32 and header[7] == 32
+
+
def get_page_size_from_database_header(header):
    """Return the database page size stored big-endian at header offset 16.

    A stored value of 1 means 65536 (supported since SQLite 3.7.1).
    """
    if isinstance(header, str):  # python2: raw character string
        page_sz = (ord(header[16]) << 8) + ord(header[17])
    else:
        page_sz = struct.unpack('>H', bytes(header[16:18]))[0]
    return 65536 if page_sz == 1 else page_sz
+
+
def get_reserved_size_from_database_header(header):
    """Return the per-page reserved-byte count stored at header offset 20."""
    raw_byte = header[20]
    # python2 indexes a str and yields a 1-char string; python3 yields an int
    return ord(raw_byte) if isinstance(raw_byte, str) else int(raw_byte)
+
+
def is_valid_page_size(page_sz):
    """Return True when *page_sz* is a power of two and at least 512.

    Uses integer bit arithmetic (a power of two has exactly one bit set,
    so ``n & (n - 1) == 0``) instead of the previous float-based
    ``math.log`` comparison, which can misclassify values whenever
    log() lands fractionally below the true exponent.
    """
    return page_sz >= 512 and (page_sz & (page_sz - 1)) == 0
+
+
def get_page(raw, page_sz, page_no):
    """Return the 1-indexed page *page_no* as a slice of *raw*."""
    start = (page_no - 1) * page_sz
    return raw[start:start + page_sz]
+
+
def random_bytes(n):
    """Return *n* random bytes from the OS CSPRNG (used for salts and IVs)."""
    return os.urandom(n)
+
+
def key_derive(salt, password, salt_mask, key_sz, key_iter, hmac_key_sz, hmac_key_iter):
    """Derive the page-encryption key and the HMAC key via PBKDF2-SHA1.

    The encryption key is derived from *password* and *salt*; the HMAC
    key is then derived from that key using the salt with every byte
    XOR-ed against *salt_mask*.
    """
    key = hashlib.pbkdf2_hmac('sha1', password, salt, key_iter, key_sz)

    try:
        masked_salt = bytearray(b ^ salt_mask for b in salt)
        hmac_key = hashlib.pbkdf2_hmac('sha1', key, masked_salt, hmac_key_iter, hmac_key_sz)
    except TypeError:  # python2: salt iterates as 1-char strings
        masked_salt = b''
        for ch in salt:
            masked_salt += chr(ord(ch) ^ salt_mask)
        hmac_key = hashlib.pbkdf2_hmac('sha1', str(key), masked_salt, hmac_key_iter, hmac_key_sz)
    return key, hmac_key
+
+
+def generate_hmac(hmac_key, content, page_no):
+ hmac_obj = hmac.new(hmac_key, content, hashlib.sha1)
+ hmac_obj.update(struct.pack('vdbe.new
+ mv vdbe.new tsrc/vdbe.c
+ cp fts5.c fts5.h tsrc
+ touch .target_source
+
+sqlite3.c: .target_source $(TOP)/tool/mksqlite3c.tcl
+ $(TCLSH_CMD) $(TOP)/tool/mksqlite3c.tcl
+ cp tsrc/sqlite3ext.h .
+ cp $(TOP)/ext/session/sqlite3session.h .
+
+sqlite3ext.h: .target_source
+ cp tsrc/sqlite3ext.h .
+
+tclsqlite3.c: sqlite3.c
+ echo '#ifndef USE_SYSTEM_SQLITE' >tclsqlite3.c
+ cat sqlite3.c >>tclsqlite3.c
+ echo '#endif /* USE_SYSTEM_SQLITE */' >>tclsqlite3.c
+ cat $(TOP)/src/tclsqlite.c >>tclsqlite3.c
+
+sqlite3-all.c: sqlite3.c $(TOP)/tool/split-sqlite3c.tcl
+ $(TCLSH_CMD) $(TOP)/tool/split-sqlite3c.tcl
+
+# Rule to build the amalgamation
+#
+sqlite3.lo: sqlite3.c
+ $(LTCOMPILE) $(TEMP_STORE) -c sqlite3.c
+
+# Rules to build the LEMON compiler generator
+#
+lemon$(BEXE): $(TOP)/tool/lemon.c $(TOP)/tool/lempar.c
+ $(BCC) -o $@ $(TOP)/tool/lemon.c
+ cp $(TOP)/tool/lempar.c .
+
+# Rules to build the program that generates the source-id
+#
+mksourceid$(BEXE): $(TOP)/tool/mksourceid.c
+ $(BCC) -o $@ $(TOP)/tool/mksourceid.c
+
+# Rules to build individual *.o files from generated *.c files. This
+# applies to:
+#
+# parse.o
+# opcodes.o
+#
+parse.lo: parse.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c parse.c
+
+opcodes.lo: opcodes.c
+ $(LTCOMPILE) $(TEMP_STORE) -c opcodes.c
+
+# BEGIN CRYPTO
+crypto.lo: $(TOP)/src/crypto.c $(HDR)
+ $(LTCOMPILE) -c $(TOP)/src/crypto.c
+crypto_impl.lo: $(TOP)/src/crypto_impl.c $(HDR)
+ $(LTCOMPILE) -c $(TOP)/src/crypto_impl.c
+crypto_openssl.lo: $(TOP)/src/crypto_openssl.c $(HDR)
+ $(LTCOMPILE) -c $(TOP)/src/crypto_openssl.c
+crypto_libtomcrypt.lo: $(TOP)/src/crypto_libtomcrypt.c $(HDR)
+ $(LTCOMPILE) -c $(TOP)/src/crypto_libtomcrypt.c
+crypto_cc.lo: $(TOP)/src/crypto_cc.c $(HDR)
+ $(LTCOMPILE) -c $(TOP)/src/crypto_cc.c
+# END CRYPTO
+
+# Rules to build individual *.o files from files in the src directory.
+#
+alter.lo: $(TOP)/src/alter.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/alter.c
+
+analyze.lo: $(TOP)/src/analyze.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/analyze.c
+
+attach.lo: $(TOP)/src/attach.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/attach.c
+
+auth.lo: $(TOP)/src/auth.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/auth.c
+
+backup.lo: $(TOP)/src/backup.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/backup.c
+
+bitvec.lo: $(TOP)/src/bitvec.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/bitvec.c
+
+btmutex.lo: $(TOP)/src/btmutex.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/btmutex.c
+
+btree.lo: $(TOP)/src/btree.c $(HDR) $(TOP)/src/pager.h
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/btree.c
+
+build.lo: $(TOP)/src/build.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/build.c
+
+callback.lo: $(TOP)/src/callback.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/callback.c
+
+complete.lo: $(TOP)/src/complete.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/complete.c
+
+ctime.lo: $(TOP)/src/ctime.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/ctime.c
+
+date.lo: $(TOP)/src/date.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/date.c
+
+dbpage.lo: $(TOP)/src/dbpage.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/dbpage.c
+
+dbstat.lo: $(TOP)/src/dbstat.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/dbstat.c
+
+delete.lo: $(TOP)/src/delete.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/delete.c
+
+expr.lo: $(TOP)/src/expr.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/expr.c
+
+fault.lo: $(TOP)/src/fault.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/fault.c
+
+fkey.lo: $(TOP)/src/fkey.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/fkey.c
+
+func.lo: $(TOP)/src/func.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/func.c
+
+global.lo: $(TOP)/src/global.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/global.c
+
+hash.lo: $(TOP)/src/hash.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/hash.c
+
+insert.lo: $(TOP)/src/insert.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/insert.c
+
+legacy.lo: $(TOP)/src/legacy.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/legacy.c
+
+loadext.lo: $(TOP)/src/loadext.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/loadext.c
+
+main.lo: $(TOP)/src/main.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/main.c
+
+malloc.lo: $(TOP)/src/malloc.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/malloc.c
+
+mem0.lo: $(TOP)/src/mem0.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mem0.c
+
+mem1.lo: $(TOP)/src/mem1.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mem1.c
+
+mem2.lo: $(TOP)/src/mem2.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mem2.c
+
+mem3.lo: $(TOP)/src/mem3.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mem3.c
+
+mem5.lo: $(TOP)/src/mem5.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mem5.c
+
+memdb.lo: $(TOP)/src/memdb.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/memdb.c
+
+memjournal.lo: $(TOP)/src/memjournal.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/memjournal.c
+
+mutex.lo: $(TOP)/src/mutex.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mutex.c
+
+mutex_noop.lo: $(TOP)/src/mutex_noop.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mutex_noop.c
+
+mutex_unix.lo: $(TOP)/src/mutex_unix.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mutex_unix.c
+
+mutex_w32.lo: $(TOP)/src/mutex_w32.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/mutex_w32.c
+
+notify.lo: $(TOP)/src/notify.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/notify.c
+
+pager.lo: $(TOP)/src/pager.c $(HDR) $(TOP)/src/pager.h
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/pager.c
+
+pcache.lo: $(TOP)/src/pcache.c $(HDR) $(TOP)/src/pcache.h
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/pcache.c
+
+pcache1.lo: $(TOP)/src/pcache1.c $(HDR) $(TOP)/src/pcache.h
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/pcache1.c
+
+os.lo: $(TOP)/src/os.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/os.c
+
+os_unix.lo: $(TOP)/src/os_unix.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/os_unix.c
+
+os_win.lo: $(TOP)/src/os_win.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/os_win.c
+
+pragma.lo: $(TOP)/src/pragma.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/pragma.c
+
+prepare.lo: $(TOP)/src/prepare.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/prepare.c
+
+printf.lo: $(TOP)/src/printf.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/printf.c
+
+random.lo: $(TOP)/src/random.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/random.c
+
+resolve.lo: $(TOP)/src/resolve.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/resolve.c
+
+rowset.lo: $(TOP)/src/rowset.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/rowset.c
+
+select.lo: $(TOP)/src/select.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/select.c
+
+status.lo: $(TOP)/src/status.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/status.c
+
+table.lo: $(TOP)/src/table.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/table.c
+
+threads.lo: $(TOP)/src/threads.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/threads.c
+
+tokenize.lo: $(TOP)/src/tokenize.c keywordhash.h $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/tokenize.c
+
+treeview.lo: $(TOP)/src/treeview.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/treeview.c
+
+trigger.lo: $(TOP)/src/trigger.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/trigger.c
+
+update.lo: $(TOP)/src/update.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/update.c
+
+upsert.lo: $(TOP)/src/upsert.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/upsert.c
+
+utf.lo: $(TOP)/src/utf.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/utf.c
+
+util.lo: $(TOP)/src/util.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/util.c
+
+vacuum.lo: $(TOP)/src/vacuum.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vacuum.c
+
+vdbe.lo: $(TOP)/src/vdbe.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbe.c
+
+vdbeapi.lo: $(TOP)/src/vdbeapi.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbeapi.c
+
+vdbeaux.lo: $(TOP)/src/vdbeaux.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbeaux.c
+
+vdbeblob.lo: $(TOP)/src/vdbeblob.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbeblob.c
+
+vdbemem.lo: $(TOP)/src/vdbemem.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbemem.c
+
+vdbesort.lo: $(TOP)/src/vdbesort.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbesort.c
+
+vdbetrace.lo: $(TOP)/src/vdbetrace.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vdbetrace.c
+
+vtab.lo: $(TOP)/src/vtab.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/vtab.c
+
+wal.lo: $(TOP)/src/wal.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/wal.c
+
+walker.lo: $(TOP)/src/walker.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/walker.c
+
+where.lo: $(TOP)/src/where.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/where.c
+
+wherecode.lo: $(TOP)/src/wherecode.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/wherecode.c
+
+whereexpr.lo: $(TOP)/src/whereexpr.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/whereexpr.c
+
+window.lo: $(TOP)/src/window.c $(HDR)
+ $(LTCOMPILE) $(TEMP_STORE) -c $(TOP)/src/window.c
+
+tclsqlite.lo: $(TOP)/src/tclsqlite.c $(HDR)
+ $(LTCOMPILE) -DUSE_TCL_STUBS=1 -c $(TOP)/src/tclsqlite.c
+
+tclsqlite-shell.lo: $(TOP)/src/tclsqlite.c $(HDR)
+ $(LTCOMPILE) -DTCLSH -o $@ -c $(TOP)/src/tclsqlite.c
+
+tclsqlite-stubs.lo: $(TOP)/src/tclsqlite.c $(HDR)
+ $(LTCOMPILE) -DUSE_TCL_STUBS=1 -o $@ -c $(TOP)/src/tclsqlite.c
+
+tclsqlcipher$(TEXE): tclsqlite-shell.lo libsqlcipher.la
+ $(LTLINK) -o $@ tclsqlite-shell.lo \
+ libsqlcipher.la $(LIBTCL)
+
+# Rules to build opcodes.c and opcodes.h
+#
+opcodes.c: opcodes.h $(TOP)/tool/mkopcodec.tcl
+ $(TCLSH_CMD) $(TOP)/tool/mkopcodec.tcl opcodes.h >opcodes.c
+
+opcodes.h: parse.h $(TOP)/src/vdbe.c $(TOP)/tool/mkopcodeh.tcl
+ cat parse.h $(TOP)/src/vdbe.c | $(TCLSH_CMD) $(TOP)/tool/mkopcodeh.tcl >opcodes.h
+
+# Rules to build parse.c and parse.h - the outputs of lemon.
+#
+parse.h: parse.c
+
+parse.c: $(TOP)/src/parse.y lemon$(BEXE)
+ cp $(TOP)/src/parse.y .
+ ./lemon$(BEXE) $(OPT_FEATURE_FLAGS) $(OPTS) parse.y
+
+sqlite3.h: $(TOP)/src/sqlite.h.in $(TOP)/manifest mksourceid$(BEXE) $(TOP)/VERSION
+ $(TCLSH_CMD) $(TOP)/tool/mksqlite3h.tcl $(TOP) >sqlite3.h
+
+keywordhash.h: $(TOP)/tool/mkkeywordhash.c
+ $(BCC) -o mkkeywordhash$(BEXE) $(OPT_FEATURE_FLAGS) $(OPTS) $(TOP)/tool/mkkeywordhash.c
+ ./mkkeywordhash$(BEXE) >keywordhash.h
+
+# Source files that go into making shell.c
+SHELL_SRC = \
+ $(TOP)/src/shell.c.in \
+ $(TOP)/ext/misc/appendvfs.c \
+ $(TOP)/ext/misc/shathree.c \
+ $(TOP)/ext/misc/fileio.c \
+ $(TOP)/ext/misc/completion.c \
+ $(TOP)/ext/misc/sqlar.c \
+ $(TOP)/ext/expert/sqlite3expert.c \
+ $(TOP)/ext/expert/sqlite3expert.h \
+ $(TOP)/ext/misc/zipfile.c \
+ $(TOP)/ext/misc/memtrace.c \
+ $(TOP)/src/test_windirent.c
+
+shell.c: $(SHELL_SRC) $(TOP)/tool/mkshellc.tcl
+ $(TCLSH_CMD) $(TOP)/tool/mkshellc.tcl >shell.c
+
+
+
+
+# Rules to build the extension objects.
+#
+icu.lo: $(TOP)/ext/icu/icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/icu/icu.c
+
+fts2.lo: $(TOP)/ext/fts2/fts2.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2.c
+
+fts2_hash.lo: $(TOP)/ext/fts2/fts2_hash.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2_hash.c
+
+fts2_icu.lo: $(TOP)/ext/fts2/fts2_icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2_icu.c
+
+fts2_porter.lo: $(TOP)/ext/fts2/fts2_porter.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2_porter.c
+
+fts2_tokenizer.lo: $(TOP)/ext/fts2/fts2_tokenizer.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2_tokenizer.c
+
+fts2_tokenizer1.lo: $(TOP)/ext/fts2/fts2_tokenizer1.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts2/fts2_tokenizer1.c
+
+fts3.lo: $(TOP)/ext/fts3/fts3.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3.c
+
+fts3_aux.lo: $(TOP)/ext/fts3/fts3_aux.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_aux.c
+
+fts3_expr.lo: $(TOP)/ext/fts3/fts3_expr.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_expr.c
+
+fts3_hash.lo: $(TOP)/ext/fts3/fts3_hash.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_hash.c
+
+fts3_icu.lo: $(TOP)/ext/fts3/fts3_icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_icu.c
+
+fts3_porter.lo: $(TOP)/ext/fts3/fts3_porter.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_porter.c
+
+fts3_snippet.lo: $(TOP)/ext/fts3/fts3_snippet.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_snippet.c
+
+fts3_tokenizer.lo: $(TOP)/ext/fts3/fts3_tokenizer.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_tokenizer.c
+
+fts3_tokenizer1.lo: $(TOP)/ext/fts3/fts3_tokenizer1.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_tokenizer1.c
+
+fts3_tokenize_vtab.lo: $(TOP)/ext/fts3/fts3_tokenize_vtab.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_tokenize_vtab.c
+
+fts3_unicode.lo: $(TOP)/ext/fts3/fts3_unicode.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_unicode.c
+
+fts3_unicode2.lo: $(TOP)/ext/fts3/fts3_unicode2.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_unicode2.c
+
+fts3_write.lo: $(TOP)/ext/fts3/fts3_write.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/fts3/fts3_write.c
+
+rtree.lo: $(TOP)/ext/rtree/rtree.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/rtree/rtree.c
+
+userauth.lo: $(TOP)/ext/userauth/userauth.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/userauth/userauth.c
+
+sqlite3session.lo: $(TOP)/ext/session/sqlite3session.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/session/sqlite3session.c
+
+json1.lo: $(TOP)/ext/misc/json1.c
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/misc/json1.c
+
+stmt.lo: $(TOP)/ext/misc/stmt.c
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/misc/stmt.c
+
+# FTS5 things
+#
+FTS5_SRC = \
+ $(TOP)/ext/fts5/fts5.h \
+ $(TOP)/ext/fts5/fts5Int.h \
+ $(TOP)/ext/fts5/fts5_aux.c \
+ $(TOP)/ext/fts5/fts5_buffer.c \
+ $(TOP)/ext/fts5/fts5_main.c \
+ $(TOP)/ext/fts5/fts5_config.c \
+ $(TOP)/ext/fts5/fts5_expr.c \
+ $(TOP)/ext/fts5/fts5_hash.c \
+ $(TOP)/ext/fts5/fts5_index.c \
+ fts5parse.c fts5parse.h \
+ $(TOP)/ext/fts5/fts5_storage.c \
+ $(TOP)/ext/fts5/fts5_tokenize.c \
+ $(TOP)/ext/fts5/fts5_unicode2.c \
+ $(TOP)/ext/fts5/fts5_varint.c \
+ $(TOP)/ext/fts5/fts5_vocab.c \
+
# fts5parse.c/.h are generated from fts5parse.y by the lemon parser
# generator.  Depend on lemon$(BEXE) -- the binary this makefile builds
# and the recipe invokes -- not a bare "lemon", which never exists when
# $(BEXE) is non-empty (e.g. ".exe" builds).  This matches the parse.c
# rule above.
fts5parse.c: $(TOP)/ext/fts5/fts5parse.y lemon$(BEXE)
	cp $(TOP)/ext/fts5/fts5parse.y .
	rm -f fts5parse.h
	./lemon$(BEXE) $(OPTS) fts5parse.y
+
+fts5parse.h: fts5parse.c
+
+fts5.c: $(FTS5_SRC)
+ $(TCLSH_CMD) $(TOP)/ext/fts5/tool/mkfts5c.tcl
+ cp $(TOP)/ext/fts5/fts5.h .
+
+fts5.lo: fts5.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c fts5.c
+
+sqlite3rbu.lo: $(TOP)/ext/rbu/sqlite3rbu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)/ext/rbu/sqlite3rbu.c
+
+
+# Rules to build the 'testfixture' application.
+#
+# If using the amalgamation, use sqlite3.c directly to build the test
+# fixture. Otherwise link against libsqlcipher.la. (This distinction is
+# necessary because the test fixture requires non-API symbols which are
+# hidden when the library is built via the amalgamation).
+#
+TESTFIXTURE_FLAGS = -DSQLITE_TEST=1 -DSQLITE_CRASH_TEST=1
+TESTFIXTURE_FLAGS += -DTCLSH_INIT_PROC=sqlite3TestInit
+TESTFIXTURE_FLAGS += -DSQLITE_SERVER=1 -DSQLITE_PRIVATE="" -DSQLITE_CORE
+TESTFIXTURE_FLAGS += -DBUILD_sqlite
+TESTFIXTURE_FLAGS += -DSQLITE_SERIES_CONSTRAINT_VERIFY=1
+TESTFIXTURE_FLAGS += -DSQLITE_DEFAULT_PAGE_SIZE=1024
+TESTFIXTURE_FLAGS += -DSQLITE_ENABLE_STMTVTAB
+TESTFIXTURE_FLAGS += -DSQLITE_ENABLE_DBPAGE_VTAB
+TESTFIXTURE_FLAGS += -DSQLITE_ENABLE_DESERIALIZE
+
+TESTFIXTURE_SRC0 = $(TESTSRC2) libsqlcipher.la
+TESTFIXTURE_SRC1 = sqlite3.c
+TESTFIXTURE_SRC = $(TESTSRC) $(TOP)/src/tclsqlite.c
+TESTFIXTURE_SRC += $(TESTFIXTURE_SRC$(USE_AMALGAMATION))
+
+testfixture$(TEXE): $(TESTFIXTURE_SRC)
+ $(LTLINK) -DSQLITE_NO_SYNC=1 $(TEMP_STORE) $(TESTFIXTURE_FLAGS) \
+ -o $@ $(TESTFIXTURE_SRC) $(LIBTCL) $(TLIBS)
+
+coretestprogs: $(TESTPROGS)
+
+testprogs: coretestprogs srcck1$(BEXE) fuzzcheck$(TEXE) sessionfuzz$(TEXE)
+
+# A very detailed test running most or all test cases
+fulltest: $(TESTPROGS) fuzztest
+ ./testfixture$(TEXE) $(TOP)/test/all.test $(TESTOPTS)
+
+# Really really long testing
+soaktest: $(TESTPROGS)
+ ./testfixture$(TEXE) $(TOP)/test/all.test -soak=1 $(TESTOPTS)
+
+# Do extra testing but not everything.
+fulltestonly: $(TESTPROGS) fuzztest
+ ./testfixture$(TEXE) $(TOP)/test/full.test
+
+# Fuzz testing
+fuzztest: fuzzcheck$(TEXE) $(FUZZDATA) sessionfuzz$(TEXE) $(TOP)/test/sessionfuzz-data1.db
+ ./fuzzcheck$(TEXE) $(FUZZDATA)
+ ./sessionfuzz$(TEXE) run $(TOP)/test/sessionfuzz-data1.db
+
+fastfuzztest: fuzzcheck$(TEXE) $(FUZZDATA) sessionfuzz$(TEXE) $(TOP)/test/sessionfuzz-data1.db
+ ./fuzzcheck$(TEXE) --limit-mem 100M $(FUZZDATA)
+ ./sessionfuzz$(TEXE) run $(TOP)/test/sessionfuzz-data1.db
+
# Fuzz testing under valgrind.  The first prerequisite must be
# fuzzcheck$(TEXE); the previous $(TEXT) was an undefined variable, so
# the prerequisite never matched the built "fuzzcheck$(TEXE)" binary.
valgrindfuzz: fuzzcheck$(TEXE) $(FUZZDATA) sessionfuzz$(TEXE) $(TOP)/test/sessionfuzz-data1.db
	valgrind ./fuzzcheck$(TEXE) --cell-size-check --limit-mem 10M --timeout 600 $(FUZZDATA)
	valgrind ./sessionfuzz$(TEXE) run $(TOP)/test/sessionfuzz-data1.db
+
+# The veryquick.test TCL tests.
+#
+tcltest: ./testfixture$(TEXE)
+ ./testfixture$(TEXE) $(TOP)/test/veryquick.test $(TESTOPTS)
+
+# Minimal testing that runs in less than 3 minutes
+#
+quicktest: ./testfixture$(TEXE)
+ ./testfixture$(TEXE) $(TOP)/test/extraquick.test $(TESTOPTS)
+
+# This is the common case. Run many tests that do not take too long,
+# including fuzzcheck, sqlite3_analyzer, and sqldiff tests.
+#
+test: fastfuzztest sourcetest $(TESTPROGS) tcltest
+
+# Run a test using valgrind. This can take a really long time
+# because valgrind is so much slower than a native machine.
+#
+valgrindtest: $(TESTPROGS) valgrindfuzz
+ OMIT_MISUSE=1 valgrind -v ./testfixture$(TEXE) $(TOP)/test/permutations.test valgrind $(TESTOPTS)
+
+# A very fast test that checks basic sanity. The name comes from
+# the 60s-era electronics testing: "Turn it on and see if smoke
+# comes out."
+#
+smoketest: $(TESTPROGS) fuzzcheck$(TEXE)
+ ./testfixture$(TEXE) $(TOP)/test/main.test $(TESTOPTS)
+
+sqlite3_analyzer.c: sqlite3.c $(TOP)/src/tclsqlite.c $(TOP)/tool/spaceanal.tcl $(TOP)/tool/mkccode.tcl $(TOP)/tool/sqlite3_analyzer.c.in
+ $(TCLSH_CMD) $(TOP)/tool/mkccode.tcl $(TOP)/tool/sqlite3_analyzer.c.in >sqlite3_analyzer.c
+
+sqlite3_analyzer$(TEXE): sqlite3_analyzer.c
+ $(LTLINK) sqlite3_analyzer.c -o $@ $(LIBTCL) $(TLIBS)
+
+sqltclsh.c: sqlite3.c $(TOP)/src/tclsqlite.c $(TOP)/tool/sqltclsh.tcl $(TOP)/ext/misc/appendvfs.c $(TOP)/tool/mkccode.tcl $(TOP)/tool/sqltclsh.c.in
+ $(TCLSH_CMD) $(TOP)/tool/mkccode.tcl $(TOP)/tool/sqltclsh.c.in >sqltclsh.c
+
+sqltclsh$(TEXE): sqltclsh.c
+ $(LTLINK) sqltclsh.c -o $@ $(LIBTCL) $(TLIBS)
+
+sqlite3_expert$(TEXE): $(TOP)/ext/expert/sqlite3expert.h $(TOP)/ext/expert/sqlite3expert.c $(TOP)/ext/expert/expert.c sqlite3.c
+ $(LTLINK) $(TOP)/ext/expert/sqlite3expert.h $(TOP)/ext/expert/sqlite3expert.c $(TOP)/ext/expert/expert.c sqlite3.c -o sqlite3_expert $(TLIBS)
+
+CHECKER_DEPS =\
+ $(TOP)/tool/mkccode.tcl \
+ sqlite3.c \
+ $(TOP)/src/tclsqlite.c \
+ $(TOP)/ext/repair/sqlite3_checker.tcl \
+ $(TOP)/ext/repair/checkindex.c \
+ $(TOP)/ext/repair/checkfreelist.c \
+ $(TOP)/ext/misc/btreeinfo.c \
+ $(TOP)/ext/repair/sqlite3_checker.c.in
+
+sqlite3_checker.c: $(CHECKER_DEPS)
+ $(TCLSH_CMD) $(TOP)/tool/mkccode.tcl $(TOP)/ext/repair/sqlite3_checker.c.in >$@
+
+sqlite3_checker$(TEXE): sqlite3_checker.c
+ $(LTLINK) sqlite3_checker.c -o $@ $(LIBTCL) $(TLIBS)
+
+dbdump$(TEXE): $(TOP)/ext/misc/dbdump.c sqlite3.lo
+ $(LTLINK) -DDBDUMP_STANDALONE -o $@ \
+ $(TOP)/ext/misc/dbdump.c sqlite3.lo $(TLIBS)
+
# Build the dbtotxt utility.  Note the space after $(LTLINK): the
# previous "$(LTLINK)-o" glued "-o" onto the linker command name,
# producing a nonexistent program.
dbtotxt$(TEXE): $(TOP)/tool/dbtotxt.c
	$(LTLINK) -o $@ $(TOP)/tool/dbtotxt.c
+
+showdb$(TEXE): $(TOP)/tool/showdb.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/tool/showdb.c sqlite3.lo $(TLIBS)
+
+showstat4$(TEXE): $(TOP)/tool/showstat4.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/tool/showstat4.c sqlite3.lo $(TLIBS)
+
+showjournal$(TEXE): $(TOP)/tool/showjournal.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/tool/showjournal.c sqlite3.lo $(TLIBS)
+
+showwal$(TEXE): $(TOP)/tool/showwal.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/tool/showwal.c sqlite3.lo $(TLIBS)
+
+showshm$(TEXE): $(TOP)/tool/showshm.c
+ $(LTLINK) -o $@ $(TOP)/tool/showshm.c
+
+index_usage$(TEXE): $(TOP)/tool/index_usage.c sqlite3.lo
+ $(LTLINK) $(SHELL_OPT) -o $@ $(TOP)/tool/index_usage.c sqlite3.lo $(TLIBS)
+
+changeset$(TEXE): $(TOP)/ext/session/changeset.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/ext/session/changeset.c sqlite3.lo $(TLIBS)
+
+changesetfuzz$(TEXE): $(TOP)/ext/session/changesetfuzz.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/ext/session/changesetfuzz.c sqlite3.lo $(TLIBS)
+
+rollback-test$(TEXE): $(TOP)/tool/rollback-test.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/tool/rollback-test.c sqlite3.lo $(TLIBS)
+
# Build the atrc test utility.  The target suffix must be $(TEXE) (the
# target-executable extension used by every other tool rule); $(TEXX)
# is undefined and expanded to nothing.
atrc$(TEXE): $(TOP)/test/atrc.c sqlite3.lo
	$(LTLINK) -o $@ $(TOP)/test/atrc.c sqlite3.lo $(TLIBS)
+
+LogEst$(TEXE): $(TOP)/tool/logest.c sqlite3.h
+ $(LTLINK) -I. -o $@ $(TOP)/tool/logest.c
+
+wordcount$(TEXE): $(TOP)/test/wordcount.c sqlite3.lo
+ $(LTLINK) -o $@ $(TOP)/test/wordcount.c sqlite3.lo $(TLIBS)
+
+speedtest1$(TEXE): $(TOP)/test/speedtest1.c sqlite3.c
+ $(LTLINK) $(ST_OPT) -o $@ $(TOP)/test/speedtest1.c sqlite3.c $(TLIBS)
+
+KV_OPT += -DSQLITE_DIRECT_OVERFLOW_READ
+
+kvtest$(TEXE): $(TOP)/test/kvtest.c sqlite3.c
+ $(LTLINK) $(KV_OPT) -o $@ $(TOP)/test/kvtest.c sqlite3.c $(TLIBS)
+
+rbu$(EXE): $(TOP)/ext/rbu/rbu.c $(TOP)/ext/rbu/sqlite3rbu.c sqlite3.lo
+ $(LTLINK) -I. -o $@ $(TOP)/ext/rbu/rbu.c sqlite3.lo $(TLIBS)
+
+loadfts$(EXE): $(TOP)/tool/loadfts.c libsqlite3.la
+ $(LTLINK) $(TOP)/tool/loadfts.c libsqlite3.la -o $@ $(TLIBS)
+
+# This target will fail if the SQLite amalgamation contains any exported
+# symbols that do not begin with "sqlite3_". It is run as part of the
+# releasetest.tcl script.
+#
# VALIDIDS is an egrep pattern of the allowed exported-symbol prefixes.
# The first nm prints the full symbol list for the build log; the second
# filters out allowed names with "egrep -v", and "test $$? -ne 0"
# succeeds only when egrep matched nothing (egrep exits non-zero when no
# line survives the filter), i.e. when no disallowed symbol is exported.
VALIDIDS=' sqlite3(changeset|changegroup|session)?_'
checksymbols: sqlite3.o
	nm -g --defined-only sqlite3.o
	nm -g --defined-only sqlite3.o | egrep -v $(VALIDIDS); test $$? -ne 0
	echo '0 errors out of 1 tests'
+
# Build the amalgamation-autoconf package. The amalgamation-tarball target builds
+# a tarball named for the version number. Ex: sqlite-autoconf-3110000.tar.gz.
+# The snapshot-tarball target builds a tarball named by the SHA1 hash
+#
+amalgamation-tarball: sqlite3.c
+ TOP=$(TOP) sh $(TOP)/tool/mkautoconfamal.sh --normal
+
+snapshot-tarball: sqlite3.c
+ TOP=$(TOP) sh $(TOP)/tool/mkautoconfamal.sh --snapshot
+
+# The next two rules are used to support the "threadtest" target. Building
+# threadtest runs a few thread-safety tests that are implemented in C. This
+# target is invoked by the releasetest.tcl script.
+#
+THREADTEST3_SRC = $(TOP)/test/threadtest3.c \
+ $(TOP)/test/tt3_checkpoint.c \
+ $(TOP)/test/tt3_index.c \
+ $(TOP)/test/tt3_vacuum.c \
+ $(TOP)/test/tt3_stress.c \
+ $(TOP)/test/tt3_lookaside1.c
+
+threadtest3$(TEXE): sqlite3.lo $(THREADTEST3_SRC)
+ $(LTLINK) $(TOP)/test/threadtest3.c $(TOP)/src/test_multiplex.c sqlite3.lo -o $@ $(TLIBS)
+
+threadtest: threadtest3$(TEXE)
+ ./threadtest3$(TEXE)
+
+releasetest:
+ $(TCLSH_CMD) $(TOP)/test/releasetest.tcl
+
+# Standard install and cleanup targets
+#
+lib_install: libsqlcipher.la
+ $(INSTALL) -d $(DESTDIR)$(libdir)
+ $(LTINSTALL) libsqlcipher.la $(DESTDIR)$(libdir)
+
+install: sqlcipher$(TEXE) lib_install sqlite3.h sqlcipher.pc ${HAVE_TCL:1=tcl_install}
+ $(INSTALL) -d $(DESTDIR)$(bindir)
+ $(LTINSTALL) sqlcipher$(TEXE) $(DESTDIR)$(bindir)
+ $(INSTALL) -d $(DESTDIR)$(includedir)
+ $(INSTALL) -m 0644 sqlite3.h $(DESTDIR)$(includedir)
+ $(INSTALL) -m 0644 $(TOP)/src/sqlite3ext.h $(DESTDIR)$(includedir)
+ $(INSTALL) -d $(DESTDIR)$(pkgconfigdir)
+ $(INSTALL) -m 0644 sqlcipher.pc $(DESTDIR)$(pkgconfigdir)
+
+pkgIndex.tcl:
+ echo 'package ifneeded sqlite3 $(RELEASE) [list load [file join $$dir libtclsqlite3[info sharedlibextension]] sqlite3]' > $@
+tcl_install: lib_install libtclsqlite3.la pkgIndex.tcl
+ $(INSTALL) -d $(DESTDIR)$(TCLLIBDIR)
+ $(LTINSTALL) libtclsqlite3.la $(DESTDIR)$(TCLLIBDIR)
+ rm -f $(DESTDIR)$(TCLLIBDIR)/libtclsqlite3.la $(DESTDIR)$(TCLLIBDIR)/libtclsqlite3.a
+ $(INSTALL) -m 0644 pkgIndex.tcl $(DESTDIR)$(TCLLIBDIR)
+
+clean:
+ rm -f *.lo *.la *.o sqlcipher$(TEXE) libsqlcipher.la
+ rm -f sqlite3.h opcodes.*
+ rm -rf .libs .deps
+ rm -f lemon$(BEXE) lempar.c parse.* sqlite*.tar.gz
+ rm -f mkkeywordhash$(BEXE) keywordhash.h
+ rm -f *.da *.bb *.bbg gmon.out
+ rm -rf tsrc .target_source
+ rm -f tclsqlcipher$(TEXE)
+ rm -f testfixture$(TEXE) test.db
+ rm -f LogEst$(TEXE) fts3view$(TEXE) rollback-test$(TEXE) showdb$(TEXE)
+ rm -f showjournal$(TEXE) showstat4$(TEXE) showwal$(TEXE) speedtest1$(TEXE)
+ rm -f wordcount$(TEXE) changeset$(TEXE)
+ rm -f sqlite3.dll sqlite3.lib sqlite3.exp sqlite3.def
+ rm -f sqlite3.c
+ rm -f sqlite3rc.h
+ rm -f shell.c sqlite3ext.h
+ rm -f sqlite3_analyzer$(TEXE) sqlite3_analyzer.c
+ rm -f sqlite-*-output.vsix
+ rm -f mptester mptester.exe
+ rm -f rbu rbu.exe
+ rm -f srcck1 srcck1.exe
+ rm -f fuzzershell fuzzershell.exe
+ rm -f fuzzcheck fuzzcheck.exe
+ rm -f sqldiff sqldiff.exe
+ rm -f dbhash dbhash.exe
+ rm -f fts5.* fts5parse.*
+
+distclean: clean
+ rm -f config.h config.log config.status libtool Makefile sqlcipher.pc
+
+#
+# Windows section
+#
+dll: sqlite3.dll
+
+REAL_LIBOBJ = $(LIBOBJ:%.lo=.libs/%.o)
+
+$(REAL_LIBOBJ): $(LIBOBJ)
+
+sqlite3.def: $(REAL_LIBOBJ)
+ echo 'EXPORTS' >sqlite3.def
+ nm $(REAL_LIBOBJ) | grep ' T ' | grep ' _sqlite3_' \
+ | sed 's/^.* _//' >>sqlite3.def
+
+sqlite3.dll: $(REAL_LIBOBJ) sqlite3.def
+ $(TCC) -shared -o $@ sqlite3.def \
+ -Wl,"--strip-all" $(REAL_LIBOBJ)
diff --git a/BotZone2.8v1 Android/sqlcipher/Makefile.linux-gcc b/BotZone2.8v1 Android/sqlcipher/Makefile.linux-gcc
new file mode 100644
index 0000000..1491a4b
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/Makefile.linux-gcc
@@ -0,0 +1,123 @@
+#!/usr/make
+#
+# Makefile for SQLITE
+#
+# This is a template makefile for SQLite. Most people prefer to
+# use the autoconf generated "configure" script to generate the
+# makefile automatically. But that does not work for everybody
+# and in every situation. If you are having problems with the
+# "configure" script, you might want to try this makefile as an
+# alternative. Create a copy of this file, edit the parameters
+# below and type "make".
+#
+
+#### The toplevel directory of the source tree. This is the directory
+# that contains this "Makefile.in" and the "configure.in" script.
+#
+TOP = ../sqlite
+
+#### C Compiler and options for use in building executables that
+# will run on the platform that is doing the build.
+#
+BCC = gcc -g -O2
#BCC = /opt/ansic/bin/c89 -O
+
+#### If the target operating system supports the "usleep()" system
+# call, then define the HAVE_USLEEP macro for all C modules.
+#
+#USLEEP =
+USLEEP = -DHAVE_USLEEP=1
+
+#### If you want the SQLite library to be safe for use within a
+# multi-threaded program, then define the following macro
+# appropriately:
+#
+#THREADSAFE = -DTHREADSAFE=1
+THREADSAFE = -DTHREADSAFE=0
+
+#### Specify any extra linker options needed to make the library
+# thread safe
+#
+#THREADLIB = -lpthread
+THREADLIB =
+
+#### Specify any extra libraries needed to access required functions.
+#
+#TLIBS = -lrt # fdatasync on Solaris 8
+TLIBS =
+
+#### Leave SQLITE_DEBUG undefined for maximum speed. Use SQLITE_DEBUG=1
+# to check for memory leaks. Use SQLITE_DEBUG=2 to print a log of all
+# malloc()s and free()s in order to track down memory leaks.
+#
+# SQLite uses some expensive assert() statements in the inner loop.
+# You can make the library go almost twice as fast if you compile
+# with -DNDEBUG=1
+#
+#OPTS = -DSQLITE_DEBUG=2
+#OPTS = -DSQLITE_DEBUG=1
+#OPTS =
+OPTS = -DNDEBUG=1
+OPTS += -DHAVE_FDATASYNC=1
+
+#### The suffix to add to executable files. ".exe" for windows.
+# Nothing for unix.
+#
+#EXE = .exe
+EXE =
+
+#### C Compile and options for use in building executables that
+# will run on the target platform. This is usually the same
+# as BCC, unless you are cross-compiling.
+#
+TCC = gcc -O6
+#TCC = gcc -g -O0 -Wall
+#TCC = gcc -g -O0 -Wall -fprofile-arcs -ftest-coverage
+#TCC = /opt/mingw/bin/i386-mingw32-gcc -O6
+#TCC = /opt/ansic/bin/c89 -O +z -Wl,-a,archive
+
+#### Tools used to build a static library.
+#
+AR = ar cr
+#AR = /opt/mingw/bin/i386-mingw32-ar cr
+RANLIB = ranlib
+#RANLIB = /opt/mingw/bin/i386-mingw32-ranlib
+
+MKSHLIB = gcc -shared
+SO = so
+SHPREFIX = lib
+# SO = dll
+# SHPREFIX =
+
+#### Extra compiler options needed for programs that use the TCL library.
+#
+#TCL_FLAGS =
+#TCL_FLAGS = -DSTATIC_BUILD=1
+TCL_FLAGS = -I/home/drh/tcltk/8.5linux
+#TCL_FLAGS = -I/home/drh/tcltk/8.5win -DSTATIC_BUILD=1
+#TCL_FLAGS = -I/home/drh/tcltk/8.3hpux
+
+#### Linker options needed to link against the TCL library.
+#
+#LIBTCL = -ltcl -lm -ldl
+LIBTCL = /home/drh/tcltk/8.5linux/libtcl8.5g.a -lm -ldl
+#LIBTCL = /home/drh/tcltk/8.5win/libtcl85s.a -lmsvcrt
+#LIBTCL = /home/drh/tcltk/8.3hpux/libtcl8.3.a -ldld -lm -lc
+
+#### Additional objects for SQLite library when TCL support is enabled.
+#TCLOBJ =
+TCLOBJ = tclsqlite.o
+
+#### Compiler options needed for programs that use the readline() library.
+#
+READLINE_FLAGS =
+#READLINE_FLAGS = -DHAVE_READLINE=1 -I/usr/include/readline
+
+#### Linker options needed by programs using readline() must link against.
+#
+LIBREADLINE =
+#LIBREADLINE = -static -lreadline -ltermcap
+
+# You should not have to change anything below this line
+###############################################################################
+include $(TOP)/main.mk
diff --git a/BotZone2.8v1 Android/sqlcipher/Makefile.msc b/BotZone2.8v1 Android/sqlcipher/Makefile.msc
new file mode 100644
index 0000000..2180936
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/Makefile.msc
@@ -0,0 +1,2588 @@
+#
+# nmake Makefile for SQLite
+#
+###############################################################################
+############################## START OF OPTIONS ###############################
+###############################################################################
+
+# The toplevel directory of the source tree. This is the directory
+# that contains this "Makefile.msc".
+#
+TOP = .
+
+# <>
+# Set this non-0 to create and use the SQLite amalgamation file.
+#
+!IFNDEF USE_AMALGAMATION
+USE_AMALGAMATION = 1
+!ENDIF
+# <>
+
+# Set this non-0 to enable full warnings (-W4, etc) when compiling.
+#
+!IFNDEF USE_FULLWARN
+USE_FULLWARN = 1
+!ENDIF
+
+# Set this non-0 to enable treating warnings as errors (-WX, etc) when
+# compiling.
+#
+!IFNDEF USE_FATAL_WARN
+USE_FATAL_WARN = 0
+!ENDIF
+
+# Set this non-0 to enable full runtime error checks (-RTC1, etc). This
+# has no effect if (any) optimizations are enabled.
+#
+!IFNDEF USE_RUNTIME_CHECKS
+USE_RUNTIME_CHECKS = 0
+!ENDIF
+
+# Set this non-0 to create a SQLite amalgamation file that excludes the
+# various built-in extensions.
+#
+!IFNDEF MINIMAL_AMALGAMATION
+MINIMAL_AMALGAMATION = 0
+!ENDIF
+
+# Set this non-0 to use "stdcall" calling convention for the core library
+# and shell executable.
+#
+!IFNDEF USE_STDCALL
+USE_STDCALL = 0
+!ENDIF
+
+# Set this non-0 to have the shell executable link against the core dynamic
+# link library.
+#
+!IFNDEF DYNAMIC_SHELL
+DYNAMIC_SHELL = 0
+!ENDIF
+
+# Set this non-0 to enable extra code that attempts to detect misuse of the
+# SQLite API.
+#
+!IFNDEF API_ARMOR
+API_ARMOR = 0
+!ENDIF
+
+# If necessary, create a list of harmless compiler warnings to disable when
+# compiling the various tools. For the SQLite source code itself, warnings,
+# if any, will be disabled from within it.
+#
+!IFNDEF NO_WARN
+!IF $(USE_FULLWARN)!=0
+NO_WARN = -wd4054 -wd4055 -wd4100 -wd4127 -wd4130 -wd4152 -wd4189 -wd4206
+NO_WARN = $(NO_WARN) -wd4210 -wd4232 -wd4305 -wd4306 -wd4702 -wd4706
+!ENDIF
+!ENDIF
+
+# Set this non-0 to use the library paths and other options necessary for
+# Windows Phone 8.1.
+#
+!IFNDEF USE_WP81_OPTS
+USE_WP81_OPTS = 0
+!ENDIF
+
+# Set this non-0 to split the SQLite amalgamation file into chunks to
+# be used for debugging with Visual Studio.
+#
+!IFNDEF SPLIT_AMALGAMATION
+SPLIT_AMALGAMATION = 0
+!ENDIF
+
+# <>
+# Set this non-0 to have this makefile assume the Tcl shell executable
+# (tclsh*.exe) is available in the PATH. By default, this is disabled
+# for compatibility with older build environments. This setting only
+# applies if TCLSH_CMD is not set manually.
+#
+!IFNDEF USE_TCLSH_IN_PATH
+USE_TCLSH_IN_PATH = 0
+!ENDIF
+
+# Set this non-0 to use zlib, possibly compiling it from source code.
+#
+!IFNDEF USE_ZLIB
+USE_ZLIB = 0
+!ENDIF
+
+# Set this non-0 to build zlib from source code. This is enabled by
+# default and in that case it will be assumed that the ZLIBDIR macro
+# points to the top-level source code directory for zlib.
+#
+!IFNDEF BUILD_ZLIB
+BUILD_ZLIB = 1
+!ENDIF
+
+# Set this non-0 to use the International Components for Unicode (ICU).
+#
+!IFNDEF USE_ICU
+USE_ICU = 0
+!ENDIF
+# <>
+
+# Set this non-0 to dynamically link to the MSVC runtime library.
+#
+!IFNDEF USE_CRT_DLL
+USE_CRT_DLL = 0
+!ENDIF
+
+# Set this non-0 to link to the RPCRT4 library.
+#
+!IFNDEF USE_RPCRT4_LIB
+USE_RPCRT4_LIB = 0
+!ENDIF
+
+# Set this non-0 to generate assembly code listings for the source code
+# files.
+#
+!IFNDEF USE_LISTINGS
+USE_LISTINGS = 0
+!ENDIF
+
+# Set this non-0 to attempt setting the native compiler automatically
+# for cross-compiling the command line tools needed during the compilation
+# process.
+#
+!IFNDEF XCOMPILE
+XCOMPILE = 0
+!ENDIF
+
+# Set this non-0 to use the native libraries paths for cross-compiling
+# the command line tools needed during the compilation process.
+#
+!IFNDEF USE_NATIVE_LIBPATHS
+USE_NATIVE_LIBPATHS = 0
+!ENDIF
+
+# Set this 0 to skip the compiling and embedding of version resources.
+#
+!IFNDEF USE_RC
+USE_RC = 1
+!ENDIF
+
+# Set this non-0 to compile binaries suitable for the WinRT environment.
+# This setting does not apply to any binaries that require Tcl to operate
+# properly (i.e. the test fixture, etc).
+#
+!IFNDEF FOR_WINRT
+FOR_WINRT = 0
+!ENDIF
+
+# Set this non-0 to compile binaries suitable for the UWP environment.
+# This setting does not apply to any binaries that require Tcl to operate
+# properly (i.e. the test fixture, etc).
+#
+!IFNDEF FOR_UWP
+FOR_UWP = 0
+!ENDIF
+
+# Set this non-0 to compile binaries suitable for the Windows 10 platform.
+#
+!IFNDEF FOR_WIN10
+FOR_WIN10 = 0
+!ENDIF
+
+# <>
+# Set this non-0 to skip attempting to look for and/or link with the Tcl
+# runtime library.
+#
+!IFNDEF NO_TCL
+NO_TCL = 0
+!ENDIF
+# <>
+
+# Set this to non-0 to create and use PDBs.
+#
+!IFNDEF SYMBOLS
+SYMBOLS = 1
+!ENDIF
+
+# Set this to non-0 to use the SQLite debugging heap subsystem.
+#
+!IFNDEF MEMDEBUG
+MEMDEBUG = 0
+!ENDIF
+
+# Set this to non-0 to use the Win32 native heap subsystem.
+#
+!IFNDEF WIN32HEAP
+WIN32HEAP = 0
+!ENDIF
+
+# Set this to non-0 to enable OSTRACE() macros, which can be useful when
+# debugging.
+#
+!IFNDEF OSTRACE
+OSTRACE = 0
+!ENDIF
+
+# Set this to one of the following values to enable various debugging
+# features. Each level includes the debugging options from the previous
+# levels. Currently, the recognized values for DEBUG are:
+#
+# 0 == NDEBUG: Disables assert() and other runtime diagnostics.
+# 1 == SQLITE_ENABLE_API_ARMOR: extra attempts to detect misuse of the API.
+# 2 == Disables NDEBUG and all optimizations and then enables PDBs.
+# 3 == SQLITE_DEBUG: Enables various diagnostics messages and code.
+# 4 == SQLITE_WIN32_MALLOC_VALIDATE: Validate the Win32 native heap per call.
+# 5 == SQLITE_DEBUG_OS_TRACE: Enables output from the OSTRACE() macros.
+# 6 == SQLITE_ENABLE_IOTRACE: Enables output from the IOTRACE() macros.
+#
+!IFNDEF DEBUG
+DEBUG = 0
+!ENDIF
+
+# Enable use of available compiler optimizations? Normally, this should be
+# non-zero. Setting this to zero, thus disabling all compiler optimizations,
+# can be useful for testing.
+#
+!IFNDEF OPTIMIZATIONS
+OPTIMIZATIONS = 2
+!ENDIF
+
+# Set this to non-0 to enable support for the session extension.
+#
+!IFNDEF SESSION
+SESSION = 0
+!ENDIF
+
+# Set the source code file to be used by executables and libraries when
+# they need the amalgamation.
+#
+!IFNDEF SQLITE3C
+!IF $(SPLIT_AMALGAMATION)!=0
+SQLITE3C = sqlite3-all.c
+!ELSE
+SQLITE3C = sqlite3.c
+!ENDIF
+!ENDIF
+
+# Set the include code file to be used by executables and libraries when
+# they need SQLite.
+#
+!IFNDEF SQLITE3H
+SQLITE3H = sqlite3.h
+!ENDIF
+
+# This is the name to use for the SQLite dynamic link library (DLL).
+#
+!IFNDEF SQLITE3DLL
+!IF $(FOR_WIN10)!=0
+SQLITE3DLL = winsqlite3.dll
+!ELSE
+SQLITE3DLL = sqlite3.dll
+!ENDIF
+!ENDIF
+
+# This is the name to use for the SQLite import library (LIB).
+#
+!IFNDEF SQLITE3LIB
+!IF $(FOR_WIN10)!=0
+SQLITE3LIB = winsqlite3.lib
+!ELSE
+SQLITE3LIB = sqlite3.lib
+!ENDIF
+!ENDIF
+
+# This is the name to use for the SQLite shell executable (EXE).
+#
+!IFNDEF SQLITE3EXE
+!IF $(FOR_WIN10)!=0
+SQLITE3EXE = winsqlite3shell.exe
+!ELSE
+SQLITE3EXE = sqlite3.exe
+!ENDIF
+!ENDIF
+
+# This is the argument used to set the program database (PDB) file for the
+# SQLite shell executable (EXE).
+#
+!IFNDEF SQLITE3EXEPDB
+!IF $(FOR_WIN10)!=0
+SQLITE3EXEPDB =
+!ELSE
+SQLITE3EXEPDB = /pdb:sqlite3sh.pdb
+!ENDIF
+!ENDIF
+
+# <>
+# These are the names of the customized Tcl header files used by various parts
+# of this makefile when the stdcall calling convention is in use. It is not
+# used for any other purpose.
+#
+!IFNDEF SQLITETCLH
+SQLITETCLH = sqlite_tcl.h
+!ENDIF
+
+!IFNDEF SQLITETCLDECLSH
+SQLITETCLDECLSH = sqlite_tclDecls.h
+!ENDIF
+
+# This is the name to use for the dynamic link library (DLL) containing the
+# Tcl bindings for SQLite.
+#
+!IFNDEF SQLITE3TCLDLL
+SQLITE3TCLDLL = tclsqlite3.dll
+!ENDIF
+
+# These are the additional targets that the targets that integrate with the
+# Tcl library should depend on when compiling, etc.
+#
+!IFNDEF SQLITE_TCL_DEP
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+SQLITE_TCL_DEP = $(SQLITETCLDECLSH) $(SQLITETCLH)
+!ELSE
+SQLITE_TCL_DEP =
+!ENDIF
+!ENDIF
+# <>
+
+# These are the "standard" SQLite compilation options used when compiling for
+# the Windows platform.
+#
+!IFNDEF OPT_FEATURE_FLAGS
+!IF $(MINIMAL_AMALGAMATION)==0
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_FTS3=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_RTREE=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_GEOPOLY=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_JSON1=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_STMTVTAB=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_DBPAGE_VTAB=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_DBSTAT_VTAB=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_INTROSPECTION_PRAGMAS=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_DESERIALIZE=1
+!ENDIF
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_COLUMN_METADATA=1
+!ENDIF
+
+# Should the session extension be enabled? If so, add compilation options
+# to enable it.
+#
+!IF $(SESSION)!=0
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_SESSION=1
+OPT_FEATURE_FLAGS = $(OPT_FEATURE_FLAGS) -DSQLITE_ENABLE_PREUPDATE_HOOK=1
+!ENDIF
+
+# These are the "extended" SQLite compilation options used when compiling for
+# the Windows 10 platform.
+#
+!IFNDEF EXT_FEATURE_FLAGS
+!IF $(FOR_WIN10)!=0
+EXT_FEATURE_FLAGS = $(EXT_FEATURE_FLAGS) -DSQLITE_ENABLE_FTS4=1
+EXT_FEATURE_FLAGS = $(EXT_FEATURE_FLAGS) -DSQLITE_SYSTEM_MALLOC=1
+EXT_FEATURE_FLAGS = $(EXT_FEATURE_FLAGS) -DSQLITE_OMIT_LOCALTIME=1
+!ELSE
+EXT_FEATURE_FLAGS =
+!ENDIF
+!ENDIF
+
+###############################################################################
+############################### END OF OPTIONS ################################
+###############################################################################
+
+# When compiling for the Windows 10 platform, the PLATFORM macro must be set
+# to an appropriate value (e.g. x86, x64, arm, arm64, etc).
+#
+!IF $(FOR_WIN10)!=0
+!IFNDEF PLATFORM
+!ERROR Using the FOR_WIN10 option requires a value for PLATFORM.
+!ENDIF
+!ENDIF
+
+# This assumes that MSVC is always installed in 32-bit Program Files directory
+# and sets the variable for use in locating other 32-bit installs accordingly.
+#
+PROGRAMFILES_X86 = $(VCINSTALLDIR)\..\..
+PROGRAMFILES_X86 = $(PROGRAMFILES_X86:\\=\)
+
+# Check for the predefined command macro CC. This should point to the compiler
+# binary for the target platform. If it is not defined, simply define it to
+# the legacy default value 'cl.exe'.
+#
+!IFNDEF CC
+CC = cl.exe
+!ENDIF
+
+# Check for the predefined command macro CSC. This should point to a working
+# C Sharp compiler binary. If it is not defined, simply define it to the
+# legacy default value 'csc.exe'.
+#
+!IFNDEF CSC
+CSC = csc.exe
+!ENDIF
+
+# Check for the command macro LD. This should point to the linker binary for
+# the target platform. If it is not defined, simply define it to the legacy
+# default value 'link.exe'.
+#
+!IFNDEF LD
+LD = link.exe
+!ENDIF
+
+# Check for the predefined command macro RC. This should point to the resource
+# compiler binary for the target platform. If it is not defined, simply define
+# it to the legacy default value 'rc.exe'.
+#
+!IFNDEF RC
+RC = rc.exe
+!ENDIF
+
+# Check for the MSVC runtime library path macro. Otherwise, this value will
+# default to the 'lib' directory underneath the MSVC installation directory.
+#
+!IFNDEF CRTLIBPATH
+CRTLIBPATH = $(VCINSTALLDIR)\lib
+!ENDIF
+
+CRTLIBPATH = $(CRTLIBPATH:\\=\)
+
+# Check for the command macro NCC. This should point to the compiler binary
+# for the platform the compilation process is taking place on. If it is not
+# defined, simply define it to have the same value as the CC macro. When
+# cross-compiling, it is suggested that this macro be modified via the command
+# line (since nmake itself does not provide a built-in method to guess it).
+# For example, to use the x86 compiler when cross-compiling for x64, a command
+# line similar to the following could be used (all on one line):
+#
+# nmake /f Makefile.msc sqlite3.dll
+# XCOMPILE=1 USE_NATIVE_LIBPATHS=1
+#
+# Alternatively, the full path and file name to the compiler binary for the
+# platform on which the compilation process is taking place may be specified
+# (all on one line):
+#
+# nmake /f Makefile.msc sqlite3.dll
+# "NCC=""%VCINSTALLDIR%\bin\cl.exe"""
+# USE_NATIVE_LIBPATHS=1
+#
+!IFDEF NCC
+NCC = $(NCC:\\=\)
+!ELSEIF $(XCOMPILE)!=0
+NCC = "$(VCINSTALLDIR)\bin\$(CC)"
+NCC = $(NCC:\\=\)
+!ELSE
+NCC = $(CC)
+!ENDIF
+
+# Check for the MSVC native runtime library path macro. Otherwise,
+# this value will default to the 'lib' directory underneath the MSVC
+# installation directory.
+#
+!IFNDEF NCRTLIBPATH
+NCRTLIBPATH = $(VCINSTALLDIR)\lib
+!ENDIF
+
+NCRTLIBPATH = $(NCRTLIBPATH:\\=\)
+
+# Check for the Platform SDK library path macro. Otherwise, this
+# value will default to the 'lib' directory underneath the Windows
+# SDK installation directory (the environment variable used appears
+# to be available when using Visual C++ 2008 or later via the
+# command line).
+#
+!IFNDEF NSDKLIBPATH
+NSDKLIBPATH = $(WINDOWSSDKDIR)\lib
+!ENDIF
+
+NSDKLIBPATH = $(NSDKLIBPATH:\\=\)
+
+# Check for the UCRT library path macro. Otherwise, this value will
+# default to the version-specific, platform-specific 'lib' directory
+# underneath the Windows SDK installation directory.
+#
+!IFNDEF UCRTLIBPATH
+UCRTLIBPATH = $(WINDOWSSDKDIR)\lib\$(WINDOWSSDKLIBVERSION)\ucrt\$(PLATFORM)
+!ENDIF
+
+UCRTLIBPATH = $(UCRTLIBPATH:\\=\)
+
+# C compiler and options for use in building executables that
+# will run on the platform that is doing the build.
+#
+!IF $(USE_FULLWARN)!=0
+BCC = $(NCC) -nologo -W4 -Fd$*.pdb $(CCOPTS) $(BCCOPTS)
+!ELSE
+BCC = $(NCC) -nologo -W3 -Fd$*.pdb $(CCOPTS) $(BCCOPTS)
+!ENDIF
+
+# Check if assembly code listings should be generated for the source
+# code files to be compiled.
+#
+!IF $(USE_LISTINGS)!=0
+BCC = $(BCC) -FAcs
+!ENDIF
+
+# Check if the native library paths should be used when compiling
+# the command line tools used during the compilation process. If
+# so, set the necessary macro now.
+#
+!IF $(USE_NATIVE_LIBPATHS)!=0
+NLTLIBPATHS = "/LIBPATH:$(NCRTLIBPATH)" "/LIBPATH:$(NSDKLIBPATH)"
+
+!IFDEF NUCRTLIBPATH
+NUCRTLIBPATH = $(NUCRTLIBPATH:\\=\)
+NLTLIBPATHS = $(NLTLIBPATHS) "/LIBPATH:$(NUCRTLIBPATH)"
+!ENDIF
+!ENDIF
+
+# C compiler and options for use in building executables that
+# will run on the target platform. (BCC and TCC are usually the
+# same unless you are cross-compiling.)
+#
+!IF $(USE_FULLWARN)!=0
+TCC = $(CC) -nologo -W4 -DINCLUDE_MSVC_H=1 $(CCOPTS) $(TCCOPTS)
+!ELSE
+TCC = $(CC) -nologo -W3 $(CCOPTS) $(TCCOPTS)
+!ENDIF
+
+# Check if warnings should be treated as errors when compiling.
+#
+!IF $(USE_FATAL_WARN)!=0
+TCC = $(TCC) -WX
+!ENDIF
+
+TCC = $(TCC) -DSQLITE_OS_WIN=1 -I. -I$(TOP) -I$(TOP)\src -fp:precise
+RCC = $(RC) -DSQLITE_OS_WIN=1 -I. -I$(TOP) -I$(TOP)\src $(RCOPTS) $(RCCOPTS)
+
+# Check if we want to use the "stdcall" calling convention when compiling.
+# This is not supported by the compilers for non-x86 platforms. It should
+# also be noted here that building any target with these "stdcall" options
+# will most likely fail if the Tcl library is also required. This is due
+# to how the Tcl library functions are declared and exported (i.e. without
+# an explicit calling convention, which results in "cdecl").
+#
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+!IF "$(PLATFORM)"=="x86"
+CORE_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall
+SHELL_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall
+# <>
+TEST_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall -DINCLUDE_SQLITE_TCL_H=1 -DSQLITE_TCLAPI=__cdecl
+# <>
+!ELSE
+!IFNDEF PLATFORM
+CORE_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall
+SHELL_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall
+# <>
+TEST_CCONV_OPTS = -Gz -DSQLITE_CDECL=__cdecl -DSQLITE_APICALL=__stdcall -DSQLITE_CALLBACK=__stdcall -DSQLITE_SYSAPI=__stdcall -DINCLUDE_SQLITE_TCL_H=1 -DSQLITE_TCLAPI=__cdecl
+# <>
+!ELSE
+CORE_CCONV_OPTS =
+SHELL_CCONV_OPTS =
+# <>
+TEST_CCONV_OPTS =
+# <>
+!ENDIF
+!ENDIF
+!ELSE
+CORE_CCONV_OPTS =
+SHELL_CCONV_OPTS =
+# <>
+TEST_CCONV_OPTS =
+# <>
+!ENDIF
+
+# These are additional compiler options used for the core library.
+#
+!IFNDEF CORE_COMPILE_OPTS
+!IF $(DYNAMIC_SHELL)!=0 || $(FOR_WIN10)!=0
+CORE_COMPILE_OPTS = $(CORE_CCONV_OPTS) -DSQLITE_API=__declspec(dllexport)
+!ELSE
+CORE_COMPILE_OPTS = $(CORE_CCONV_OPTS)
+!ENDIF
+!ENDIF
+
+# These are the additional targets that the core library should depend on
+# when linking.
+#
+!IFNDEF CORE_LINK_DEP
+!IF $(DYNAMIC_SHELL)!=0
+CORE_LINK_DEP =
+!ELSEIF $(FOR_WIN10)==0 || "$(PLATFORM)"=="x86"
+CORE_LINK_DEP = sqlite3.def
+!ELSE
+CORE_LINK_DEP =
+!ENDIF
+!ENDIF
+
+# These are additional linker options used for the core library.
+#
+!IFNDEF CORE_LINK_OPTS
+!IF $(DYNAMIC_SHELL)!=0
+CORE_LINK_OPTS =
+!ELSEIF $(FOR_WIN10)==0 || "$(PLATFORM)"=="x86"
+CORE_LINK_OPTS = /DEF:sqlite3.def
+!ELSE
+CORE_LINK_OPTS =
+!ENDIF
+!ENDIF
+
+# These are additional compiler options used for the shell executable.
+#
+!IFNDEF SHELL_COMPILE_OPTS
+!IF $(DYNAMIC_SHELL)!=0 || $(FOR_WIN10)!=0
+SHELL_COMPILE_OPTS = $(SHELL_CCONV_OPTS) -DSQLITE_API=__declspec(dllimport)
+!ELSE
+SHELL_COMPILE_OPTS = $(SHELL_CCONV_OPTS)
+!ENDIF
+!ENDIF
+
+# This is the source code that the shell executable should be compiled
+# with.
+#
+!IFNDEF SHELL_CORE_SRC
+!IF $(DYNAMIC_SHELL)!=0 || $(FOR_WIN10)!=0
+SHELL_CORE_SRC =
+# <>
+!ELSEIF $(USE_AMALGAMATION)==0
+SHELL_CORE_SRC =
+# <>
+!ELSE
+SHELL_CORE_SRC = $(SQLITE3C)
+!ENDIF
+!ENDIF
+
+# This is the core library that the shell executable should depend on.
+#
+!IFNDEF SHELL_CORE_DEP
+!IF $(DYNAMIC_SHELL)!=0 || $(FOR_WIN10)!=0
+SHELL_CORE_DEP = $(SQLITE3DLL)
+# <>
+!ELSEIF $(USE_AMALGAMATION)==0
+SHELL_CORE_DEP = libsqlite3.lib
+# <>
+!ELSE
+SHELL_CORE_DEP =
+!ENDIF
+!ENDIF
+
+# <>
+# If zlib support is enabled, add the dependencies for it.
+#
+!IF $(USE_ZLIB)!=0 && $(BUILD_ZLIB)!=0
+SHELL_CORE_DEP = zlib $(SHELL_CORE_DEP)
+TESTFIXTURE_DEP = zlib $(TESTFIXTURE_DEP)
+!ENDIF
+# <>
+
+# This is the core library that the shell executable should link with.
+#
+!IFNDEF SHELL_CORE_LIB
+!IF $(DYNAMIC_SHELL)!=0 || $(FOR_WIN10)!=0
+SHELL_CORE_LIB = $(SQLITE3LIB)
+# <>
+!ELSEIF $(USE_AMALGAMATION)==0
+SHELL_CORE_LIB = libsqlite3.lib
+# <>
+!ELSE
+SHELL_CORE_LIB =
+!ENDIF
+!ENDIF
+
+# These are additional linker options used for the shell executable.
+#
+!IFNDEF SHELL_LINK_OPTS
+SHELL_LINK_OPTS = $(SHELL_CORE_LIB)
+!ENDIF
+
+# Check if assembly code listings should be generated for the source
+# code files to be compiled.
+#
+!IF $(USE_LISTINGS)!=0
+TCC = $(TCC) -FAcs
+!ENDIF
+
+# When compiling the library for use in the WinRT environment,
+# the following compile-time options must be used as well to
+# disable use of Win32 APIs that are not available and to enable
+# use of Win32 APIs that are specific to Windows 8 and/or WinRT.
+#
+!IF $(FOR_WINRT)!=0
+TCC = $(TCC) -DSQLITE_OS_WINRT=1
+RCC = $(RCC) -DSQLITE_OS_WINRT=1
+TCC = $(TCC) -DWINAPI_FAMILY=WINAPI_FAMILY_APP
+RCC = $(RCC) -DWINAPI_FAMILY=WINAPI_FAMILY_APP
+!ENDIF
+
+# C compiler options for the Windows 10 platform (needs MSVC 2015).
+#
+!IF $(FOR_WIN10)!=0
+TCC = $(TCC) /d2guard4 -D_ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE
+BCC = $(BCC) /d2guard4 -D_ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE
+!ENDIF
+
+# Also, we need to dynamically link to the correct MSVC runtime
+# when compiling for WinRT (e.g. debug or release) OR if the
+# USE_CRT_DLL option is set to force dynamically linking to the
+# MSVC runtime library.
+#
+!IF $(FOR_WINRT)!=0 || $(USE_CRT_DLL)!=0
+!IF $(DEBUG)>1
+TCC = $(TCC) -MDd
+BCC = $(BCC) -MDd
+!ELSE
+TCC = $(TCC) -MD
+BCC = $(BCC) -MD
+!ENDIF
+!ELSE
+!IF $(DEBUG)>1
+TCC = $(TCC) -MTd
+BCC = $(BCC) -MTd
+!ELSE
+TCC = $(TCC) -MT
+BCC = $(BCC) -MT
+!ENDIF
+!ENDIF
+
+# <>
+# The mksqlite3c.tcl and mksqlite3h.tcl scripts will pull in
+# any extension header files by default. For non-amalgamation
+# builds, we need to make sure the compiler can find these.
+#
+!IF $(USE_AMALGAMATION)==0
+TCC = $(TCC) -I$(TOP)\ext\fts3
+RCC = $(RCC) -I$(TOP)\ext\fts3
+TCC = $(TCC) -I$(TOP)\ext\rtree
+RCC = $(RCC) -I$(TOP)\ext\rtree
+TCC = $(TCC) -I$(TOP)\ext\session
+RCC = $(RCC) -I$(TOP)\ext\session
+!ENDIF
+
+# The mksqlite3c.tcl script accepts some options on the command
+# line. When compiling with debugging enabled, some of these
+# options are necessary in order to allow debugging symbols to
+# work correctly with Visual Studio when using the amalgamation.
+#
+!IFNDEF MKSQLITE3C_TOOL
+!IF $(MINIMAL_AMALGAMATION)!=0
+MKSQLITE3C_TOOL = $(TOP)\tool\mksqlite3c-noext.tcl
+!ELSE
+MKSQLITE3C_TOOL = $(TOP)\tool\mksqlite3c.tcl
+!ENDIF
+!ENDIF
+
+!IFNDEF MKSQLITE3C_ARGS
+!IF $(DEBUG)>1
+MKSQLITE3C_ARGS = --linemacros
+!ELSE
+MKSQLITE3C_ARGS =
+!ENDIF
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+MKSQLITE3C_ARGS = $(MKSQLITE3C_ARGS) --useapicall
+!ENDIF
+!ENDIF
+
+# The mksqlite3h.tcl script accepts some options on the command line.
+# When compiling with stdcall support, some of these options are
+# necessary.
+#
+!IFNDEF MKSQLITE3H_ARGS
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+MKSQLITE3H_ARGS = --useapicall
+!ELSE
+MKSQLITE3H_ARGS =
+!ENDIF
+!ENDIF
+# <>
+
+# Define -DNDEBUG to compile without debugging (i.e., for production usage)
+# Omitting the define will cause extra debugging code to be inserted and
+# includes extra comments when "EXPLAIN stmt" is used.
+#
+!IF $(DEBUG)==0
+TCC = $(TCC) -DNDEBUG
+BCC = $(BCC) -DNDEBUG
+RCC = $(RCC) -DNDEBUG
+!ENDIF
+
+!IF $(DEBUG)>0 || $(API_ARMOR)!=0 || $(FOR_WIN10)!=0
+TCC = $(TCC) -DSQLITE_ENABLE_API_ARMOR=1
+RCC = $(RCC) -DSQLITE_ENABLE_API_ARMOR=1
+!ENDIF
+
+!IF $(DEBUG)>2
+TCC = $(TCC) -DSQLITE_DEBUG=1
+RCC = $(RCC) -DSQLITE_DEBUG=1
+!IF $(DYNAMIC_SHELL)==0
+TCC = $(TCC) -DSQLITE_ENABLE_WHERETRACE -DSQLITE_ENABLE_SELECTTRACE
+RCC = $(RCC) -DSQLITE_ENABLE_WHERETRACE -DSQLITE_ENABLE_SELECTTRACE
+!ENDIF
+!ENDIF
+
+!IF $(DEBUG)>4 || $(OSTRACE)!=0
+TCC = $(TCC) -DSQLITE_FORCE_OS_TRACE=1 -DSQLITE_DEBUG_OS_TRACE=1
+RCC = $(RCC) -DSQLITE_FORCE_OS_TRACE=1 -DSQLITE_DEBUG_OS_TRACE=1
+!ENDIF
+
+!IF $(DEBUG)>5
+TCC = $(TCC) -DSQLITE_ENABLE_IOTRACE=1
+RCC = $(RCC) -DSQLITE_ENABLE_IOTRACE=1
+!ENDIF
+
+# Prevent warnings about "insecure" MSVC runtime library functions
+# being used.
+#
+TCC = $(TCC) -D_CRT_SECURE_NO_DEPRECATE -D_CRT_SECURE_NO_WARNINGS
+BCC = $(BCC) -D_CRT_SECURE_NO_DEPRECATE -D_CRT_SECURE_NO_WARNINGS
+RCC = $(RCC) -D_CRT_SECURE_NO_DEPRECATE -D_CRT_SECURE_NO_WARNINGS
+
+# Prevent warnings about "deprecated" POSIX functions being used.
+#
+TCC = $(TCC) -D_CRT_NONSTDC_NO_DEPRECATE -D_CRT_NONSTDC_NO_WARNINGS
+BCC = $(BCC) -D_CRT_NONSTDC_NO_DEPRECATE -D_CRT_NONSTDC_NO_WARNINGS
+RCC = $(RCC) -D_CRT_NONSTDC_NO_DEPRECATE -D_CRT_NONSTDC_NO_WARNINGS
+
+# Use the SQLite debugging heap subsystem?
+#
+!IF $(MEMDEBUG)!=0
+TCC = $(TCC) -DSQLITE_MEMDEBUG=1
+RCC = $(RCC) -DSQLITE_MEMDEBUG=1
+
+# Use native Win32 heap subsystem instead of malloc/free?
+#
+!ELSEIF $(WIN32HEAP)!=0
+TCC = $(TCC) -DSQLITE_WIN32_MALLOC=1
+RCC = $(RCC) -DSQLITE_WIN32_MALLOC=1
+
+# Validate the heap on every call into the native Win32 heap subsystem?
+#
+!IF $(DEBUG)>3
+TCC = $(TCC) -DSQLITE_WIN32_MALLOC_VALIDATE=1
+RCC = $(RCC) -DSQLITE_WIN32_MALLOC_VALIDATE=1
+!ENDIF
+!ENDIF
+
+# <>
+# The locations of the Tcl header and library files. Also, the library that
+# non-stubs enabled programs using Tcl must link against. These variables
+# (TCLINCDIR, TCLLIBDIR, and LIBTCL) may be overridden via the environment
+# prior to running nmake in order to match the actual installed location and
+# version on this machine.
+#
+!IFNDEF TCLVERSION
+TCLVERSION = 86
+!ENDIF
+
+!IFNDEF TCLSUFFIX
+TCLSUFFIX =
+!ENDIF
+
+!IFNDEF TCLDIR
+TCLDIR = $(TOP)\compat\tcl
+!ENDIF
+
+!IFNDEF TCLINCDIR
+TCLINCDIR = $(TCLDIR)\include
+!ENDIF
+
+!IFNDEF TCLLIBDIR
+TCLLIBDIR = $(TCLDIR)\lib
+!ENDIF
+
+!IFNDEF LIBTCL
+LIBTCL = tcl$(TCLVERSION)$(TCLSUFFIX).lib
+!ENDIF
+
+!IFNDEF LIBTCLSTUB
+LIBTCLSTUB = tclstub$(TCLVERSION)$(TCLSUFFIX).lib
+!ENDIF
+
+!IFNDEF LIBTCLPATH
+LIBTCLPATH = $(TCLDIR)\bin
+!ENDIF
+
+# The locations of the zlib header and library files. These variables
+# (ZLIBINCDIR, ZLIBLIBDIR, and ZLIBLIB) may be overridden via the environment
+# prior to running nmake in order to match the actual installed (or source
+# code) location on this machine.
+#
+!IFNDEF ZLIBDIR
+ZLIBDIR = $(TOP)\compat\zlib
+!ENDIF
+
+!IFNDEF ZLIBINCDIR
+ZLIBINCDIR = $(ZLIBDIR)
+!ENDIF
+
+!IFNDEF ZLIBLIBDIR
+ZLIBLIBDIR = $(ZLIBDIR)
+!ENDIF
+
+!IFNDEF ZLIBLIB
+!IF $(DYNAMIC_SHELL)!=0
+ZLIBLIB = zdll.lib
+!ELSE
+ZLIBLIB = zlib.lib
+!ENDIF
+!ENDIF
+
+# The locations of the ICU header and library files. These variables
+# (ICUINCDIR, ICULIBDIR, and LIBICU) may be overridden via the environment
+# prior to running nmake in order to match the actual installed location on
+# this machine.
+#
+!IFNDEF ICUDIR
+ICUDIR = $(TOP)\compat\icu
+!ENDIF
+
+!IFNDEF ICUINCDIR
+ICUINCDIR = $(ICUDIR)\include
+!ENDIF
+
+!IFNDEF ICULIBDIR
+ICULIBDIR = $(ICUDIR)\lib
+!ENDIF
+
+!IFNDEF LIBICU
+LIBICU = icuuc.lib icuin.lib
+!ENDIF
+
+# This is the command to use for tclsh - normally just "tclsh", but we may
+# know the specific version we want to use. This variable (TCLSH_CMD) may be
+# overridden via the environment prior to running nmake in order to select a
+# specific Tcl shell to use.
+#
+!IFNDEF TCLSH_CMD
+!IF $(USE_TCLSH_IN_PATH)!=0 || !EXIST("$(TCLDIR)\bin\tclsh.exe")
+TCLSH_CMD = tclsh
+!ELSE
+TCLSH_CMD = $(TCLDIR)\bin\tclsh.exe
+!ENDIF
+!ENDIF
+# <>
+
+# Compiler options needed for programs that use the readline() library.
+#
+!IFNDEF READLINE_FLAGS
+READLINE_FLAGS = -DHAVE_READLINE=0
+!ENDIF
+
+# The library that programs using readline() must link against.
+#
+!IFNDEF LIBREADLINE
+LIBREADLINE =
+!ENDIF
+
+# Should the database engine be compiled threadsafe
+#
+TCC = $(TCC) -DSQLITE_THREADSAFE=1
+RCC = $(RCC) -DSQLITE_THREADSAFE=1
+
+# Do threads override each other's locks by default (1), or do we test (-1)
+#
+TCC = $(TCC) -DSQLITE_THREAD_OVERRIDE_LOCK=-1
+RCC = $(RCC) -DSQLITE_THREAD_OVERRIDE_LOCK=-1
+
+# Any target libraries which libsqlite must be linked against
+#
+!IFNDEF TLIBS
+TLIBS =
+!ENDIF
+
+# Flags controlling use of the in memory btree implementation
+#
+# SQLITE_TEMP_STORE is 0 to force temporary tables to be in a file, 1 to
+# default to file, 2 to default to memory, and 3 to force temporary
+# tables to always be in memory.
+#
+TCC = $(TCC) -DSQLITE_TEMP_STORE=1
+RCC = $(RCC) -DSQLITE_TEMP_STORE=1
+
+# Enable/disable loadable extensions, and other optional features
+# based on configuration. (-DSQLITE_OMIT*, -DSQLITE_ENABLE*).
+# The same set of OMIT and ENABLE flags should be passed to the
+# LEMON parser generator and the mkkeywordhash tool as well.
+
+# These are the required SQLite compilation options used when compiling for
+# the Windows platform.
+#
+REQ_FEATURE_FLAGS = $(REQ_FEATURE_FLAGS) -DSQLITE_MAX_TRIGGER_DEPTH=100
+
+# If we are linking to the RPCRT4 library, enable features that need it.
+#
+!IF $(USE_RPCRT4_LIB)!=0
+REQ_FEATURE_FLAGS = $(REQ_FEATURE_FLAGS) -DSQLITE_WIN32_USE_UUID=1
+!ENDIF
+
+# Add the required and optional SQLite compilation options into the command
+# lines used to invoke the MSVC code and resource compilers.
+#
+TCC = $(TCC) $(REQ_FEATURE_FLAGS) $(OPT_FEATURE_FLAGS) $(EXT_FEATURE_FLAGS)
+RCC = $(RCC) $(REQ_FEATURE_FLAGS) $(OPT_FEATURE_FLAGS) $(EXT_FEATURE_FLAGS)
+
+# Add in any optional parameters specified on the command line, e.g.
+# nmake /f Makefile.msc all "OPTS=-DSQLITE_ENABLE_FOO=1 -DSQLITE_OMIT_FOO=1"
+#
+TCC = $(TCC) $(OPTS)
+RCC = $(RCC) $(OPTS)
+
+# If compiling for debugging, add some defines.
+#
+!IF $(DEBUG)>1
+TCC = $(TCC) -D_DEBUG
+BCC = $(BCC) -D_DEBUG
+RCC = $(RCC) -D_DEBUG
+!ENDIF
+
+# If optimizations are enabled or disabled (either implicitly or
+# explicitly), add the necessary flags.
+#
+!IF $(DEBUG)>1 || $(OPTIMIZATIONS)==0
+TCC = $(TCC) -Od
+BCC = $(BCC) -Od
+!IF $(USE_RUNTIME_CHECKS)!=0
+TCC = $(TCC) -RTC1
+BCC = $(BCC) -RTC1
+!ENDIF
+!ELSEIF $(OPTIMIZATIONS)>=3
+TCC = $(TCC) -Ox
+BCC = $(BCC) -Ox
+!ELSEIF $(OPTIMIZATIONS)==2
+TCC = $(TCC) -O2
+BCC = $(BCC) -O2
+!ELSEIF $(OPTIMIZATIONS)==1
+TCC = $(TCC) -O1
+BCC = $(BCC) -O1
+!ENDIF
+
+# If symbols are enabled (or compiling for debugging), enable PDBs.
+#
+!IF $(DEBUG)>1 || $(SYMBOLS)!=0
+TCC = $(TCC) -Zi
+BCC = $(BCC) -Zi
+!ENDIF
+
+# <>
+# If zlib support is enabled, add the compiler options for it.
+#
+!IF $(USE_ZLIB)!=0
+TCC = $(TCC) -DSQLITE_HAVE_ZLIB=1
+RCC = $(RCC) -DSQLITE_HAVE_ZLIB=1
+TCC = $(TCC) -I$(ZLIBINCDIR)
+RCC = $(RCC) -I$(ZLIBINCDIR)
+!ENDIF
+
+# If ICU support is enabled, add the compiler options for it.
+#
+!IF $(USE_ICU)!=0
+TCC = $(TCC) -DSQLITE_ENABLE_ICU=1
+RCC = $(RCC) -DSQLITE_ENABLE_ICU=1
+TCC = $(TCC) -I$(TOP)\ext\icu
+RCC = $(RCC) -I$(TOP)\ext\icu
+TCC = $(TCC) -I$(ICUINCDIR)
+RCC = $(RCC) -I$(ICUINCDIR)
+!ENDIF
+# <>
+
+# Command line prefixes for compiling code, compiling resources,
+# linking, etc.
+#
+LTCOMPILE = $(TCC) -Fo$@ -Fd$*.pdb
+LTRCOMPILE = $(RCC) -r
+LTLIB = lib.exe
+LTLINK = $(TCC) -Fe$@
+
+# If requested, link to the RPCRT4 library.
+#
+!IF $(USE_RPCRT4_LIB)!=0
+LTLIBS = $(LTLIBS) rpcrt4.lib
+!ENDIF
+
+# If a platform was set, force the linker to target that.
+# Note that the vcvars*.bat family of batch files typically
+# set this for you. Otherwise, the linker will attempt
+# to deduce the binary type based on the object files.
+!IFDEF PLATFORM
+LTLINKOPTS = $(LTLINKOPTS) /NOLOGO /MACHINE:$(PLATFORM)
+LTLIBOPTS = /NOLOGO /MACHINE:$(PLATFORM)
+!ELSEIF "$(VISUALSTUDIOVERSION)"=="12.0" || \
+ "$(VISUALSTUDIOVERSION)"=="14.0" || \
+ "$(VISUALSTUDIOVERSION)"=="15.0"
+LTLINKOPTS = /NOLOGO /MACHINE:x86
+LTLIBOPTS = /NOLOGO /MACHINE:x86
+!ELSE
+LTLINKOPTS = $(LTLINKOPTS) /NOLOGO
+LTLIBOPTS = /NOLOGO
+!ENDIF
+
+# When compiling for use in the WinRT environment, the following
+# linker option must be used to mark the executable as runnable
+# only in the context of an application container.
+#
+!IF $(FOR_WINRT)!=0
+LTLINKOPTS = $(LTLINKOPTS) /APPCONTAINER
+!IF "$(VISUALSTUDIOVERSION)"=="12.0" || "$(VISUALSTUDIOVERSION)"=="14.0"
+!IFNDEF STORELIBPATH
+!IF "$(PLATFORM)"=="x86"
+STORELIBPATH = $(CRTLIBPATH)\store
+!ELSEIF "$(PLATFORM)"=="x64"
+STORELIBPATH = $(CRTLIBPATH)\store\amd64
+!ELSEIF "$(PLATFORM)"=="ARM"
+STORELIBPATH = $(CRTLIBPATH)\store\arm
+!ELSE
+STORELIBPATH = $(CRTLIBPATH)\store
+!ENDIF
+!ENDIF
+STORELIBPATH = $(STORELIBPATH:\\=\)
+LTLINKOPTS = $(LTLINKOPTS) "/LIBPATH:$(STORELIBPATH)"
+!ENDIF
+!ENDIF
+
+# When compiling for Windows Phone 8.1, an extra library path is
+# required.
+#
+!IF $(USE_WP81_OPTS)!=0
+!IFNDEF WP81LIBPATH
+!IF "$(PLATFORM)"=="x86"
+WP81LIBPATH = $(PROGRAMFILES_X86)\Windows Phone Kits\8.1\lib\x86
+!ELSEIF "$(PLATFORM)"=="ARM"
+WP81LIBPATH = $(PROGRAMFILES_X86)\Windows Phone Kits\8.1\lib\ARM
+!ELSE
+WP81LIBPATH = $(PROGRAMFILES_X86)\Windows Phone Kits\8.1\lib\x86
+!ENDIF
+!ENDIF
+!ENDIF
+
+# When compiling for Windows Phone 8.1, some extra linker options
+# are also required.
+#
+!IF $(USE_WP81_OPTS)!=0
+!IFDEF WP81LIBPATH
+LTLINKOPTS = $(LTLINKOPTS) "/LIBPATH:$(WP81LIBPATH)"
+!ENDIF
+LTLINKOPTS = $(LTLINKOPTS) /DYNAMICBASE
+LTLINKOPTS = $(LTLINKOPTS) WindowsPhoneCore.lib RuntimeObject.lib PhoneAppModelHost.lib
+LTLINKOPTS = $(LTLINKOPTS) /NODEFAULTLIB:kernel32.lib /NODEFAULTLIB:ole32.lib
+!ENDIF
+
+# When compiling for UWP or the Windows 10 platform, some extra linker
+# options are also required.
+#
+!IF $(FOR_UWP)!=0 || $(FOR_WIN10)!=0
+LTLINKOPTS = $(LTLINKOPTS) /DYNAMICBASE /NODEFAULTLIB:kernel32.lib
+LTLINKOPTS = $(LTLINKOPTS) mincore.lib
+!IFDEF PSDKLIBPATH
+LTLINKOPTS = $(LTLINKOPTS) "/LIBPATH:$(PSDKLIBPATH)"
+!ENDIF
+!ENDIF
+
+!IF $(FOR_WIN10)!=0
+LTLINKOPTS = $(LTLINKOPTS) /guard:cf "/LIBPATH:$(UCRTLIBPATH)"
+!IF $(DEBUG)>1
+LTLINKOPTS = $(LTLINKOPTS) /NODEFAULTLIB:libucrtd.lib /DEFAULTLIB:ucrtd.lib
+!ELSE
+LTLINKOPTS = $(LTLINKOPTS) /NODEFAULTLIB:libucrt.lib /DEFAULTLIB:ucrt.lib
+!ENDIF
+!ENDIF
+
+# If either debugging or symbols are enabled, enable PDBs.
+#
+!IF $(DEBUG)>1 || $(SYMBOLS)!=0
+LDFLAGS = /DEBUG $(LDOPTS)
+!ELSE
+LDFLAGS = $(LDOPTS)
+!ENDIF
+
+# <>
+# Start with the Tcl related linker options.
+#
+!IF $(NO_TCL)==0
+TCLLIBPATHS = $(TCLLIBPATHS) /LIBPATH:$(TCLLIBDIR)
+TCLLIBS = $(TCLLIBS) $(LIBTCL)
+!ENDIF
+
+# If zlib support is enabled, add the linker options for it.
+#
+!IF $(USE_ZLIB)!=0
+LTLIBPATHS = $(LTLIBPATHS) /LIBPATH:$(ZLIBLIBDIR)
+LTLIBS = $(LTLIBS) $(ZLIBLIB)
+!ENDIF
+
+# If ICU support is enabled, add the linker options for it.
+#
+!IF $(USE_ICU)!=0
+LTLIBPATHS = $(LTLIBPATHS) /LIBPATH:$(ICULIBDIR)
+LTLIBS = $(LTLIBS) $(LIBICU)
+!ENDIF
+# <>
+
+# You should not have to change anything below this line
+###############################################################################
+
+# <>
+# Object files for the SQLite library (non-amalgamation).
+#
+LIBOBJS0 = vdbe.lo parse.lo alter.lo analyze.lo attach.lo auth.lo \
+ backup.lo bitvec.lo btmutex.lo btree.lo build.lo \
+ callback.lo complete.lo ctime.lo \
+ date.lo dbpage.lo dbstat.lo delete.lo \
+ expr.lo fault.lo fkey.lo \
+ fts3.lo fts3_aux.lo fts3_expr.lo fts3_hash.lo fts3_icu.lo \
+ fts3_porter.lo fts3_snippet.lo fts3_tokenizer.lo fts3_tokenizer1.lo \
+ fts3_tokenize_vtab.lo fts3_unicode.lo fts3_unicode2.lo fts3_write.lo \
+ fts5.lo \
+ func.lo global.lo hash.lo \
+ icu.lo insert.lo json1.lo legacy.lo loadext.lo \
+ main.lo malloc.lo mem0.lo mem1.lo mem2.lo mem3.lo mem5.lo \
+ memdb.lo memjournal.lo \
+ mutex.lo mutex_noop.lo mutex_unix.lo mutex_w32.lo \
+ notify.lo opcodes.lo os.lo os_unix.lo os_win.lo \
+ pager.lo pcache.lo pcache1.lo pragma.lo prepare.lo printf.lo \
+ random.lo resolve.lo rowset.lo rtree.lo \
+ sqlite3session.lo select.lo sqlite3rbu.lo status.lo stmt.lo \
+ table.lo threads.lo tokenize.lo treeview.lo trigger.lo \
+ update.lo upsert.lo util.lo vacuum.lo \
+ vdbeapi.lo vdbeaux.lo vdbeblob.lo vdbemem.lo vdbesort.lo \
+ vdbetrace.lo wal.lo walker.lo where.lo wherecode.lo whereexpr.lo \
+ window.lo utf.lo vtab.lo
+# <>
+
+# Object files for the amalgamation.
+#
+LIBOBJS1 = sqlite3.lo
+
+# Determine the real value of LIBOBJ based on the 'configure' script
+#
+# <>
+!IF $(USE_AMALGAMATION)==0
+LIBOBJ = $(LIBOBJS0)
+!ELSE
+# <>
+LIBOBJ = $(LIBOBJS1)
+# <>
+!ENDIF
+# <>
+
+# Determine if embedded resource compilation and usage are enabled.
+#
+!IF $(USE_RC)!=0
+LIBRESOBJS = sqlite3res.lo
+!ELSE
+LIBRESOBJS =
+!ENDIF
+
+# <>
+# Core source code files, part 1.
+#
+SRC00 = \
+ $(TOP)\src\crypto.c \
+ $(TOP)\src\crypto_cc.c \
+ $(TOP)\src\crypto_impl.c \
+ $(TOP)\src\crypto_libtomcrypt.c \
+ $(TOP)\src\crypto_openssl.c \
+ $(TOP)\src\crypto.h \
+ $(TOP)\src\sqlcipher.h \
+ $(TOP)\src\alter.c \
+ $(TOP)\src\analyze.c \
+ $(TOP)\src\attach.c \
+ $(TOP)\src\auth.c \
+ $(TOP)\src\backup.c \
+ $(TOP)\src\bitvec.c \
+ $(TOP)\src\btmutex.c \
+ $(TOP)\src\btree.c \
+ $(TOP)\src\build.c \
+ $(TOP)\src\callback.c \
+ $(TOP)\src\complete.c \
+ $(TOP)\src\ctime.c \
+ $(TOP)\src\date.c \
+ $(TOP)\src\dbpage.c \
+ $(TOP)\src\dbstat.c \
+ $(TOP)\src\delete.c \
+ $(TOP)\src\expr.c \
+ $(TOP)\src\fault.c \
+ $(TOP)\src\fkey.c \
+ $(TOP)\src\func.c \
+ $(TOP)\src\global.c \
+ $(TOP)\src\hash.c \
+ $(TOP)\src\insert.c \
+ $(TOP)\src\legacy.c \
+ $(TOP)\src\loadext.c \
+ $(TOP)\src\main.c \
+ $(TOP)\src\malloc.c \
+ $(TOP)\src\mem0.c \
+ $(TOP)\src\mem1.c \
+ $(TOP)\src\mem2.c \
+ $(TOP)\src\mem3.c \
+ $(TOP)\src\mem5.c \
+ $(TOP)\src\memdb.c \
+ $(TOP)\src\memjournal.c \
+ $(TOP)\src\mutex.c \
+ $(TOP)\src\mutex_noop.c \
+ $(TOP)\src\mutex_unix.c \
+ $(TOP)\src\mutex_w32.c \
+ $(TOP)\src\notify.c \
+ $(TOP)\src\os.c \
+ $(TOP)\src\os_unix.c \
+ $(TOP)\src\os_win.c
+
+# Core source code files, part 2.
+#
+SRC01 = \
+ $(TOP)\src\pager.c \
+ $(TOP)\src\pcache.c \
+ $(TOP)\src\pcache1.c \
+ $(TOP)\src\pragma.c \
+ $(TOP)\src\prepare.c \
+ $(TOP)\src\printf.c \
+ $(TOP)\src\random.c \
+ $(TOP)\src\resolve.c \
+ $(TOP)\src\rowset.c \
+ $(TOP)\src\select.c \
+ $(TOP)\src\status.c \
+ $(TOP)\src\table.c \
+ $(TOP)\src\threads.c \
+ $(TOP)\src\tclsqlite.c \
+ $(TOP)\src\tokenize.c \
+ $(TOP)\src\treeview.c \
+ $(TOP)\src\trigger.c \
+ $(TOP)\src\utf.c \
+ $(TOP)\src\update.c \
+ $(TOP)\src\upsert.c \
+ $(TOP)\src\util.c \
+ $(TOP)\src\vacuum.c \
+ $(TOP)\src\vdbe.c \
+ $(TOP)\src\vdbeapi.c \
+ $(TOP)\src\vdbeaux.c \
+ $(TOP)\src\vdbeblob.c \
+ $(TOP)\src\vdbemem.c \
+ $(TOP)\src\vdbesort.c \
+ $(TOP)\src\vdbetrace.c \
+ $(TOP)\src\vtab.c \
+ $(TOP)\src\wal.c \
+ $(TOP)\src\walker.c \
+ $(TOP)\src\where.c \
+ $(TOP)\src\wherecode.c \
+ $(TOP)\src\whereexpr.c \
+ $(TOP)\src\window.c
+
+# Core miscellaneous files.
+#
+SRC03 = \
+ $(TOP)\src\parse.y
+
+# Core header files, part 1.
+#
+SRC04 = \
+ $(TOP)\src\btree.h \
+ $(TOP)\src\btreeInt.h \
+ $(TOP)\src\hash.h \
+ $(TOP)\src\hwtime.h \
+ $(TOP)\src\msvc.h \
+ $(TOP)\src\mutex.h \
+ $(TOP)\src\os.h \
+ $(TOP)\src\os_common.h \
+ $(TOP)\src\os_setup.h \
+ $(TOP)\src\os_win.h
+
+# Core header files, part 2.
+#
+SRC05 = \
+ $(TOP)\src\pager.h \
+ $(TOP)\src\pcache.h \
+ $(TOP)\src\pragma.h \
+ $(TOP)\src\sqlite.h.in \
+ $(TOP)\src\sqlite3ext.h \
+ $(TOP)\src\sqliteInt.h \
+ $(TOP)\src\sqliteLimit.h \
+ $(TOP)\src\vdbe.h \
+ $(TOP)\src\vdbeInt.h \
+ $(TOP)\src\vxworks.h \
+ $(TOP)\src\wal.h \
+ $(TOP)\src\whereInt.h
+
+# Extension source code files, part 1.
+#
+SRC06 = \
+ $(TOP)\ext\fts1\fts1.c \
+ $(TOP)\ext\fts1\fts1_hash.c \
+ $(TOP)\ext\fts1\fts1_porter.c \
+ $(TOP)\ext\fts1\fts1_tokenizer1.c \
+ $(TOP)\ext\fts2\fts2.c \
+ $(TOP)\ext\fts2\fts2_hash.c \
+ $(TOP)\ext\fts2\fts2_icu.c \
+ $(TOP)\ext\fts2\fts2_porter.c \
+ $(TOP)\ext\fts2\fts2_tokenizer.c \
+ $(TOP)\ext\fts2\fts2_tokenizer1.c
+
+# Extension source code files, part 2.
+#
+SRC07 = \
+ $(TOP)\ext\fts3\fts3.c \
+ $(TOP)\ext\fts3\fts3_aux.c \
+ $(TOP)\ext\fts3\fts3_expr.c \
+ $(TOP)\ext\fts3\fts3_hash.c \
+ $(TOP)\ext\fts3\fts3_icu.c \
+ $(TOP)\ext\fts3\fts3_porter.c \
+ $(TOP)\ext\fts3\fts3_snippet.c \
+ $(TOP)\ext\fts3\fts3_tokenizer.c \
+ $(TOP)\ext\fts3\fts3_tokenizer1.c \
+ $(TOP)\ext\fts3\fts3_tokenize_vtab.c \
+ $(TOP)\ext\fts3\fts3_unicode.c \
+ $(TOP)\ext\fts3\fts3_unicode2.c \
+ $(TOP)\ext\fts3\fts3_write.c \
+ $(TOP)\ext\icu\icu.c \
+ $(TOP)\ext\rtree\rtree.c \
+ $(TOP)\ext\session\sqlite3session.c \
+ $(TOP)\ext\rbu\sqlite3rbu.c \
+ $(TOP)\ext\misc\json1.c \
+ $(TOP)\ext\misc\stmt.c
+
+# Extension header files, part 1.
+#
+SRC08 = \
+ $(TOP)\ext\fts1\fts1.h \
+ $(TOP)\ext\fts1\fts1_hash.h \
+ $(TOP)\ext\fts1\fts1_tokenizer.h \
+ $(TOP)\ext\fts2\fts2.h \
+ $(TOP)\ext\fts2\fts2_hash.h \
+ $(TOP)\ext\fts2\fts2_tokenizer.h
+
+# Extension header files, part 2.
+#
+SRC09 = \
+ $(TOP)\ext\fts3\fts3.h \
+ $(TOP)\ext\fts3\fts3Int.h \
+ $(TOP)\ext\fts3\fts3_hash.h \
+ $(TOP)\ext\fts3\fts3_tokenizer.h \
+ $(TOP)\ext\icu\sqliteicu.h \
+ $(TOP)\ext\rtree\rtree.h \
+ $(TOP)\ext\rtree\geopoly.c \
+ $(TOP)\ext\rbu\sqlite3rbu.h \
+ $(TOP)\ext\session\sqlite3session.h
+
+# Generated source code files
+#
+SRC10 = \
+ opcodes.c \
+ parse.c
+
+# Generated header files
+#
+SRC11 = \
+ keywordhash.h \
+ opcodes.h \
+ parse.h \
+ shell.c \
+ $(SQLITE3H)
+
+# Generated Tcl header files
+#
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+SRC12 = \
+ $(SQLITETCLH) \
+ $(SQLITETCLDECLSH)
+!ELSE
+SRC12 =
+!ENDIF
+
+# All source code files.
+#
+SRC = $(SRC00) $(SRC01) $(SRC03) $(SRC04) $(SRC05) $(SRC06) $(SRC07) $(SRC08) $(SRC09) $(SRC10) $(SRC11)
+
+# Source code to the test files.
+#
+TESTSRC = \
+ $(TOP)\src\test1.c \
+ $(TOP)\src\test2.c \
+ $(TOP)\src\test3.c \
+ $(TOP)\src\test4.c \
+ $(TOP)\src\test5.c \
+ $(TOP)\src\test6.c \
+ $(TOP)\src\test7.c \
+ $(TOP)\src\test8.c \
+ $(TOP)\src\test9.c \
+ $(TOP)\src\test_autoext.c \
+ $(TOP)\src\test_async.c \
+ $(TOP)\src\test_backup.c \
+ $(TOP)\src\test_bestindex.c \
+ $(TOP)\src\test_blob.c \
+ $(TOP)\src\test_btree.c \
+ $(TOP)\src\test_config.c \
+ $(TOP)\src\test_delete.c \
+ $(TOP)\src\test_demovfs.c \
+ $(TOP)\src\test_devsym.c \
+ $(TOP)\src\test_fs.c \
+ $(TOP)\src\test_func.c \
+ $(TOP)\src\test_hexio.c \
+ $(TOP)\src\test_init.c \
+ $(TOP)\src\test_intarray.c \
+ $(TOP)\src\test_journal.c \
+ $(TOP)\src\test_malloc.c \
+ $(TOP)\src\test_md5.c \
+ $(TOP)\src\test_multiplex.c \
+ $(TOP)\src\test_mutex.c \
+ $(TOP)\src\test_onefile.c \
+ $(TOP)\src\test_osinst.c \
+ $(TOP)\src\test_pcache.c \
+ $(TOP)\src\test_quota.c \
+ $(TOP)\src\test_rtree.c \
+ $(TOP)\src\test_schema.c \
+ $(TOP)\src\test_server.c \
+ $(TOP)\src\test_superlock.c \
+ $(TOP)\src\test_syscall.c \
+ $(TOP)\src\test_tclsh.c \
+ $(TOP)\src\test_tclvar.c \
+ $(TOP)\src\test_thread.c \
+ $(TOP)\src\test_vdbecov.c \
+ $(TOP)\src\test_vfs.c \
+ $(TOP)\src\test_windirent.c \
+ $(TOP)\src\test_window.c \
+ $(TOP)\src\test_wsd.c \
+ $(TOP)\ext\fts3\fts3_term.c \
+ $(TOP)\ext\fts3\fts3_test.c \
+ $(TOP)\ext\rbu\test_rbu.c \
+ $(TOP)\ext\session\test_session.c
+
+# Statically linked extensions.
+#
+TESTEXT = \
+ $(TOP)\ext\expert\sqlite3expert.c \
+ $(TOP)\ext\expert\test_expert.c \
+ $(TOP)\ext\misc\amatch.c \
+ $(TOP)\ext\misc\carray.c \
+ $(TOP)\ext\misc\closure.c \
+ $(TOP)\ext\misc\csv.c \
+ $(TOP)\ext\misc\eval.c \
+ $(TOP)\ext\misc\explain.c \
+ $(TOP)\ext\misc\fileio.c \
+ $(TOP)\ext\misc\fuzzer.c \
+ $(TOP)\ext\fts5\fts5_tcl.c \
+ $(TOP)\ext\fts5\fts5_test_mi.c \
+ $(TOP)\ext\fts5\fts5_test_tok.c \
+ $(TOP)\ext\misc\ieee754.c \
+ $(TOP)\ext\misc\mmapwarm.c \
+ $(TOP)\ext\misc\nextchar.c \
+ $(TOP)\ext\misc\normalize.c \
+ $(TOP)\ext\misc\percentile.c \
+ $(TOP)\ext\misc\prefixes.c \
+ $(TOP)\ext\misc\regexp.c \
+ $(TOP)\ext\misc\remember.c \
+ $(TOP)\ext\misc\series.c \
+ $(TOP)\ext\misc\spellfix.c \
+ $(TOP)\ext\misc\totype.c \
+ $(TOP)\ext\misc\unionvtab.c \
+ $(TOP)\ext\misc\wholenumber.c
+
+# If use of zlib is enabled, add the "zipfile.c" source file.
+#
+!IF $(USE_ZLIB)!=0
+TESTEXT = $(TESTEXT) $(TOP)\ext\misc\zipfile.c
+!ENDIF
+
+# Source code to the library files needed by the test fixture
+# (non-amalgamation)
+#
+TESTSRC2 = \
+ $(SRC00) \
+ $(SRC01) \
+ $(SRC06) \
+ $(SRC07) \
+ $(SRC10) \
+ $(TOP)\ext\async\sqlite3async.c
+
+# Header files used by all library source files.
+#
+HDR = \
+ $(TOP)\src\btree.h \
+ $(TOP)\src\btreeInt.h \
+ $(TOP)\src\hash.h \
+ $(TOP)\src\hwtime.h \
+ keywordhash.h \
+ $(TOP)\src\msvc.h \
+ $(TOP)\src\mutex.h \
+ opcodes.h \
+ $(TOP)\src\os.h \
+ $(TOP)\src\os_common.h \
+ $(TOP)\src\os_setup.h \
+ $(TOP)\src\os_win.h \
+ $(TOP)\src\pager.h \
+ $(TOP)\src\pcache.h \
+ parse.h \
+ $(TOP)\src\pragma.h \
+ $(SQLITE3H) \
+ sqlite3ext.h \
+ $(TOP)\src\sqliteInt.h \
+ $(TOP)\src\sqliteLimit.h \
+ $(TOP)\src\vdbe.h \
+ $(TOP)\src\vdbeInt.h \
+ $(TOP)\src\vxworks.h \
+ $(TOP)\src\whereInt.h
+
+# Header files used by extensions
+#
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\fts1\fts1.h \
+ $(TOP)\ext\fts1\fts1_hash.h \
+ $(TOP)\ext\fts1\fts1_tokenizer.h
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\fts2\fts2.h \
+ $(TOP)\ext\fts2\fts2_hash.h \
+ $(TOP)\ext\fts2\fts2_tokenizer.h
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\fts3\fts3.h \
+ $(TOP)\ext\fts3\fts3Int.h \
+ $(TOP)\ext\fts3\fts3_hash.h \
+ $(TOP)\ext\fts3\fts3_tokenizer.h
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\rtree\rtree.h \
+ $(TOP)\ext\rtree\geopoly.c
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\icu\sqliteicu.h
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\rtree\sqlite3rtree.h
+EXTHDR = $(EXTHDR) \
+ $(TOP)\ext\session\sqlite3session.h
+
+# executables needed for testing
+#
+TESTPROGS = \
+ testfixture.exe \
+ $(SQLITE3EXE) \
+ sqlite3_analyzer.exe \
+ sqlite3_checker.exe \
+ sqldiff.exe \
+ dbhash.exe \
+ sqltclsh.exe
+
+# Databases containing fuzzer test cases
+#
+FUZZDATA = \
+ $(TOP)\test\fuzzdata1.db \
+ $(TOP)\test\fuzzdata2.db \
+ $(TOP)\test\fuzzdata3.db \
+ $(TOP)\test\fuzzdata4.db \
+ $(TOP)\test\fuzzdata5.db \
+ $(TOP)\test\fuzzdata6.db \
+ $(TOP)\test\fuzzdata7.db \
+ $(TOP)\test\fuzzdata8.db
+# <>
+
+# Additional compiler options for the shell. These are only effective
+# when the shell is not being dynamically linked.
+#
+!IF $(DYNAMIC_SHELL)==0 && $(FOR_WIN10)==0
+SHELL_COMPILE_OPTS = $(SHELL_COMPILE_OPTS) -DSQLITE_ENABLE_FTS4=1
+SHELL_COMPILE_OPTS = $(SHELL_COMPILE_OPTS) -DSQLITE_ENABLE_EXPLAIN_COMMENTS=1
+SHELL_COMPILE_OPTS = $(SHELL_COMPILE_OPTS) -DSQLITE_ENABLE_OFFSET_SQL_FUNC=1
+SHELL_COMPILE_OPTS = $(SHELL_COMPILE_OPTS) -DSQLITE_ENABLE_DESERIALIZE=1
+!ENDIF
+
+# <>
+# Extra compiler options for various test tools.
+#
+MPTESTER_COMPILE_OPTS = -DSQLITE_ENABLE_JSON1 -DSQLITE_ENABLE_FTS5
+FUZZERSHELL_COMPILE_OPTS = -DSQLITE_ENABLE_JSON1
+FUZZCHECK_OPTS = -DSQLITE_ENABLE_JSON1 -DSQLITE_ENABLE_MEMSYS5 -DSQLITE_OSS_FUZZ -DSQLITE_MAX_MEMORY=50000000 -DSQLITE_PRINTF_PRECISION_LIMIT=1000
+FUZZCHECK_OPTS = $(FUZZCHECK_OPTS) -DSQLITE_ENABLE_DESERIALIZE
+FUZZCHECK_OPTS = $(FUZZCHECK_OPTS) -DSQLITE_ENABLE_FTS4
+FUZZCHECK_OPTS = $(FUZZCHECK_OPTS) -DSQLITE_ENABLE_RTREE
+FUZZCHECK_OPTS = $(FUZZCHECK_OPTS) -DSQLITE_ENABLE_GEOPOLY
+FUZZCHECK_OPTS = $(FUZZCHECK_OPTS) -DSQLITE_ENABLE_DBSTAT_VTAB
+
+FUZZCHECK_SRC = $(TOP)\test\fuzzcheck.c $(TOP)\test\ossfuzz.c
+OSSSHELL_SRC = $(TOP)\test\ossshell.c $(TOP)\test\ossfuzz.c
+DBFUZZ_COMPILE_OPTS = -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION
+KV_COMPILE_OPTS = -DSQLITE_THREADSAFE=0 -DSQLITE_DIRECT_OVERFLOW_READ
+ST_COMPILE_OPTS = -DSQLITE_THREADSAFE=0
+
+# Standard options to testfixture.
+#
+TESTOPTS = --verbose=file --output=test-out.txt
+
+# Extra targets for the "all" target that require Tcl.
+#
+!IF $(NO_TCL)==0
+ALL_TCL_TARGETS = $(SQLITE3TCLDLL)
+!ELSE
+ALL_TCL_TARGETS =
+!ENDIF
+# <>
+
+# This is the default Makefile target. The objects listed here
+# are what get built when you type just "make" with no arguments.
+#
+core: dll libsqlite3.lib shell
+
+# Targets that require the Tcl library.
+#
+tcl: $(ALL_TCL_TARGETS)
+
+# This Makefile target builds all of the standard binaries.
+#
+all: core tcl
+
+# Dynamic link library section.
+#
+dll: $(SQLITE3DLL)
+
+# Shell executable.
+#
+shell: $(SQLITE3EXE)
+
+# <>
+libsqlite3.lib: $(LIBOBJ)
+ $(LTLIB) $(LTLIBOPTS) /OUT:$@ $(LIBOBJ) $(TLIBS)
+
+libtclsqlite3.lib: tclsqlite.lo libsqlite3.lib
+ $(LTLIB) $(LTLIBOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) /OUT:$@ tclsqlite.lo libsqlite3.lib $(LIBTCLSTUB) $(TLIBS)
+
+tclsqlite3.def: tclsqlite.lo
+ echo EXPORTS > tclsqlite3.def
+ dumpbin /all tclsqlite.lo \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl include "^\s+/EXPORT:_?((?:Sqlite3|Tclsqlite3)_[^@]*)(?:@\d+)?$$" \1 \
+ | sort >> tclsqlite3.def
+
+pkgIndex.tcl: $(TOP)\VERSION
+ for /F %%V in ('type "$(TOP)\VERSION"') do ( \
+ echo package ifneeded sqlite3 @version@ [list load [file join $$dir $(SQLITE3TCLDLL)] sqlite3] \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact @version@ %%V > pkgIndex.tcl \
+ )
+
+$(SQLITE3TCLDLL): libtclsqlite3.lib $(LIBRESOBJS) tclsqlite3.def pkgIndex.tcl
+ $(LD) $(LDFLAGS) $(LTLINKOPTS) $(LTLIBPATHS) /DLL /DEF:tclsqlite3.def /OUT:$@ libtclsqlite3.lib $(LIBRESOBJS) $(LTLIBS) $(TLIBS)
+# <>
+
+$(SQLITE3DLL): $(LIBOBJ) $(LIBRESOBJS) $(CORE_LINK_DEP)
+ $(LD) $(LDFLAGS) $(LTLINKOPTS) $(LTLIBPATHS) /DLL $(CORE_LINK_OPTS) /OUT:$@ $(LIBOBJ) $(LIBRESOBJS) $(LTLIBS) $(TLIBS)
+
+# <>
+sqlite3.def: libsqlite3.lib
+ echo EXPORTS > sqlite3.def
+ dumpbin /all libsqlite3.lib \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl include "^\s+1 _?(sqlite3(?:session|changeset|changegroup|rebaser)?_[^@]*)(?:@\d+)?$$" \1 \
+ | sort >> sqlite3.def
+# <>
+
+$(SQLITE3EXE): shell.c $(SHELL_CORE_DEP) $(LIBRESOBJS) $(SHELL_CORE_SRC) $(SQLITE3H)
+ $(LTLINK) $(SHELL_COMPILE_OPTS) $(READLINE_FLAGS) shell.c $(SHELL_CORE_SRC) \
+ /link $(SQLITE3EXEPDB) $(LDFLAGS) $(LTLINKOPTS) $(SHELL_LINK_OPTS) $(LTLIBPATHS) $(LIBRESOBJS) $(LIBREADLINE) $(LTLIBS) $(TLIBS)
+
+# <>
+sqldiff.exe: $(TOP)\tool\sqldiff.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(TOP)\tool\sqldiff.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+dbhash.exe: $(TOP)\tool\dbhash.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(TOP)\tool\dbhash.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+scrub.exe: $(TOP)\ext\misc\scrub.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSCRUB_STANDALONE=1 $(TOP)\ext\misc\scrub.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+srcck1.exe: $(TOP)\tool\srcck1.c
+ $(BCC) $(NO_WARN) -Fe$@ $(TOP)\tool\srcck1.c
+
+sourcetest: srcck1.exe $(SQLITE3C)
+ srcck1.exe $(SQLITE3C)
+
+fuzzershell.exe: $(TOP)\tool\fuzzershell.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(FUZZERSHELL_COMPILE_OPTS) $(TOP)\tool\fuzzershell.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+dbfuzz.exe: $(TOP)\test\dbfuzz.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(DBFUZZ_COMPILE_OPTS) $(TOP)\test\dbfuzz.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+fuzzcheck.exe: $(FUZZCHECK_SRC) $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(FUZZCHECK_OPTS) $(FUZZCHECK_SRC) $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+ossshell.exe: $(OSSSHELL_SRC) $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(FUZZCHECK_OPTS) $(OSSSHELL_SRC) $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+sessionfuzz.exe: zlib $(TOP)\test\sessionfuzz.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -I$(ZLIBINCDIR) $(TOP)\test\sessionfuzz.c /link $(LDFLAGS) $(LTLINKOPTS) /LIBPATH:$(ZLIBLIBDIR) $(ZLIBLIB)
+
+mptester.exe: $(TOP)\mptest\mptest.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(MPTESTER_COMPILE_OPTS) $(TOP)\mptest\mptest.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+MPTEST1 = mptester mptest.db $(TOP)\mptest\crash01.test --repeat 20
+MPTEST2 = mptester mptest.db $(TOP)\mptest\multiwrite01.test --repeat 20
+
+mptest: mptester.exe
+ del /Q mptest.db 2>NUL
+ $(MPTEST1) --journalmode DELETE
+ $(MPTEST2) --journalmode WAL
+ $(MPTEST1) --journalmode WAL
+ $(MPTEST2) --journalmode PERSIST
+ $(MPTEST1) --journalmode PERSIST
+ $(MPTEST2) --journalmode TRUNCATE
+ $(MPTEST1) --journalmode TRUNCATE
+ $(MPTEST2) --journalmode DELETE
+
+# This target creates a directory named "tsrc" and fills it with
+# copies of all of the C source code and header files needed to
+# build on the target system. Some of the C source code and header
+# files are automatically generated. This target takes care of
+# all that automatic generation.
+#
+.target_source: $(SRC) $(TOP)\tool\vdbe-compress.tcl fts5.c $(SQLITE_TCL_DEP)
+ -rmdir /Q/S tsrc 2>NUL
+ -mkdir tsrc
+ for %i in ($(SRC00)) do copy /Y %i tsrc
+ for %i in ($(SRC01)) do copy /Y %i tsrc
+ for %i in ($(SRC03)) do copy /Y %i tsrc
+ for %i in ($(SRC04)) do copy /Y %i tsrc
+ for %i in ($(SRC05)) do copy /Y %i tsrc
+ for %i in ($(SRC06)) do copy /Y %i tsrc
+ for %i in ($(SRC07)) do copy /Y %i tsrc
+ for %i in ($(SRC08)) do copy /Y %i tsrc
+ for %i in ($(SRC09)) do copy /Y %i tsrc
+ for %i in ($(SRC10)) do copy /Y %i tsrc
+ for %i in ($(SRC11)) do copy /Y %i tsrc
+ for %i in ($(SRC12)) do copy /Y %i tsrc
+ copy /Y fts5.c tsrc
+ copy /Y fts5.h tsrc
+ del /Q tsrc\sqlite.h.in tsrc\parse.y 2>NUL
+ $(TCLSH_CMD) $(TOP)\tool\vdbe-compress.tcl $(OPTS) < tsrc\vdbe.c > vdbe.new
+ move vdbe.new tsrc\vdbe.c
+ echo > .target_source
+
+sqlite3.c: .target_source sqlite3ext.h $(MKSQLITE3C_TOOL)
+ $(TCLSH_CMD) $(MKSQLITE3C_TOOL) $(MKSQLITE3C_ARGS)
+ copy $(TOP)\ext\session\sqlite3session.h .
+
+sqlite3-all.c: sqlite3.c $(TOP)\tool\split-sqlite3c.tcl
+ $(TCLSH_CMD) $(TOP)\tool\split-sqlite3c.tcl
+# <>
+
+# Rule to build the amalgamation
+#
+sqlite3.lo: $(SQLITE3C)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(SQLITE3C)
+
+# <>
+# Rules to build the LEMON compiler generator
+#
+lempar.c: $(TOP)\tool\lempar.c
+ copy $(TOP)\tool\lempar.c .
+
+lemon.exe: $(TOP)\tool\lemon.c lempar.c
+ $(BCC) $(NO_WARN) -Daccess=_access \
+ -Fe$@ $(TOP)\tool\lemon.c /link $(LDFLAGS) $(NLTLINKOPTS) $(NLTLIBPATHS)
+
+# <>
+# Rules to build the source-id generator tool
+#
+mksourceid.exe: $(TOP)\tool\mksourceid.c
+ $(BCC) $(NO_WARN) -Fe$@ $(TOP)\tool\mksourceid.c /link $(LDFLAGS) $(NLTLINKOPTS) $(NLTLIBPATHS)
+
+# Rules to build individual *.lo files from generated *.c files. This
+# applies to:
+#
+# parse.lo
+# opcodes.lo
+#
+parse.lo: parse.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c parse.c
+
+opcodes.lo: opcodes.c
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c opcodes.c
+# <>
+
+# Rule to build the Win32 resources object file.
+#
+!IF $(USE_RC)!=0
+# <>
+$(LIBRESOBJS): $(TOP)\src\sqlite3.rc $(SQLITE3H) $(TOP)\VERSION
+ echo #ifndef SQLITE_RESOURCE_VERSION > sqlite3rc.h
+ for /F %%V in ('type "$(TOP)\VERSION"') do ( \
+ echo #define SQLITE_RESOURCE_VERSION %%V \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact . ^, >> sqlite3rc.h \
+ )
+ echo #endif >> sqlite3rc.h
+ $(LTRCOMPILE) -fo $(LIBRESOBJS) $(TOP)\src\sqlite3.rc
+# <>
+!ENDIF
+
+# <>
+# Rules to build individual *.lo files from files in the src directory.
+#
+alter.lo: $(TOP)\src\alter.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\alter.c
+
+analyze.lo: $(TOP)\src\analyze.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\analyze.c
+
+attach.lo: $(TOP)\src\attach.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\attach.c
+
+auth.lo: $(TOP)\src\auth.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\auth.c
+
+backup.lo: $(TOP)\src\backup.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\backup.c
+
+bitvec.lo: $(TOP)\src\bitvec.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\bitvec.c
+
+btmutex.lo: $(TOP)\src\btmutex.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\btmutex.c
+
+btree.lo: $(TOP)\src\btree.c $(HDR) $(TOP)\src\pager.h
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\btree.c
+
+build.lo: $(TOP)\src\build.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\build.c
+
+callback.lo: $(TOP)\src\callback.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\callback.c
+
+complete.lo: $(TOP)\src\complete.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\complete.c
+
+ctime.lo: $(TOP)\src\ctime.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\ctime.c
+
+date.lo: $(TOP)\src\date.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\date.c
+
+dbpage.lo: $(TOP)\src\dbpage.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\dbpage.c
+
+dbstat.lo: $(TOP)\src\dbstat.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\dbstat.c
+
+delete.lo: $(TOP)\src\delete.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\delete.c
+
+expr.lo: $(TOP)\src\expr.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\expr.c
+
+fault.lo: $(TOP)\src\fault.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\fault.c
+
+fkey.lo: $(TOP)\src\fkey.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\fkey.c
+
+func.lo: $(TOP)\src\func.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\func.c
+
+global.lo: $(TOP)\src\global.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\global.c
+
+hash.lo: $(TOP)\src\hash.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\hash.c
+
+insert.lo: $(TOP)\src\insert.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\insert.c
+
+legacy.lo: $(TOP)\src\legacy.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\legacy.c
+
+loadext.lo: $(TOP)\src\loadext.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\loadext.c
+
+main.lo: $(TOP)\src\main.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\main.c
+
+malloc.lo: $(TOP)\src\malloc.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\malloc.c
+
+mem0.lo: $(TOP)\src\mem0.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mem0.c
+
+mem1.lo: $(TOP)\src\mem1.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mem1.c
+
+mem2.lo: $(TOP)\src\mem2.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mem2.c
+
+mem3.lo: $(TOP)\src\mem3.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mem3.c
+
+mem5.lo: $(TOP)\src\mem5.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mem5.c
+
+memdb.lo: $(TOP)\src\memdb.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\memdb.c
+
+memjournal.lo: $(TOP)\src\memjournal.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\memjournal.c
+
+mutex.lo: $(TOP)\src\mutex.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mutex.c
+
+mutex_noop.lo: $(TOP)\src\mutex_noop.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mutex_noop.c
+
+mutex_unix.lo: $(TOP)\src\mutex_unix.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mutex_unix.c
+
+mutex_w32.lo: $(TOP)\src\mutex_w32.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\mutex_w32.c
+
+notify.lo: $(TOP)\src\notify.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\notify.c
+
+pager.lo: $(TOP)\src\pager.c $(HDR) $(TOP)\src\pager.h
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\pager.c
+
+pcache.lo: $(TOP)\src\pcache.c $(HDR) $(TOP)\src\pcache.h
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\pcache.c
+
+pcache1.lo: $(TOP)\src\pcache1.c $(HDR) $(TOP)\src\pcache.h
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\pcache1.c
+
+os.lo: $(TOP)\src\os.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\os.c
+
+os_unix.lo: $(TOP)\src\os_unix.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\os_unix.c
+
+os_win.lo: $(TOP)\src\os_win.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\os_win.c
+
+pragma.lo: $(TOP)\src\pragma.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\pragma.c
+
+prepare.lo: $(TOP)\src\prepare.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\prepare.c
+
+printf.lo: $(TOP)\src\printf.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\printf.c
+
+random.lo: $(TOP)\src\random.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\random.c
+
+resolve.lo: $(TOP)\src\resolve.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\resolve.c
+
+rowset.lo: $(TOP)\src\rowset.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\rowset.c
+
+select.lo: $(TOP)\src\select.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\select.c
+
+status.lo: $(TOP)\src\status.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\status.c
+
+table.lo: $(TOP)\src\table.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\table.c
+
+threads.lo: $(TOP)\src\threads.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\threads.c
+
+tokenize.lo: $(TOP)\src\tokenize.c keywordhash.h $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\tokenize.c
+
+treeview.lo: $(TOP)\src\treeview.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\treeview.c
+
+trigger.lo: $(TOP)\src\trigger.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\trigger.c
+
+update.lo: $(TOP)\src\update.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\update.c
+
+upsert.lo: $(TOP)\src\upsert.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\upsert.c
+
+utf.lo: $(TOP)\src\utf.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\utf.c
+
+util.lo: $(TOP)\src\util.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\util.c
+
+vacuum.lo: $(TOP)\src\vacuum.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vacuum.c
+
+vdbe.lo: $(TOP)\src\vdbe.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbe.c
+
+vdbeapi.lo: $(TOP)\src\vdbeapi.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbeapi.c
+
+vdbeaux.lo: $(TOP)\src\vdbeaux.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbeaux.c
+
+vdbeblob.lo: $(TOP)\src\vdbeblob.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbeblob.c
+
+vdbemem.lo: $(TOP)\src\vdbemem.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbemem.c
+
+vdbesort.lo: $(TOP)\src\vdbesort.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbesort.c
+
+vdbetrace.lo: $(TOP)\src\vdbetrace.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vdbetrace.c
+
+vtab.lo: $(TOP)\src\vtab.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\vtab.c
+
+wal.lo: $(TOP)\src\wal.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\wal.c
+
+walker.lo: $(TOP)\src\walker.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\walker.c
+
+where.lo: $(TOP)\src\where.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\where.c
+
+wherecode.lo: $(TOP)\src\wherecode.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\wherecode.c
+
+whereexpr.lo: $(TOP)\src\whereexpr.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\whereexpr.c
+
+window.lo: $(TOP)\src\window.c $(HDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) -c $(TOP)\src\window.c
+
+tclsqlite.lo: $(TOP)\src\tclsqlite.c $(HDR) $(SQLITE_TCL_DEP)
+ $(LTCOMPILE) $(NO_WARN) -DUSE_TCL_STUBS=1 -DBUILD_sqlite -I$(TCLINCDIR) -c $(TOP)\src\tclsqlite.c
+
+tclsqlite-shell.lo: $(TOP)\src\tclsqlite.c $(HDR) $(SQLITE_TCL_DEP)
+ $(LTCOMPILE) $(NO_WARN) -DTCLSH -DBUILD_sqlite -I$(TCLINCDIR) -c $(TOP)\src\tclsqlite.c
+
+tclsqlite3.exe: tclsqlite-shell.lo $(SQLITE3C) $(SQLITE3H) $(LIBRESOBJS)
+ $(LTLINK) $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) /OUT:$@ tclsqlite-shell.lo $(LIBRESOBJS) $(TCLLIBS) $(LTLIBS) $(TLIBS)
+
+# Rules to build opcodes.c and opcodes.h
+#
+opcodes.c: opcodes.h $(TOP)\tool\mkopcodec.tcl
+ $(TCLSH_CMD) $(TOP)\tool\mkopcodec.tcl opcodes.h > opcodes.c
+
+opcodes.h: parse.h $(TOP)\src\vdbe.c $(TOP)\tool\mkopcodeh.tcl
+ type parse.h $(TOP)\src\vdbe.c | $(TCLSH_CMD) $(TOP)\tool\mkopcodeh.tcl > opcodes.h
+
+# Rules to build parse.c and parse.h - the outputs of lemon.
+#
+parse.h: parse.c
+
+parse.c: $(TOP)\src\parse.y lemon.exe
+ del /Q parse.y parse.h parse.h.temp 2>NUL
+ copy $(TOP)\src\parse.y .
+ .\lemon.exe $(REQ_FEATURE_FLAGS) $(OPT_FEATURE_FLAGS) $(EXT_FEATURE_FLAGS) $(OPTS) parse.y
+
+$(SQLITE3H): $(TOP)\src\sqlite.h.in $(TOP)\manifest mksourceid.exe $(TOP)\VERSION
+ $(TCLSH_CMD) $(TOP)\tool\mksqlite3h.tcl $(TOP:\=/) > $(SQLITE3H) $(MKSQLITE3H_ARGS)
+
+sqlite3ext.h: .target_source
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+ type tsrc\sqlite3ext.h | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "\(\*\)" "(SQLITE_CALLBACK *)" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "\(\*" "(SQLITE_APICALL *" > sqlite3ext.h
+ copy /Y sqlite3ext.h tsrc\sqlite3ext.h
+!ELSE
+ copy /Y tsrc\sqlite3ext.h sqlite3ext.h
+!ENDIF
+
+mkkeywordhash.exe: $(TOP)\tool\mkkeywordhash.c
+ $(BCC) $(NO_WARN) -Fe$@ $(REQ_FEATURE_FLAGS) $(OPT_FEATURE_FLAGS) $(EXT_FEATURE_FLAGS) $(OPTS) \
+ $(TOP)\tool\mkkeywordhash.c /link $(LDFLAGS) $(NLTLINKOPTS) $(NLTLIBPATHS)
+
+keywordhash.h: $(TOP)\tool\mkkeywordhash.c mkkeywordhash.exe
+ .\mkkeywordhash.exe > keywordhash.h
+
+# Source files that go into making shell.c
+SHELL_SRC = \
+ $(TOP)\src\shell.c.in \
+ $(TOP)\ext\misc\appendvfs.c \
+ $(TOP)\ext\misc\shathree.c \
+ $(TOP)\ext\misc\fileio.c \
+ $(TOP)\ext\misc\completion.c \
+ $(TOP)\ext\expert\sqlite3expert.c \
+ $(TOP)\ext\expert\sqlite3expert.h \
+ $(TOP)\ext\misc\memtrace.c \
+ $(TOP)\src\test_windirent.c
+
+# If use of zlib is enabled, add the "zipfile.c" source file.
+#
+!IF $(USE_ZLIB)!=0
+SHELL_SRC = $(SHELL_SRC) $(TOP)\ext\misc\sqlar.c
+SHELL_SRC = $(SHELL_SRC) $(TOP)\ext\misc\zipfile.c
+!ENDIF
+
+shell.c: $(SHELL_SRC) $(TOP)\tool\mkshellc.tcl
+ $(TCLSH_CMD) $(TOP)\tool\mkshellc.tcl > shell.c
+
+zlib:
+ pushd $(ZLIBDIR) && $(MAKE) /f win32\Makefile.msc clean $(ZLIBLIB) && popd
+
+# Rules to build the extension objects.
+#
+icu.lo: $(TOP)\ext\icu\icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\icu\icu.c
+
+fts2.lo: $(TOP)\ext\fts2\fts2.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2.c
+
+fts2_hash.lo: $(TOP)\ext\fts2\fts2_hash.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2_hash.c
+
+fts2_icu.lo: $(TOP)\ext\fts2\fts2_icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2_icu.c
+
+fts2_porter.lo: $(TOP)\ext\fts2\fts2_porter.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2_porter.c
+
+fts2_tokenizer.lo: $(TOP)\ext\fts2\fts2_tokenizer.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2_tokenizer.c
+
+fts2_tokenizer1.lo: $(TOP)\ext\fts2\fts2_tokenizer1.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts2\fts2_tokenizer1.c
+
+fts3.lo: $(TOP)\ext\fts3\fts3.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3.c
+
+fts3_aux.lo: $(TOP)\ext\fts3\fts3_aux.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_aux.c
+
+fts3_expr.lo: $(TOP)\ext\fts3\fts3_expr.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_expr.c
+
+fts3_hash.lo: $(TOP)\ext\fts3\fts3_hash.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_hash.c
+
+fts3_icu.lo: $(TOP)\ext\fts3\fts3_icu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_icu.c
+
+fts3_snippet.lo: $(TOP)\ext\fts3\fts3_snippet.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_snippet.c
+
+fts3_porter.lo: $(TOP)\ext\fts3\fts3_porter.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_porter.c
+
+fts3_tokenizer.lo: $(TOP)\ext\fts3\fts3_tokenizer.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_tokenizer.c
+
+fts3_tokenizer1.lo: $(TOP)\ext\fts3\fts3_tokenizer1.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_tokenizer1.c
+
+fts3_tokenize_vtab.lo: $(TOP)\ext\fts3\fts3_tokenize_vtab.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_tokenize_vtab.c
+
+fts3_unicode.lo: $(TOP)\ext\fts3\fts3_unicode.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_unicode.c
+
+fts3_unicode2.lo: $(TOP)\ext\fts3\fts3_unicode2.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_unicode2.c
+
+fts3_write.lo: $(TOP)\ext\fts3\fts3_write.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\fts3\fts3_write.c
+
+json1.lo: $(TOP)\ext\misc\json1.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\misc\json1.c
+
+stmt.lo: $(TOP)\ext\misc\stmt.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\misc\stmt.c
+
+rtree.lo: $(TOP)\ext\rtree\rtree.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\rtree\rtree.c
+
+sqlite3session.lo: $(TOP)\ext\session\sqlite3session.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c $(TOP)\ext\session\sqlite3session.c
+
+# FTS5 things
+#
+FTS5_SRC = \
+ $(TOP)\ext\fts5\fts5.h \
+ $(TOP)\ext\fts5\fts5Int.h \
+ $(TOP)\ext\fts5\fts5_aux.c \
+ $(TOP)\ext\fts5\fts5_buffer.c \
+ $(TOP)\ext\fts5\fts5_main.c \
+ $(TOP)\ext\fts5\fts5_config.c \
+ $(TOP)\ext\fts5\fts5_expr.c \
+ $(TOP)\ext\fts5\fts5_hash.c \
+ $(TOP)\ext\fts5\fts5_index.c \
+ fts5parse.c fts5parse.h \
+ $(TOP)\ext\fts5\fts5_storage.c \
+ $(TOP)\ext\fts5\fts5_tokenize.c \
+ $(TOP)\ext\fts5\fts5_unicode2.c \
+ $(TOP)\ext\fts5\fts5_varint.c \
+ $(TOP)\ext\fts5\fts5_vocab.c
+
+LSM1_SRC = \
+ $(TOP)\ext\lsm1\lsm.h \
+ $(TOP)\ext\lsm1\lsmInt.h \
+ $(TOP)\ext\lsm1\lsm_ckpt.c \
+ $(TOP)\ext\lsm1\lsm_file.c \
+ $(TOP)\ext\lsm1\lsm_log.c \
+ $(TOP)\ext\lsm1\lsm_main.c \
+ $(TOP)\ext\lsm1\lsm_mem.c \
+ $(TOP)\ext\lsm1\lsm_mutex.c \
+ $(TOP)\ext\lsm1\lsm_shared.c \
+ $(TOP)\ext\lsm1\lsm_sorted.c \
+ $(TOP)\ext\lsm1\lsm_str.c \
+ $(TOP)\ext\lsm1\lsm_tree.c \
+ $(TOP)\ext\lsm1\lsm_unix.c \
+ $(TOP)\ext\lsm1\lsm_varint.c \
+ $(TOP)\ext\lsm1\lsm_vtab.c \
+ $(TOP)\ext\lsm1\lsm_win32.c
+
+fts5parse.c: $(TOP)\ext\fts5\fts5parse.y lemon.exe
+ copy $(TOP)\ext\fts5\fts5parse.y .
+ del /Q fts5parse.h 2>NUL
+ .\lemon.exe $(REQ_FEATURE_FLAGS) $(OPT_FEATURE_FLAGS) $(EXT_FEATURE_FLAGS) $(OPTS) fts5parse.y
+
+fts5parse.h: fts5parse.c
+
+fts5.c: $(FTS5_SRC)
+ $(TCLSH_CMD) $(TOP)\ext\fts5\tool\mkfts5c.tcl
+ copy $(TOP)\ext\fts5\fts5.h .
+
+lsm1.c: $(LSM1_SRC)
+ $(TCLSH_CMD) $(TOP)\ext\lsm1\tool\mklsm1c.tcl
+ copy $(TOP)\ext\lsm1\lsm.h .
+
+fts5.lo: fts5.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(CORE_COMPILE_OPTS) $(NO_WARN) -DSQLITE_CORE -c fts5.c
+
+fts5_ext.lo: fts5.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) $(NO_WARN) -c fts5.c
+
+fts5.dll: fts5_ext.lo
+ $(LD) $(LDFLAGS) $(LTLINKOPTS) $(LTLIBPATHS) /DLL /OUT:$@ fts5_ext.lo
+
+sqlite3rbu.lo: $(TOP)\ext\rbu\sqlite3rbu.c $(HDR) $(EXTHDR)
+ $(LTCOMPILE) -DSQLITE_CORE -c $(TOP)\ext\rbu\sqlite3rbu.c
+
+# Rules to build the 'testfixture' application.
+#
+# If using the amalgamation, use sqlite3.c directly to build the test
+# fixture. Otherwise link against libsqlite3.lib. (This distinction is
+# necessary because the test fixture requires non-API symbols which are
+# hidden when the library is built via the amalgamation).
+#
+TESTFIXTURE_FLAGS = -DTCLSH_INIT_PROC=sqlite3TestInit -DSQLITE_TEST=1 -DSQLITE_CRASH_TEST=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_SERVER=1 -DSQLITE_PRIVATE=""
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_CORE $(NO_WARN)
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_SERIES_CONSTRAINT_VERIFY=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_DEFAULT_PAGE_SIZE=1024
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_ENABLE_STMTVTAB=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_ENABLE_DBPAGE_VTAB=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_ENABLE_JSON1=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) -DSQLITE_ENABLE_DESERIALIZE=1
+TESTFIXTURE_FLAGS = $(TESTFIXTURE_FLAGS) $(TEST_CCONV_OPTS)
+
+TESTFIXTURE_SRC0 = $(TESTEXT) $(TESTSRC2)
+TESTFIXTURE_SRC1 = $(TESTEXT) $(SQLITE3C)
+!IF $(USE_AMALGAMATION)==0
+TESTFIXTURE_SRC = $(TESTSRC) $(TOP)\src\tclsqlite.c $(TESTFIXTURE_SRC0)
+!ELSE
+TESTFIXTURE_SRC = $(TESTSRC) $(TOP)\src\tclsqlite.c $(TESTFIXTURE_SRC1)
+!ENDIF
+
+!IF $(USE_STDCALL)!=0 || $(FOR_WIN10)!=0
+sqlite_tclDecls.h:
+ echo #ifndef SQLITE_TCLAPI > $(SQLITETCLDECLSH)
+ echo # define SQLITE_TCLAPI >> $(SQLITETCLDECLSH)
+ echo #endif >> $(SQLITETCLDECLSH)
+ type "$(TCLINCDIR)\tclDecls.h" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "^(EXTERN(?: CONST\d+?)?\s+?[^\(]*?\s+?)Tcl_" "\1 SQLITE_TCLAPI Tcl_" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "^(EXTERN\s+?(?:void|VOID)\s+?)TclFreeObj" "\1 SQLITE_TCLAPI TclFreeObj" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "\(\*tcl_" "(SQLITE_TCLAPI *tcl_" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "\(\*tclFreeObj" "(SQLITE_TCLAPI *tclFreeObj" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "\(\*" "(SQLITE_TCLAPI *" >> $(SQLITETCLDECLSH)
+
+sqlite_tcl.h:
+ type "$(TCLINCDIR)\tcl.h" | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact tclDecls.h sqlite_tclDecls.h \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl regsub "typedef (.*?)\(Tcl_" "typedef \1 (SQLITE_TCLAPI Tcl_" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact "void (*freeProc)" "void (SQLITE_TCLAPI *freeProc)" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact "Tcl_HashEntry *(*findProc)" "Tcl_HashEntry *(SQLITE_TCLAPI *findProc)" \
+ | $(TCLSH_CMD) $(TOP)\tool\replace.tcl exact "Tcl_HashEntry *(*createProc)" "Tcl_HashEntry *(SQLITE_TCLAPI *createProc)" >> $(SQLITETCLH)
+!ENDIF
+
+testfixture.exe: $(TESTFIXTURE_SRC) $(TESTFIXTURE_DEP) $(SQLITE3H) $(LIBRESOBJS) $(HDR) $(SQLITE_TCL_DEP)
+ $(LTLINK) -DSQLITE_NO_SYNC=1 $(TESTFIXTURE_FLAGS) \
+ -DBUILD_sqlite -I$(TCLINCDIR) \
+ $(TESTFIXTURE_SRC) \
+ /link $(LDFLAGS) $(LTLINKOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) $(LIBRESOBJS) $(TCLLIBS) $(LTLIBS) $(TLIBS)
+
+extensiontest: testfixture.exe testloadext.dll
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\loadext.test $(TESTOPTS)
+
+coretestprogs: $(TESTPROGS)
+
+testprogs: coretestprogs srcck1.exe fuzzcheck.exe sessionfuzz.exe
+
+fulltest: $(TESTPROGS) fuzztest
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\all.test $(TESTOPTS)
+
+soaktest: $(TESTPROGS)
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\all.test -soak=1 $(TESTOPTS)
+
+fulltestonly: $(TESTPROGS) fuzztest
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\full.test
+
+queryplantest: testfixture.exe shell
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\permutations.test queryplanner $(TESTOPTS)
+
+fuzztest: fuzzcheck.exe
+ .\fuzzcheck.exe $(FUZZDATA)
+
+fastfuzztest: fuzzcheck.exe
+ .\fuzzcheck.exe --limit-mem 100M $(FUZZDATA)
+
+# Minimal testing that runs in less than 3 minutes (on a fast machine)
+#
+quicktest: testfixture.exe sourcetest
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\extraquick.test $(TESTOPTS)
+
+# This is the common case. Run many tests that do not take too long,
+# including fuzzcheck, sqlite3_analyzer, and sqldiff tests.
+#
+test: $(TESTPROGS) sourcetest fastfuzztest
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\veryquick.test $(TESTOPTS)
+
+smoketest: $(TESTPROGS)
+ @set PATH=$(LIBTCLPATH);$(PATH)
+ .\testfixture.exe $(TOP)\test\main.test $(TESTOPTS)
+
+sqlite3_analyzer.c: $(SQLITE3C) $(SQLITE3H) $(TOP)\src\tclsqlite.c $(TOP)\tool\spaceanal.tcl $(TOP)\tool\mkccode.tcl $(TOP)\tool\sqlite3_analyzer.c.in $(SQLITE_TCL_DEP)
+ $(TCLSH_CMD) $(TOP)\tool\mkccode.tcl $(TOP)\tool\sqlite3_analyzer.c.in > $@
+
+sqlite3_analyzer.exe: sqlite3_analyzer.c $(LIBRESOBJS)
+ $(LTLINK) $(NO_WARN) -DBUILD_sqlite -I$(TCLINCDIR) sqlite3_analyzer.c \
+ /link $(LDFLAGS) $(LTLINKOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) $(LIBRESOBJS) $(TCLLIBS) $(LTLIBS) $(TLIBS)
+
+sqltclsh.c: sqlite3.c $(TOP)\src\tclsqlite.c $(TOP)\tool\sqltclsh.tcl $(TOP)\ext\misc\appendvfs.c $(TOP)\tool\mkccode.tcl $(TOP)\tool\sqltclsh.c.in
+ $(TCLSH_CMD) $(TOP)\tool\mkccode.tcl $(TOP)\tool\sqltclsh.c.in >sqltclsh.c
+
+sqltclsh.exe: sqltclsh.c $(SHELL_CORE_DEP) $(LIBRESOBJS)
+ $(LTLINK) $(NO_WARN) -DBUILD_sqlite -I$(TCLINCDIR) sqltclsh.c \
+ /link $(LDFLAGS) $(LTLINKOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) $(LIBRESOBJS) $(TCLLIBS) $(LTLIBS) $(TLIBS)
+
+sqlite3_expert.exe: $(SQLITE3C) $(TOP)\ext\expert\sqlite3expert.h $(TOP)\ext\expert\sqlite3expert.c $(TOP)\ext\expert\expert.c
+ $(LTLINK) $(NO_WARN) $(TOP)\ext\expert\sqlite3expert.c $(TOP)\ext\expert\expert.c $(SQLITE3C) $(TLIBS)
+
+CHECKER_DEPS =\
+ $(TOP)/tool/mkccode.tcl \
+ sqlite3.c \
+ $(TOP)/src/tclsqlite.c \
+ $(TOP)/ext/repair/sqlite3_checker.tcl \
+ $(TOP)/ext/repair/checkindex.c \
+ $(TOP)/ext/repair/checkfreelist.c \
+ $(TOP)/ext/misc/btreeinfo.c \
+ $(TOP)/ext/repair/sqlite3_checker.c.in
+
+sqlite3_checker.c: $(CHECKER_DEPS)
+ $(TCLSH_CMD) $(TOP)\tool\mkccode.tcl $(TOP)\ext\repair\sqlite3_checker.c.in > $@
+
+sqlite3_checker.exe: sqlite3_checker.c $(LIBRESOBJS)
+ $(LTLINK) $(NO_WARN) -DBUILD_sqlite -I$(TCLINCDIR) sqlite3_checker.c \
+ /link $(LDFLAGS) $(LTLINKOPTS) $(TCLLIBPATHS) $(LTLIBPATHS) $(LIBRESOBJS) $(TCLLIBS) $(LTLIBS) $(TLIBS)
+
+dbdump.exe: $(TOP)\ext\misc\dbdump.c $(SQLITE3C) $(SQLITE3H) $(LIBRESOBJS)
+ $(LTLINK) $(NO_WARN) -DDBDUMP_STANDALONE $(TOP)\ext\misc\dbdump.c $(SQLITE3C) \
+ /link $(LDFLAGS) $(LTLINKOPTS) $(LTLIBPATHS) $(LIBRESOBJS) $(LTLIBS)
+
+testloadext.lo: $(TOP)\src\test_loadext.c $(SQLITE3H)
+ $(LTCOMPILE) $(NO_WARN) -c $(TOP)\src\test_loadext.c
+
+testloadext.dll: testloadext.lo
+ $(LD) $(LDFLAGS) $(LTLINKOPTS) $(LTLIBPATHS) /DLL /OUT:$@ testloadext.lo
+
+dbtotxt.exe: $(TOP)\tool\dbtotxt.c
+ $(LTLINK) $(NO_WARN) $(TOP)\tool\dbtotxt.c /link $(LDFLAGS) $(LTLINKOPTS)
+
+showdb.exe: $(TOP)\tool\showdb.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\showdb.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+showstat4.exe: $(TOP)\tool\showstat4.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\showstat4.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+showjournal.exe: $(TOP)\tool\showjournal.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\showjournal.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+showwal.exe: $(TOP)\tool\showwal.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\showwal.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+showshm.exe: $(TOP)\tool\showshm.c
+ $(LTLINK) $(NO_WARN) $(TOP)\tool\showshm.c /link $(LDFLAGS) $(LTLINKOPTS)
+
+index_usage.exe: $(TOP)\tool\index_usage.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\index_usage.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+changeset.exe: $(TOP)\ext\session\changeset.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ -DSQLITE_ENABLE_SESSION=1 -DSQLITE_ENABLE_PREUPDATE_HOOK=1 \
+ $(TOP)\ext\session\changeset.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+changesetfuzz.exe: $(TOP)\ext\session\changesetfuzz.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ -DSQLITE_ENABLE_SESSION=1 -DSQLITE_ENABLE_PREUPDATE_HOOK=1 \
+ $(TOP)\ext\session\changesetfuzz.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+fts3view.exe: $(TOP)\ext\fts3\tool\fts3view.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\ext\fts3\tool\fts3view.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+rollback-test.exe: $(TOP)\tool\rollback-test.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\tool\rollback-test.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+atrc.exe: $(TOP)\test\atrc.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\test\atrc.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+LogEst.exe: $(TOP)\tool\logest.c $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(TOP)\tool\LogEst.c /link $(LDFLAGS) $(LTLINKOPTS)
+
+wordcount.exe: $(TOP)\test\wordcount.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\test\wordcount.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+speedtest1.exe: $(TOP)\test\speedtest1.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(ST_COMPILE_OPTS) -DSQLITE_OMIT_LOAD_EXTENSION \
+ $(TOP)\test\speedtest1.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+kvtest.exe: $(TOP)\test\kvtest.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) $(KV_COMPILE_OPTS) \
+ $(TOP)\test\kvtest.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+rbu.exe: $(TOP)\ext\rbu\rbu.c $(TOP)\ext\rbu\sqlite3rbu.c $(SQLITE3C) $(SQLITE3H)
+ $(LTLINK) $(NO_WARN) -DSQLITE_ENABLE_RBU \
+ $(TOP)\ext\rbu\rbu.c $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
+
+LSMDIR=$(TOP)\ext\lsm1
+!INCLUDE $(LSMDIR)\Makefile.msc
+
+moreclean: clean
+ del /Q $(SQLITE3C) $(SQLITE3H) 2>NUL
+# <>
+
+clean:
+ del /Q *.exp *.lo *.ilk *.lib *.obj *.ncb *.pdb *.sdf *.suo 2>NUL
+ del /Q *.bsc *.def *.cod *.da *.bb *.bbg *.vc gmon.out 2>NUL
+ del /Q $(SQLITE3EXE) $(SQLITE3DLL) Replace.exe 2>NUL
+# <>
+ del /Q $(SQLITE3TCLDLL) pkgIndex.tcl 2>NUL
+ del /Q opcodes.c opcodes.h 2>NUL
+ del /Q lemon.* lempar.c parse.* 2>NUL
+ del /Q mksourceid.* mkkeywordhash.* keywordhash.h 2>NUL
+ del /Q notasharedlib.* 2>NUL
+ -rmdir /Q/S .deps 2>NUL
+ -rmdir /Q/S .libs 2>NUL
+ -rmdir /Q/S tsrc 2>NUL
+ del /Q .target_source 2>NUL
+ del /Q tclsqlite3.exe $(SQLITETCLH) $(SQLITETCLDECLSH) 2>NUL
+ del /Q lsm.dll lsmtest.exe 2>NUL
+ del /Q atrc.exe changesetfuzz.exe dbtotxt.exe index_usage.exe 2>NUL
+ del /Q testloadext.dll 2>NUL
+ del /Q testfixture.exe test.db 2>NUL
+ del /Q LogEst.exe fts3view.exe rollback-test.exe showdb.exe dbdump.exe 2>NUL
+ del /Q changeset.exe 2>NUL
+ del /Q showjournal.exe showstat4.exe showwal.exe speedtest1.exe 2>NUL
+ del /Q mptester.exe wordcount.exe rbu.exe srcck1.exe 2>NUL
+ del /Q sqlite3.c sqlite3-*.c sqlite3.h 2>NUL
+ del /Q sqlite3rc.h 2>NUL
+ del /Q shell.c sqlite3ext.h sqlite3session.h 2>NUL
+ del /Q sqlite3_analyzer.exe sqlite3_analyzer.c 2>NUL
+ del /Q sqlite-*-output.vsix 2>NUL
+ del /Q fuzzershell.exe fuzzcheck.exe sqldiff.exe dbhash.exe 2>NUL
+ del /Q sqltclsh.* 2>NUL
+ del /Q dbfuzz.exe sessionfuzz.exe 2>NUL
+ del /Q kvtest.exe ossshell.exe scrub.exe 2>NUL
+ del /Q showshm.exe sqlite3_checker.* sqlite3_expert.exe 2>NUL
+ del /Q fts5.* fts5parse.* 2>NUL
+ del /Q lsm.h lsm1.c 2>NUL
+# <>
diff --git a/BotZone2.8v1 Android/sqlcipher/README.md b/BotZone2.8v1 Android/sqlcipher/README.md
new file mode 100644
index 0000000..7b39234
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/README.md
@@ -0,0 +1,475 @@
+## SQLCipher
+
+SQLCipher extends the [SQLite](https://www.sqlite.org) database library to add security enhancements that make it more suitable for encrypted local data storage such as on-the-fly encryption, tamper evidence, and key derivation. Based on SQLite, SQLCipher closely tracks SQLite and periodically integrates stable SQLite release features.
+
+SQLCipher is maintained by Zetetic, LLC, and additional information and documentation is available on the official [SQLCipher site](https://www.zetetic.net/sqlcipher/).
+
+## Features
+
+- Fast performance with as little as 5-15% overhead for encryption on many operations
+- 100% of data in the database file is encrypted
+- Good security practices (CBC mode, HMAC, key derivation)
+- Zero-configuration and application level cryptography
+- Algorithms provided by the peer reviewed OpenSSL crypto library.
+- Configurable crypto providers
+
+## Compatibility
+
+SQLCipher maintains database format compatibility within the same major version number so an application on any platform can open databases created by any other application provided the major version of SQLCipher is the same between them. However, major version updates (e.g. from 3.x to 4.x) often include changes to default settings. This means that newer major versions of SQLCipher will not open databases created by older versions without using special settings. For example, SQLCipher 4 introduces many new performance and security enhancements. The new default algorithms, increased KDF iterations, and larger page size mean that SQLCipher 4 will not open databases created by SQLCipher 1.x, 2.x, or 3.x by default. Instead, an application would either need to migrate the older databases to use the new format or enable a special backwards-compatibility mode. The available options are described in SQLCipher's [upgrade documentation](https://discuss.zetetic.net/t/upgrading-to-sqlcipher-4/3283).
+
+SQLCipher is also compatible with standard SQLite databases. When a key is not provided, SQLCipher will behave just like the standard SQLite library. It is also possible to convert from a plaintext database (standard SQLite) to an encrypted SQLCipher database using [ATTACH and the sqlcipher_export() convenience function](https://discuss.zetetic.net/t/how-to-encrypt-a-plaintext-sqlite-database-to-use-sqlcipher-and-avoid-file-is-encrypted-or-is-not-a-database-errors/868).
+
+## Contributions
+
+The SQLCipher team welcomes contributions to the core library. All contributions including pull requests and patches should be based on the `prerelease` branch, and must be accompanied by a [contributor agreement](https://www.zetetic.net/contributions/). For large changes we strongly encourage [discussion](https://discuss.zetetic.net/c/sqlcipher) of the proposed change prior to development and submission.
+
+## Compiling
+
+Building SQLCipher is almost the same as compiling a regular version of
+SQLite with two small exceptions:
+
+ 1. You *must* define `SQLITE_HAS_CODEC` and `SQLITE_TEMP_STORE=2` when building sqlcipher.
+ 2. If compiling against the default OpenSSL crypto provider, you will need to link libcrypto
+
+Example Static linking (replace /opt/local/lib with the path to libcrypto.a). Note in this
+example, `--enable-tempstore=yes` is setting `SQLITE_TEMP_STORE=2` for the build.
+
+ $ ./configure --enable-tempstore=yes CFLAGS="-DSQLITE_HAS_CODEC" \
+ LDFLAGS="/opt/local/lib/libcrypto.a"
+ $ make
+
+Example Dynamic linking
+
+ $ ./configure --enable-tempstore=yes CFLAGS="-DSQLITE_HAS_CODEC" \
+ LDFLAGS="-lcrypto"
+ $ make
+
+## Encrypting a database
+
+To specify an encryption passphrase for the database via the SQL interface you
+use a pragma. The passphrase you enter is passed through PBKDF2 key derivation to
+obtain the encryption key for the database
+
+ PRAGMA key = 'passphrase';
+
+Alternately, you can specify an exact byte sequence using a blob literal. If you
+use this method it is your responsibility to ensure that the data you provide is a
+64 character hex string, which will be converted directly to 32 bytes (256 bits) of
+key data without key derivation.
+
+ PRAGMA key = "x'2DD29CA851E7B56E4697B0E1F08507293D761A05CE4D1B628663F411A8086D99'";
+
+To encrypt a database programmatically you can use the `sqlite3_key` function.
+The data provided in `pKey` is converted to an encryption key according to the
+same rules as `PRAGMA key`.
+
+ int sqlite3_key(sqlite3 *db, const void *pKey, int nKey);
+
+`PRAGMA key` or `sqlite3_key` should be called as the first operation when a database is open.
+
+## Changing a database key
+
+To change the encryption passphrase for an existing database you may use the rekey pragma
+after you've supplied the correct database password;
+
+ PRAGMA key = 'passphrase'; -- start with the existing database passphrase
+ PRAGMA rekey = 'new-passphrase'; -- rekey will reencrypt with the new passphrase
+
+The hex rekey pragma may be used to rekey to a specific binary value
+
+ PRAGMA rekey = "x'2DD29CA851E7B56E4697B0E1F08507293D761A05CE4D1B628663F411A8086D99'";
+
+This can be accomplished programmatically by using sqlite3_rekey;
+
+ sqlite3_rekey(sqlite3 *db, const void *pKey, int nKey)
+
+## Support
+
+The primary avenue for support and discussions is the SQLCipher discuss site:
+
+https://discuss.zetetic.net/c/sqlcipher
+
+Issues or support questions on using SQLCipher should be entered into the
+GitHub Issue tracker:
+
+https://github.com/sqlcipher/sqlcipher/issues
+
+Please DO NOT post issues, support questions, or other problems to blog
+posts about SQLCipher as we do not monitor them frequently.
+
+If you are using SQLCipher in your own software please let us know at
+support@zetetic.net!
+
+## License
+
+Copyright (c) 2016, ZETETIC LLC
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the ZETETIC LLC nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY ZETETIC LLC ''AS IS'' AND ANY
+EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL ZETETIC LLC BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Begin SQLite README.md
+
+
+<h1 align="center">SQLite Source Repository</h1>
+
+This repository contains the complete source code for the
+[SQLite database engine](https://sqlite.org/). Some test scripts
+are also included. However, many other test scripts
+and most of the documentation are managed separately.
+
+## Version Control
+
+SQLite sources are managed using the
+[Fossil](https://www.fossil-scm.org/) distributed version control system
+that was specifically designed and written to support SQLite development.
+The [Fossil repository](https://sqlite.org/src/timeline) contains the urtext.
+
+If you are reading this on GitHub or some other Git repository or service,
+then you are looking at a mirror. The names of check-ins and
+other artifacts in a Git mirror are different from the official
+names for those objects. The official names for check-ins are
+found in a footer on the check-in comment for authorized mirrors.
+The official check-in name can also be seen in the `manifest.uuid` file
+in the root of the tree. Always use the official name, not the
+Git-name, when communicating about an SQLite check-in.
+
+If you pulled your SQLite source code from a secondary source and want to
+verify its integrity, there are hints on how to do that in the
+[Verifying Code Authenticity](#vauth) section below.
+
+## Obtaining The Code
+
+If you do not want to use Fossil, you can download tarballs or ZIP
+archives or [SQLite archives](https://sqlite.org/cli.html#sqlar) as follows:
+
+ * Latest trunk check-in as
+ [Tarball](https://www.sqlite.org/src/tarball/sqlite.tar.gz),
+ [ZIP-archive](https://www.sqlite.org/src/zip/sqlite.zip), or
+ [SQLite-archive](https://www.sqlite.org/src/sqlar/sqlite.sqlar).
+
+ * Latest release as
+ [Tarball](https://www.sqlite.org/src/tarball/sqlite.tar.gz?r=release),
+ [ZIP-archive](https://www.sqlite.org/src/zip/sqlite.zip?r=release), or
+ [SQLite-archive](https://www.sqlite.org/src/sqlar/sqlite.sqlar?r=release).
+
+ * For other check-ins, substitute an appropriate branch name or
+ tag or hash prefix in place of "release" in the URLs of the previous
+ bullet. Or browse the [timeline](https://www.sqlite.org/src/timeline)
+ to locate the check-in desired, click on its information page link,
+ then click on the "Tarball" or "ZIP Archive" links on the information
+ page.
+
+If you do want to use Fossil to check out the source tree,
+first install Fossil version 2.0 or later.
+(Source tarballs and precompiled binaries available
+[here](https://www.fossil-scm.org/fossil/uv/download.html). Fossil is
+a stand-alone program. To install, simply download or build the single
+executable file and put that file someplace on your $PATH.)
+Then run commands like this:
+
+ mkdir ~/sqlite
+ cd ~/sqlite
+ fossil clone https://www.sqlite.org/src sqlite.fossil
+ fossil open sqlite.fossil
+
+After setting up a repository using the steps above, you can always
+update to the latest version using:
+
+ fossil update trunk ;# latest trunk check-in
+ fossil update release ;# latest official release
+
+Or type "fossil ui" to get a web-based user interface.
+
+## Compiling
+
+First create a directory in which to place
+the build products. It is recommended, but not required, that the
+build directory be separate from the source directory. Cd into the
+build directory and then from the build directory run the configure
+script found at the root of the source tree. Then run "make".
+
+For example:
+
+ tar xzf sqlite.tar.gz ;# Unpack the source tree into "sqlite"
+ mkdir bld ;# Build will occur in a sibling directory
+ cd bld ;# Change to the build directory
+ ../sqlite/configure ;# Run the configure script
+ make ;# Run the makefile.
+ make sqlite3.c ;# Build the "amalgamation" source file
+ make test ;# Run some tests (requires Tcl)
+
+See the makefile for additional targets.
+
+The configure script uses autoconf 2.61 and libtool. If the configure
+script does not work out for you, there is a generic makefile named
+"Makefile.linux-gcc" in the top directory of the source tree that you
+can copy and edit to suit your needs. Comments on the generic makefile
+show what changes are needed.
+
+## Using MSVC
+
+On Windows, all applicable build products can be compiled with MSVC.
+First open the command prompt window associated with the desired compiler
+version (e.g. "Developer Command Prompt for VS2013"). Next, use NMAKE
+with the provided "Makefile.msc" to build one of the supported targets.
+
+For example:
+
+ mkdir bld
+ cd bld
+ nmake /f Makefile.msc TOP=..\sqlite
+ nmake /f Makefile.msc sqlite3.c TOP=..\sqlite
+ nmake /f Makefile.msc sqlite3.dll TOP=..\sqlite
+ nmake /f Makefile.msc sqlite3.exe TOP=..\sqlite
+ nmake /f Makefile.msc test TOP=..\sqlite
+
+There are several build options that can be set via the NMAKE command
+line. For example, to build for WinRT, simply add "FOR_WINRT=1" argument
+to the "sqlite3.dll" command line above. When debugging into the SQLite
+code, adding the "DEBUG=1" argument to one of the above command lines is
+recommended.
+
+SQLite does not require [Tcl](http://www.tcl.tk/) to run, but a Tcl installation
+is required by the makefiles (including those for MSVC). SQLite contains
+a lot of generated code and Tcl is used to do much of that code generation.
+
+## Source Code Tour
+
+Most of the core source files are in the **src/** subdirectory. The
+**src/** folder also contains files used to build the "testfixture" test
+harness. The names of the source files used by "testfixture" all begin
+with "test".
+The **src/** also contains the "shell.c" file
+which is the main program for the "sqlite3.exe"
+[command-line shell](https://sqlite.org/cli.html) and
+the "tclsqlite.c" file which implements the
+[Tcl bindings](https://sqlite.org/tclsqlite.html) for SQLite.
+(Historical note: SQLite began as a Tcl
+extension and only later escaped to the wild as an independent library.)
+
+Test scripts and programs are found in the **test/** subdirectory.
+Additional test code is found in other source repositories.
+See [How SQLite Is Tested](http://www.sqlite.org/testing.html) for
+additional information.
+
+The **ext/** subdirectory contains code for extensions. The
+Full-text search engine is in **ext/fts3**. The R-Tree engine is in
+**ext/rtree**. The **ext/misc** subdirectory contains a number of
+smaller, single-file extensions, such as a REGEXP operator.
+
+The **tool/** subdirectory contains various scripts and programs used
+for building generated source code files or for testing or for generating
+accessory programs such as "sqlite3_analyzer(.exe)".
+
+### Generated Source Code Files
+
+Several of the C-language source files used by SQLite are generated from
+other sources rather than being typed in manually by a programmer. This
+section will summarize those automatically-generated files. To create all
+of the automatically-generated files, simply run "make target_source".
+The "target_source" make target will create a subdirectory "tsrc/" and
+fill it with all the source files needed to build SQLite, both
+manually-edited files and automatically-generated files.
+
+The SQLite interface is defined by the **sqlite3.h** header file, which is
+generated from src/sqlite.h.in, ./manifest.uuid, and ./VERSION. The
+[Tcl script](http://www.tcl.tk) at tool/mksqlite3h.tcl does the conversion.
+The manifest.uuid file contains the SHA3 hash of the particular check-in
+and is used to generate the SQLITE\_SOURCE\_ID macro. The VERSION file
+contains the current SQLite version number. The sqlite3.h header is really
+just a copy of src/sqlite.h.in with the source-id and version number inserted
+at just the right spots. Note that comment text in the sqlite3.h file is
+used to generate much of the SQLite API documentation. The Tcl scripts
+used to generate that documentation are in a separate source repository.
+
+The SQL language parser is **parse.c** which is generated from a grammar in
+the src/parse.y file. The conversion of "parse.y" into "parse.c" is done
+by the [lemon](./doc/lemon.html) LALR(1) parser generator. The source code
+for lemon is at tool/lemon.c. Lemon uses the tool/lempar.c file as a
+template for generating its parser.
+Lemon also generates the **parse.h** header file, at the same time it
+generates parse.c.
+
+The **opcodes.h** header file contains macros that define the numbers
+corresponding to opcodes in the "VDBE" virtual machine. The opcodes.h
+file is generated by scanning the src/vdbe.c source file. The
+Tcl script at ./mkopcodeh.tcl does this scan and generates opcodes.h.
+A second Tcl script, ./mkopcodec.tcl, then scans opcodes.h to generate
+the **opcodes.c** source file, which contains a reverse mapping from
+opcode-number to opcode-name that is used for EXPLAIN output.
+
+The **keywordhash.h** header file contains the definition of a hash table
+that maps SQL language keywords (ex: "CREATE", "SELECT", "INDEX", etc.) into
+the numeric codes used by the parse.c parser. The keywordhash.h file is
+generated by a C-language program at tool/mkkeywordhash.c.
+
+The **pragma.h** header file contains various definitions used to parse
+and implement the PRAGMA statements. The header is generated by a
+script **tool/mkpragmatab.tcl**. If you want to add a new PRAGMA, edit
+the **tool/mkpragmatab.tcl** file to insert the information needed by the
+parser for your new PRAGMA, then run the script to regenerate the
+**pragma.h** header file.
+
+### The Amalgamation
+
+All of the individual C source code and header files (both manually-edited
+and automatically-generated) can be combined into a single big source file
+**sqlite3.c** called "the amalgamation". The amalgamation is the recommended
+way of using SQLite in a larger application. Combining all individual
+source code files into a single big source code file allows the C compiler
+to perform more cross-procedure analysis and generate better code. SQLite
+runs about 5% faster when compiled from the amalgamation versus when compiled
+from individual source files.
+
+The amalgamation is generated from the tool/mksqlite3c.tcl Tcl script.
+First, all of the individual source files must be gathered into the tsrc/
+subdirectory (using the equivalent of "make target_source") then the
+tool/mksqlite3c.tcl script is run to copy them all together in just the
+right order while resolving internal "#include" references.
+
+The amalgamation source file is more than 200K lines long. Some symbolic
+debuggers (most notably MSVC) are unable to deal with files longer than 64K
+lines. To work around this, a separate Tcl script, tool/split-sqlite3c.tcl,
+can be run on the amalgamation to break it up into a single small C file
+called **sqlite3-all.c** that does #include on about seven other files
+named **sqlite3-1.c**, **sqlite3-2.c**, ..., **sqlite3-7.c**. In this way,
+all of the source code is contained within a single translation unit so
+that the compiler can do extra cross-procedure optimization, but no
+individual source file exceeds 32K lines in length.
+
+## How It All Fits Together
+
+SQLite is modular in design.
+See the [architectural description](http://www.sqlite.org/arch.html)
+for details. Other documents that are useful in
+helping to understand how SQLite works include the
+[file format](http://www.sqlite.org/fileformat2.html) description,
+the [virtual machine](http://www.sqlite.org/opcode.html) that runs
+prepared statements, the description of
+[how transactions work](http://www.sqlite.org/atomiccommit.html), and
+the [overview of the query planner](http://www.sqlite.org/optoverview.html).
+
+Years of effort have gone into optimizing SQLite, both
+for small size and high performance. And optimizations tend to result in
+complex code. So there is a lot of complexity in the current SQLite
+implementation. It will not be the easiest library in the world to hack.
+
+Key files:
+
+ * **sqlite.h.in** - This file defines the public interface to the SQLite
+ library. Readers will need to be familiar with this interface before
+ trying to understand how the library works internally.
+
+ * **sqliteInt.h** - this header file defines many of the data objects
+ used internally by SQLite. In addition to "sqliteInt.h", some
+ subsystems have their own header files.
+
+ * **parse.y** - This file describes the LALR(1) grammar that SQLite uses
+ to parse SQL statements, and the actions that are taken at each step
+ in the parsing process.
+
+ * **vdbe.c** - This file implements the virtual machine that runs
+ prepared statements. There are various helper files whose names
+ begin with "vdbe". The VDBE has access to the vdbeInt.h header file
+ which defines internal data objects. The rest of SQLite interacts
+ with the VDBE through an interface defined by vdbe.h.
+
+ * **where.c** - This file (together with its helper files named
+ by "where*.c") analyzes the WHERE clause and generates
+ virtual machine code to run queries efficiently. This file is
+ sometimes called the "query optimizer". It has its own private
+ header file, whereInt.h, that defines data objects used internally.
+
+ * **btree.c** - This file contains the implementation of the B-Tree
+ storage engine used by SQLite. The interface to the rest of the system
+ is defined by "btree.h". The "btreeInt.h" header defines objects
+ used internally by btree.c and not published to the rest of the system.
+
+ * **pager.c** - This file contains the "pager" implementation, the
+ module that implements transactions. The "pager.h" header file
+ defines the interface between pager.c and the rest of the system.
+
+ * **os_unix.c** and **os_win.c** - These two files implement the interface
+ between SQLite and the underlying operating system using the run-time
+ pluggable VFS interface.
+
+ * **shell.c.in** - This file is not part of the core SQLite library. This
+ is the file that, when linked against sqlite3.a, generates the
+ "sqlite3.exe" command-line shell. The "shell.c.in" file is transformed
+ into "shell.c" as part of the build process.
+
+ * **tclsqlite.c** - This file implements the Tcl bindings for SQLite. It
+ is not part of the core SQLite library. But as most of the tests in this
+ repository are written in Tcl, the Tcl language bindings are important.
+
+ * **test*.c** - Files in the src/ folder that begin with "test" go into
+ building the "testfixture.exe" program. The testfixture.exe program is
+ an enhanced Tcl shell. The testfixture.exe program runs scripts in the
+ test/ folder to validate the core SQLite code. The testfixture program
+ (and some other test programs too) is built and run when you type
+ "make test".
+
+ * **ext/misc/json1.c** - This file implements the various JSON functions
+ that are built into SQLite.
+
+There are many other source files. Each has a succinct header comment that
+describes its purpose and role within the larger system.
+
+
+## Verifying Code Authenticity
+
+If you obtained an SQLite source tree from a secondary source, such as a
+GitHub mirror, and you want to verify that it has not been altered, there
+are a couple of ways to do that.
+
+If you have a release version of SQLite, and you are using the
+`sqlite3.c` amalgamation, then SHA3-256 hashes for the amalgamation are
+available in the [change log](https://www.sqlite.org/changes.html) on
+the official website. After building the `sqlite3.c` file, you can check
+that it is authentic by comparing the hash. This does not ensure that the
+test scripts are unaltered, but it does validate the deliverable part of
+the code and the verification process only involves computing and
+comparing a single hash.
+
+For versions other than an official release, or if you are building the
+`sqlite3.c` amalgamation using non-standard build options, the verification
+process is a little more involved. The `manifest` file at the root directory
+of the source tree
+contains either a SHA3-256 hash (for newer files) or a SHA1 hash (for
+older files) for every source file in the repository. You can write a script
+that extracts hashes from `manifest` and verifies the hashes against the
+corresponding files in the source tree. The SHA3-256 hash of the `manifest`
+file itself is the official name of the version of the source tree that you
+have. The `manifest.uuid` file should contain the SHA3-256 hash of the
+`manifest` file. If all of the above hash comparisons are correct, then
+you can be confident that your source tree is authentic and unadulterated.
+
+The format of the `manifest` file should be mostly self-explanatory, but
+if you want details, they are available
+[here](https://fossil-scm.org/fossil/doc/trunk/www/fileformat.wiki#manifest).
+
+## Contacts
+
+The main SQLite website is [http://www.sqlite.org/](http://www.sqlite.org/)
+with geographically distributed backups at
+[http://www2.sqlite.org/](http://www2.sqlite.org) and
+[http://www3.sqlite.org/](http://www3.sqlite.org).
diff --git a/BotZone2.8v1 Android/sqlcipher/VERSION b/BotZone2.8v1 Android/sqlcipher/VERSION
new file mode 100644
index 0000000..a72fd67
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/VERSION
@@ -0,0 +1 @@
+3.28.0
diff --git a/BotZone2.8v1 Android/sqlcipher/aclocal.m4 b/BotZone2.8v1 Android/sqlcipher/aclocal.m4
new file mode 100644
index 0000000..79ce10f
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/aclocal.m4
@@ -0,0 +1,9043 @@
+# generated automatically by aclocal 1.16.1 -*- Autoconf -*-
+
+# Copyright (C) 1996-2018 Free Software Foundation, Inc.
+
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])])
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+# Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc.
+# Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+# Copyright (C) 2014 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions. There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program or library that is built
+# using GNU Libtool, you may include this file under the same
+# distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+])
+
+# serial 58 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+ [m4_default([$3],
+ [m4_fatal([Libtool version $1 or higher is required],
+ 63)])],
+ [$2])])
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+ *\ * | *\ *)
+ AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS=$ltmain
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_PREPARE_CC_BASENAME
+# -----------------------
+m4_defun([_LT_PREPARE_CC_BASENAME], [
+# Calculate cc_basename. Skip known compiler wrappers and cross-prefix.
+func_cc_basename ()
+{
+ for cc_temp in @S|@*""; do
+ case $cc_temp in
+ compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+ distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+ \-*) ;;
+ *) break;;
+ esac
+ done
+ func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+}
+])# _LT_PREPARE_CC_BASENAME
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME,
+# but that macro is also expanded into generated libtool script, which
+# arranges for $SED and $ECHO to be set by different means.
+m4_defun([_LT_CC_BASENAME],
+[m4_require([_LT_PREPARE_CC_BASENAME])dnl
+AC_REQUIRE([_LT_DECL_SED])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+func_cc_basename $1
+cc_basename=$func_cc_basename_result
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl
+dnl
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+m4_require([_LT_CMD_TRUNCATE])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options that allow our
+# commands through without removal of \ escapes INIT.
+if test -n "\${ZSH_VERSION+set}"; then
+ setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}"; then
+ setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+ # AIX sometimes has problems with the GCC collect2 program. For some
+ # reason, if we set the COLLECT_NAMES environment variable, the problems
+ # vanish in a puff of smoke.
+ if test set != "${COLLECT_NAMES+set}"; then
+ COLLECT_NAMES=
+ export COLLECT_NAMES
+ fi
+ ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a '.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld=$lt_cv_prog_gnu_ld
+
+old_CC=$CC
+old_CFLAGS=$CFLAGS
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+ if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+ _LT_PATH_MAGIC
+ fi
+ ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitution that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from 'configure', and 'config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool. Notably,
+# 'config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain=$ac_aux_dir/ltmain.sh
+])# _LT_PROG_LTMAIN
+
+
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the 'libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+ [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+ [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+ [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+ [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+ [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
+
+
+
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
+# VARNAME. Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+ [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+ [m4_ifval([$1], [$1], [$2])])
+ lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+ m4_ifval([$4],
+ [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+ lt_dict_add_subkey([lt_decl_dict], [$2],
+ [tagged?], [m4_ifval([$5], [yes], [no])])])
+])
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+ [0], [m4_fatal([$0: too few arguments: $#])],
+ [1], [m4_fatal([$0: too few arguments: $#: $1])],
+ [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+ [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+ [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+ m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+ m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+ m4_if([$2], [],
+ m4_quote(lt_decl_varnames),
+ m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+ lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to 'config.status' so that its
+# declaration there will have the same value as in 'configure'. VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly. In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+# ='`$ECHO "$" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+ [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags='_LT_TAGS'dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+# # Some comment about what VAR is for.
+# visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+ [description])))[]dnl
+m4_pushdef([_libtool_name],
+ m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+ [0], [_libtool_name=[$]$1],
+ [1], [_libtool_name=$lt_[]$1],
+ [2], [_libtool_name=$lt_[]$1],
+ [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the 'libtool'
+# script. Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+ m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+ [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+ [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS. Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote escaped variables declarations
+# into 'config.status', and then the shell code to quote escape them in
+# for loops in 'config.status'. Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+ dnl If the libtool generation code has been placed in $CONFIG_LT,
+ dnl instead of duplicating it all over again into config.status,
+ dnl then we will have config.status run $CONFIG_LT later, so it
+ dnl needs to know what name is stored there:
+ [AC_CONFIG_COMMANDS([libtool],
+ [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+ dnl If the libtool generation code is destined for config.status,
+ dnl expand the accumulated commands and init code now:
+ [AC_CONFIG_COMMANDS([libtool],
+ [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[[\\\\\\\`\\"\\\$]]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes
+ ;;
+ *)
+ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+ ;;
+ esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[[\\\\\\\`\\"\\\$]]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes
+ ;;
+ *)
+ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+ ;;
+ esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable. If COMMENT is supplied, it is inserted after the
+# '#!' sequence but before initialization text begins. After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script. The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test 0 = "$lt_write_fail" && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+ echo
+ AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+'$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+ -h, --help print this help, then exit
+ -V, --version print version number, then exit
+ -q, --quiet do not print progress messages
+ -d, --debug don't remove temporary files
+
+Report bugs to ."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2011 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permision to copy, distribute and modify it."
+
+while test 0 != $[#]
+do
+ case $[1] in
+ --version | --v* | -V )
+ echo "$lt_cl_version"; exit 0 ;;
+ --help | --h* | -h )
+ echo "$lt_cl_help"; exit 0 ;;
+ --debug | --d* | -d )
+ debug=: ;;
+ --quiet | --q* | --silent | --s* | -q )
+ lt_cl_silent=: ;;
+
+ -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try '$[0] --help' for more information.]) ;;
+
+ *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try '$[0] --help' for more information.]) ;;
+ esac
+ shift
+done
+
+if $lt_cl_silent; then
+ exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure. Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test yes = "$silent" &&
+ lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars. Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+ m4_if(_LT_TAG, [C], [
+ # See if we are running on zsh, and set the options that allow our
+ # commands through without removal of \ escapes.
+ if test -n "${ZSH_VERSION+set}"; then
+ setopt NO_GLOB_SUBST
+ fi
+
+ cfgfile=${ofile}T
+ trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+ $RM "$cfgfile"
+
+ cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+# Generated automatically by $as_me ($PACKAGE) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+
+# Provide generalized library-building support services.
+# Written by Gordon Matzigkeit, 1996
+
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# Configured defaults for sys_lib_dlsearch_path munging.
+: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"}
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+ cat <<'_LT_EOF' >> "$cfgfile"
+
+# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE
+
+_LT_PREPARE_MUNGE_PATH_LIST
+_LT_PREPARE_CC_BASENAME
+
+# ### END FUNCTIONS SHARED WITH CONFIGURE
+
+_LT_EOF
+
+ case $host_os in
+ aix3*)
+ cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program. For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test set != "${COLLECT_NAMES+set}"; then
+ COLLECT_NAMES=
+ export COLLECT_NAMES
+fi
+_LT_EOF
+ ;;
+ esac
+
+ _LT_PROG_LTMAIN
+
+ # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+ sed '$q' "$ltmain" >> "$cfgfile" \
+ || (rm -f "$cfgfile"; exit 1)
+
+ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (ie after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+ PACKAGE='$PACKAGE'
+ VERSION='$VERSION'
+ RM='$RM'
+ ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+# autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+ [C], [_LT_LANG(C)],
+ [C++], [_LT_LANG(CXX)],
+ [Go], [_LT_LANG(GO)],
+ [Java], [_LT_LANG(GCJ)],
+ [Fortran 77], [_LT_LANG(F77)],
+ [Fortran], [_LT_LANG(FC)],
+ [Windows Resource], [_LT_LANG(RC)],
+ [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+ [_LT_LANG($1)],
+ [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+ [LT_SUPPORTED_TAG([$1])dnl
+ m4_append([_LT_TAGS], [$1 ])dnl
+ m4_define([_LT_LANG_]$1[_enabled], [])dnl
+ _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+m4_ifndef([AC_PROG_GO], [
+# NOTE: This macro has been submitted for inclusion into #
+# GNU Autoconf as AC_PROG_GO. When it is available in #
+# a released version of Autoconf we should remove this #
+# macro and use it instead. #
+m4_defun([AC_PROG_GO],
+[AC_LANG_PUSH(Go)dnl
+AC_ARG_VAR([GOC], [Go compiler command])dnl
+AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+AC_CHECK_TOOL(GOC, gccgo)
+if test -z "$GOC"; then
+ if test -n "$ac_tool_prefix"; then
+ AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo])
+ fi
+fi
+if test -z "$GOC"; then
+ AC_CHECK_PROG(GOC, gccgo, gccgo, false)
+fi
+])#m4_defun
+])#m4_ifndef
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+ [LT_LANG(CXX)],
+ [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+ [LT_LANG(F77)],
+ [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+ [LT_LANG(FC)],
+ [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [m4_ifdef([AC_PROG_GCJ],
+ [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+ m4_ifdef([A][M_PROG_GCJ],
+ [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+ m4_ifdef([LT_PROG_GCJ],
+ [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([AC_PROG_GO],
+ [LT_LANG(GO)],
+ [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+ [LT_LANG(RC)],
+ [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# -------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+ case $host_os in
+ rhapsody* | darwin*)
+ AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+ AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+ AC_CHECK_TOOL([LIPO], [lipo], [:])
+ AC_CHECK_TOOL([OTOOL], [otool], [:])
+ AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+ _LT_DECL([], [DSYMUTIL], [1],
+ [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+ _LT_DECL([], [NMEDIT], [1],
+ [Tool to change global to local symbols on Mac OS X])
+ _LT_DECL([], [LIPO], [1],
+ [Tool to manipulate fat objects and archives on Mac OS X])
+ _LT_DECL([], [OTOOL], [1],
+ [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+ _LT_DECL([], [OTOOL64], [1],
+ [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+ AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+ [lt_cv_apple_cc_single_mod=no
+ if test -z "$LT_MULTI_MODULE"; then
+ # By default we will add the -single_module flag. You can override
+ # by either setting the environment variable LT_MULTI_MODULE
+ # non-empty at configure time, or by adding -multi_module to the
+ # link flags.
+ rm -rf libconftest.dylib*
+ echo "int foo(void){return 1;}" > conftest.c
+ echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+ _lt_result=$?
+ # If there is a non-empty error log, and "single_module"
+ # appears in it, assume the flag caused a linker warning
+ if test -s conftest.err && $GREP single_module conftest.err; then
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ # Otherwise, if the output was created with a 0 exit code from
+ # the compiler, it worked.
+ elif test -f libconftest.dylib && test 0 = "$_lt_result"; then
+ lt_cv_apple_cc_single_mod=yes
+ else
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ fi
+ rm -rf libconftest.dylib*
+ rm -f conftest.*
+ fi])
+
+ AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+ [lt_cv_ld_exported_symbols_list],
+ [lt_cv_ld_exported_symbols_list=no
+ save_LDFLAGS=$LDFLAGS
+ echo "_main" > conftest.sym
+ LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+ [lt_cv_ld_exported_symbols_list=yes],
+ [lt_cv_ld_exported_symbols_list=no])
+ LDFLAGS=$save_LDFLAGS
+ ])
+
+ AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+ [lt_cv_ld_force_load=no
+ cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+ echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+ echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+ $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+ cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+ echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+ _lt_result=$?
+ if test -s conftest.err && $GREP force_load conftest.err; then
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then
+ lt_cv_ld_force_load=yes
+ else
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ fi
+ rm -f conftest.err libconftest.a conftest conftest.c
+ rm -rf conftest.dSYM
+ ])
+ case $host_os in
+ rhapsody* | darwin1.[[012]])
+ _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;;
+ darwin1.*)
+ _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;;
+ darwin*) # darwin 5.x on
+ # if running on 10.5 or later, the deployment target defaults
+ # to the OS version, if on x86, and 10.4, the deployment
+ # target defaults to 10.4. Don't you love it?
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+ 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+ _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;;
+ 10.[[012]][[,.]]*)
+ _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;;
+ 10.*)
+ _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;;
+ esac
+ ;;
+ esac
+ if test yes = "$lt_cv_apple_cc_single_mod"; then
+ _lt_dar_single_mod='$single_module'
+ fi
+ if test yes = "$lt_cv_ld_exported_symbols_list"; then
+ _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym'
+ else
+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib'
+ fi
+ if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then
+ _lt_dsymutil='~$DSYMUTIL $lib || :'
+ else
+ _lt_dsymutil=
+ fi
+ ;;
+ esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES([TAG])
+# ---------------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+ m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_automatic, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+ if test yes = "$lt_cv_ld_force_load"; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+ m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes],
+ [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes])
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+ fi
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined
+ case $cc_basename in
+ ifort*|nagfor*) _lt_dar_can_shared=yes ;;
+ *) _lt_dar_can_shared=$GCC ;;
+ esac
+ if test yes = "$_lt_dar_can_shared"; then
+ output_verbose_link_cmd=func_echo_all
+ _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil"
+ _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil"
+ _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil"
+ m4_if([$1], [CXX],
+[ if test yes != "$lt_cv_apple_cc_single_mod"; then
+ _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil"
+ fi
+],[])
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the location of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the aix ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow to override them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test set = "${lt_cv_aix_libpath+set}"; then
+ aix_libpath=$lt_cv_aix_libpath
+else
+ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+ lt_aix_libpath_sed='[
+ /Import File Strings/,/^$/ {
+ /^0/ {
+ s/^0 *\([^ ]*\) *$/\1/
+ p
+ }
+ }]'
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+ # Check for a 64-bit object if we didn't find anything.
+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+ fi],[])
+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib
+ fi
+ ])
+ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslash.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script that will find a shell with a builtin
+# printf (that we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='printf %s\n'
+else
+ # Use this function as a fallback that always works.
+ func_fallback_echo ()
+ {
+ eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+ }
+ ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+ $ECHO "$*"
+}
+
+case $ECHO in
+ printf*) AC_MSG_RESULT([printf]) ;;
+ print*) AC_MSG_RESULT([print -r]) ;;
+ *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+ test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+ ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ PATH=/empty FPATH=/empty; export PATH FPATH
+ test "X`printf %s $ECHO`" = "X$ECHO" \
+ || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@],
+ [Search for dependent libraries within DIR (or the compiler's sysroot
+ if not specified).])],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted. We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case $with_sysroot in #(
+ yes)
+ if test yes = "$GCC"; then
+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+ fi
+ ;; #(
+ /*)
+ lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+ ;; #(
+ no|'')
+ ;; #(
+ *)
+ AC_MSG_RESULT([$with_sysroot])
+ AC_MSG_ERROR([The sysroot must be an absolute path.])
+ ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and where our libraries should be installed.])])
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+ [AS_HELP_STRING([--disable-libtool-lock],
+ [avoid locking (might break parallel builds)])])
+test no = "$enable_libtool_lock" || enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+ # Find out what ABI is being produced by ac_compile, and set mode
+ # options accordingly.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.$ac_objext` in
+ *ELF-32*)
+ HPUX_IA64_MODE=32
+ ;;
+ *ELF-64*)
+ HPUX_IA64_MODE=64
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+*-*-irix6*)
+ # Find out what ABI is being produced by ac_compile, and set linker
+ # options accordingly.
+ echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ if test yes = "$lt_cv_prog_gnu_ld"; then
+ case `/usr/bin/file conftest.$ac_objext` in
+ *32-bit*)
+ LD="${LD-ld} -melf32bsmip"
+ ;;
+ *N32*)
+ LD="${LD-ld} -melf32bmipn32"
+ ;;
+ *64-bit*)
+ LD="${LD-ld} -melf64bmip"
+ ;;
+ esac
+ else
+ case `/usr/bin/file conftest.$ac_objext` in
+ *32-bit*)
+ LD="${LD-ld} -32"
+ ;;
+ *N32*)
+ LD="${LD-ld} -n32"
+ ;;
+ *64-bit*)
+ LD="${LD-ld} -64"
+ ;;
+ esac
+ fi
+ fi
+ rm -rf conftest*
+ ;;
+
+mips64*-*linux*)
+ # Find out what ABI is being produced by ac_compile, and set linker
+ # options accordingly.
+ echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ emul=elf
+ case `/usr/bin/file conftest.$ac_objext` in
+ *32-bit*)
+ emul="${emul}32"
+ ;;
+ *64-bit*)
+ emul="${emul}64"
+ ;;
+ esac
+ case `/usr/bin/file conftest.$ac_objext` in
+ *MSB*)
+ emul="${emul}btsmip"
+ ;;
+ *LSB*)
+ emul="${emul}ltsmip"
+ ;;
+ esac
+ case `/usr/bin/file conftest.$ac_objext` in
+ *N32*)
+ emul="${emul}n32"
+ ;;
+ esac
+ LD="${LD-ld} -m $emul"
+ fi
+ rm -rf conftest*
+ ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+ # Find out what ABI is being produced by ac_compile, and set linker
+ # options accordingly. Note that the listed cases only cover the
+ # situations where additional linker options are needed (such as when
+ # doing 32-bit compilation for a host where ld defaults to 64-bit, or
+ # vice versa); the common cases where no linker options are needed do
+ # not appear in the list.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.o` in
+ *32-bit*)
+ case $host in
+ x86_64-*kfreebsd*-gnu)
+ LD="${LD-ld} -m elf_i386_fbsd"
+ ;;
+ x86_64-*linux*)
+ case `/usr/bin/file conftest.o` in
+ *x86-64*)
+ LD="${LD-ld} -m elf32_x86_64"
+ ;;
+ *)
+ LD="${LD-ld} -m elf_i386"
+ ;;
+ esac
+ ;;
+ powerpc64le-*linux*)
+ LD="${LD-ld} -m elf32lppclinux"
+ ;;
+ powerpc64-*linux*)
+ LD="${LD-ld} -m elf32ppclinux"
+ ;;
+ s390x-*linux*)
+ LD="${LD-ld} -m elf_s390"
+ ;;
+ sparc64-*linux*)
+ LD="${LD-ld} -m elf32_sparc"
+ ;;
+ esac
+ ;;
+ *64-bit*)
+ case $host in
+ x86_64-*kfreebsd*-gnu)
+ LD="${LD-ld} -m elf_x86_64_fbsd"
+ ;;
+ x86_64-*linux*)
+ LD="${LD-ld} -m elf_x86_64"
+ ;;
+ powerpcle-*linux*)
+ LD="${LD-ld} -m elf64lppc"
+ ;;
+ powerpc-*linux*)
+ LD="${LD-ld} -m elf64ppc"
+ ;;
+ s390*-*linux*|s390*-*tpf*)
+ LD="${LD-ld} -m elf64_s390"
+ ;;
+ sparc*-*linux*)
+ LD="${LD-ld} -m elf64_sparc"
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+
+*-*-sco3.2v5*)
+ # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+ SAVE_CFLAGS=$CFLAGS
+ CFLAGS="$CFLAGS -belf"
+ AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+ [AC_LANG_PUSH(C)
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+ AC_LANG_POP])
+ if test yes != "$lt_cv_cc_needs_belf"; then
+ # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+ CFLAGS=$SAVE_CFLAGS
+ fi
+ ;;
+*-*solaris*)
+ # Find out what ABI is being produced by ac_compile, and set linker
+ # options accordingly.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.o` in
+ *64-bit*)
+ case $lt_cv_prog_gnu_ld in
+ yes*)
+ case $host in
+ i?86-*-solaris*|x86_64-*-solaris*)
+ LD="${LD-ld} -m elf_x86_64"
+ ;;
+ sparc*-*-solaris*)
+ LD="${LD-ld} -m elf64_sparc"
+ ;;
+ esac
+ # GNU ld 2.21 introduced _sol2 emulations. Use them if available.
+ if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+ LD=${LD-ld}_sol2
+ fi
+ ;;
+ *)
+ if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+ LD="${LD-ld} -64"
+ fi
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+esac
+
+need_locks=$enable_libtool_lock
+])# _LT_ENABLE_LOCK
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+ [lt_cv_ar_at_file=no
+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+ [echo conftest.$ac_objext > conftest.lst
+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+ AC_TRY_EVAL([lt_ar_try])
+ if test 0 -eq "$ac_status"; then
+ # Ensure the archiver fails upon bogus file names.
+ rm -f conftest.$ac_objext libconftest.a
+ AC_TRY_EVAL([lt_ar_try])
+ if test 0 -ne "$ac_status"; then
+ lt_cv_ar_at_file=@
+ fi
+ fi
+ rm -f conftest.* libconftest.a
+ ])
+ ])
+
+if test no = "$lt_cv_ar_at_file"; then
+ archiver_list_spec=
+else
+ archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+ [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+ [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+ case $host_os in
+ bitrig* | openbsd*)
+ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+ ;;
+ *)
+ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+ ;;
+ esac
+ old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+ darwin*)
+ lock_old_archive_extraction=yes ;;
+ *)
+ lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+ [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+ [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+# [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+ [$2=no
+ m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+ lt_compiler_flag="$3" ## exclude from sc_useless_quotes_in_assignment
+ # Insert the option either (1) after the last *FLAGS variable, or
+ # (2) before a word containing "conftest.", or (3) at the end.
+ # Note that $ac_compile itself does not contain backslashes and begins
+ # with a dollar sign (not a hyphen), so the echo should work correctly.
+ # The option is referenced via a variable to avoid confusing sed.
+ lt_compile=`echo "$ac_compile" | $SED \
+ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+ -e 's:$: $lt_compiler_flag:'`
+ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$lt_compile" 2>conftest.err)
+ ac_status=$?
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+ if (exit $ac_status) && test -s "$ac_outfile"; then
+ # The compiler can only warn and ignore the option if not recognized
+ # So say no if there are warnings other than the usual output.
+ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+ if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+ $2=yes
+ fi
+ fi
+ $RM conftest*
+])
+
+if test yes = "[$]$2"; then
+ m4_if([$5], , :, [$5])
+else
+ m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+# [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+ [$2=no
+ save_LDFLAGS=$LDFLAGS
+ LDFLAGS="$LDFLAGS $3"
+ echo "$lt_simple_link_test_code" > conftest.$ac_ext
+ if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+ # The linker can only warn and ignore the option if not recognized
+ # So say no if there are warnings
+ if test -s conftest.err; then
+ # Append any errors to the config.log.
+ cat conftest.err 1>&AS_MESSAGE_LOG_FD
+ $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+ if diff conftest.exp conftest.er2 >/dev/null; then
+ $2=yes
+ fi
+ else
+ $2=yes
+ fi
+ fi
+ $RM -r conftest*
+ LDFLAGS=$save_LDFLAGS
+])
+
+if test yes = "[$]$2"; then
+ m4_if([$4], , :, [$4])
+else
+ m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+#---------------
+# Determine the longest usable command line and cache the result in
+# lt_cv_sys_max_cmd_len (exported to the libtool script as max_cmd_len).
+# A value of -1 means "no limit".  Most branches below hardcode a safe
+# per-OS value; the default branch asks `getconf ARG_MAX` or, failing
+# that, probes empirically by doubling a test string (capped at 1/2 MB).
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+ i=0
+ teststring=ABCD
+
+ case $build_os in
+ msdosdjgpp*)
+ # On DJGPP, this test can blow up pretty badly due to problems in libc
+ # (any single argument exceeding 2000 bytes causes a buffer overrun
+ # during glob expansion). Even if it were fixed, the result of this
+ # check would be larger than it should be.
+ lt_cv_sys_max_cmd_len=12288; # 12K is about right
+ ;;
+
+ gnu*)
+ # Under GNU Hurd, this test is not required because there is
+ # no limit to the length of command line arguments.
+ # Libtool will interpret -1 as no limit whatsoever
+ lt_cv_sys_max_cmd_len=-1;
+ ;;
+
+ cygwin* | mingw* | cegcc*)
+ # On Win9x/ME, this test blows up -- it succeeds, but takes
+ # about 5 minutes as the teststring grows exponentially.
+ # Worse, since 9x/ME are not pre-emptively multitasking,
+ # you end up with a "frozen" computer, even though with patience
+ # the test eventually succeeds (with a max line length of 256k).
+ # Instead, let's just punt: use the minimum linelength reported by
+ # all of the supported platforms: 8192 (on NT/2K/XP).
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ mint*)
+ # On MiNT this can take a long time and run out of memory.
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ amigaos*)
+ # On AmigaOS with pdksh, this test takes hours, literally.
+ # So we just punt and use a minimum line length of 8192.
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*)
+ # This has been around since 386BSD, at least. Likely further.
+ if test -x /sbin/sysctl; then
+ lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+ elif test -x /usr/sbin/sysctl; then
+ lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+ else
+ lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs
+ fi
+ # And add a safety zone
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+ ;;
+
+ interix*)
+ # We know the value 262144 and hardcode it with a safety zone (like BSD)
+ lt_cv_sys_max_cmd_len=196608
+ ;;
+
+ os2*)
+ # The test takes a long time on OS/2.
+ lt_cv_sys_max_cmd_len=8192
+ ;;
+
+ osf*)
+ # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+ # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+ # nice to cause kernel panics so lets avoid the loop below.
+ # First set a reasonable default.
+ lt_cv_sys_max_cmd_len=16384
+ #
+ if test -x /sbin/sysconfig; then
+ case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+ *1*) lt_cv_sys_max_cmd_len=-1 ;;
+ esac
+ fi
+ ;;
+ sco3.2v5*)
+ lt_cv_sys_max_cmd_len=102400
+ ;;
+ sysv5* | sco5v6* | sysv4.2uw2*)
+ # Read ARG_MAX from the kernel tuning file if present.
+ kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+ if test -n "$kargmax"; then
+ lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'`
+ else
+ lt_cv_sys_max_cmd_len=32768
+ fi
+ ;;
+ *)
+ lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+ if test -n "$lt_cv_sys_max_cmd_len" && \
+ test undefined != "$lt_cv_sys_max_cmd_len"; then
+ # Keep 3/4 of the reported limit as a safety zone.
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+ else
+ # Make teststring a little bigger before we do anything with it.
+ # a 1K string should be a reasonable start.
+ for i in 1 2 3 4 5 6 7 8; do
+ teststring=$teststring$teststring
+ done
+ SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+ # If test is not a shell built-in, we'll probably end up computing a
+ # maximum length that is only half of the actual maximum length, but
+ # we can't tell.
+ while { test X`env echo "$teststring$teststring" 2>/dev/null` \
+ = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+ test 17 != "$i" # 1/2 MB should be enough
+ do
+ i=`expr $i + 1`
+ teststring=$teststring$teststring
+ done
+ # Only check the string length outside the loop.
+ lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+ teststring=
+ # Add a significant safety factor because C++ compilers can tack on
+ # massive amounts of additional arguments before passing them to the
+ # linker. It appears as though 1/2 is a usable value.
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+ fi
+ ;;
+ esac
+])
+if test -n "$lt_cv_sys_max_cmd_len"; then
+ AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+ AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+ [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
+
+# Old name:
+dnl Obsolete alias kept so autoupdate can rewrite callers of the old name.
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+# Check for the dlfcn.h header so HAVE_DLFCN_H is available to the
+# dlopen-self test program below.
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+# ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+# Build and run a small C program that dlopens the running executable
+# and looks up its own symbol "fnord", with and without a leading
+# underscore.  The exit status selects among $1 (self-dlopen works),
+# $2 (works with underscore prefix), and $3 (does not work / compile
+# failure); $4 runs when cross-compiling.
+# NOTE(review): the header names in the two #include lines had been
+# stripped (likely by HTML-unescaping of the angle brackets), leaving
+# bare "#include" directives that cannot compile; restored to
+# <dlfcn.h> and <stdio.h> as in upstream libtool.m4.
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test yes = "$cross_compiling"; then :
+ [$4]
+else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+# define LT_DLGLOBAL RTLD_GLOBAL
+#else
+# ifdef DL_GLOBAL
+# define LT_DLGLOBAL DL_GLOBAL
+# else
+# define LT_DLGLOBAL 0
+# endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+ find out it does not work in some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+# ifdef RTLD_LAZY
+# define LT_DLLAZY_OR_NOW RTLD_LAZY
+# else
+# ifdef DL_LAZY
+# define LT_DLLAZY_OR_NOW DL_LAZY
+# else
+# ifdef RTLD_NOW
+# define LT_DLLAZY_OR_NOW RTLD_NOW
+# else
+# ifdef DL_NOW
+# define LT_DLLAZY_OR_NOW DL_NOW
+# else
+# define LT_DLLAZY_OR_NOW 0
+# endif
+# endif
+# endif
+# endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+ int status = $lt_dlunknown;
+
+ if (self)
+ {
+ if (dlsym (self,"fnord")) status = $lt_dlno_uscore;
+ else
+ {
+ if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore;
+ else puts (dlerror ());
+ }
+ /* dlclose (self); */
+ }
+ else
+ puts (dlerror ());
+
+ return status;
+}]
+_LT_EOF
+ if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then
+ (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+ lt_status=$?
+ case x$lt_status in
+ x$lt_dlno_uscore) $1 ;;
+ x$lt_dlneed_uscore) $2 ;;
+ x$lt_dlunknown|x*) $3 ;;
+ esac
+ else :
+ # compilation failed
+ $3
+ fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+# Decide which dlopen flavor the host uses (dlopen, shl_load, dld_link,
+# LoadLibrary, load_add_on, dyld) and which library provides it, then
+# determine whether plain and statically linked programs can dlopen
+# themselves.  Results are recorded for the libtool script via _LT_DECL
+# as enable_dlopen, enable_dlopen_self and enable_dlopen_self_static.
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test yes != "$enable_dlopen"; then
+ enable_dlopen=unknown
+ enable_dlopen_self=unknown
+ enable_dlopen_self_static=unknown
+else
+ lt_cv_dlopen=no
+ lt_cv_dlopen_libs=
+
+ case $host_os in
+ beos*)
+ lt_cv_dlopen=load_add_on
+ lt_cv_dlopen_libs=
+ lt_cv_dlopen_self=yes
+ ;;
+
+ mingw* | pw32* | cegcc*)
+ lt_cv_dlopen=LoadLibrary
+ lt_cv_dlopen_libs=
+ ;;
+
+ cygwin*)
+ lt_cv_dlopen=dlopen
+ lt_cv_dlopen_libs=
+ ;;
+
+ darwin*)
+ # if libdl is installed we need to link against it
+ AC_CHECK_LIB([dl], [dlopen],
+ [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[
+ lt_cv_dlopen=dyld
+ lt_cv_dlopen_libs=
+ lt_cv_dlopen_self=yes
+ ])
+ ;;
+
+ tpf*)
+ # Don't try to run any link tests for TPF. We know it's impossible
+ # because TPF is a cross-compiler, and we know how we open DSOs.
+ lt_cv_dlopen=dlopen
+ lt_cv_dlopen_libs=
+ lt_cv_dlopen_self=no
+ ;;
+
+ *)
+ # Everything else: probe shl_load (HP-UX), then dlopen, in both libc
+ # and the usual support libraries, taking the first that links.
+ AC_CHECK_FUNC([shl_load],
+ [lt_cv_dlopen=shl_load],
+ [AC_CHECK_LIB([dld], [shl_load],
+ [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld],
+ [AC_CHECK_FUNC([dlopen],
+ [lt_cv_dlopen=dlopen],
+ [AC_CHECK_LIB([dl], [dlopen],
+ [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],
+ [AC_CHECK_LIB([svld], [dlopen],
+ [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld],
+ [AC_CHECK_LIB([dld], [dld_link],
+ [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld])
+ ])
+ ])
+ ])
+ ])
+ ])
+ ;;
+ esac
+
+ if test no = "$lt_cv_dlopen"; then
+ enable_dlopen=no
+ else
+ enable_dlopen=yes
+ fi
+
+ case $lt_cv_dlopen in
+ dlopen)
+ # Run the self-dlopen test with the flags/libs discovered above.
+ save_CPPFLAGS=$CPPFLAGS
+ test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+ save_LDFLAGS=$LDFLAGS
+ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+ save_LIBS=$LIBS
+ LIBS="$lt_cv_dlopen_libs $LIBS"
+
+ AC_CACHE_CHECK([whether a program can dlopen itself],
+ lt_cv_dlopen_self, [dnl
+ _LT_TRY_DLOPEN_SELF(
+ lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+ lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+ ])
+
+ if test yes = "$lt_cv_dlopen_self"; then
+ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+ AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+ lt_cv_dlopen_self_static, [dnl
+ _LT_TRY_DLOPEN_SELF(
+ lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+ lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross)
+ ])
+ fi
+
+ CPPFLAGS=$save_CPPFLAGS
+ LDFLAGS=$save_LDFLAGS
+ LIBS=$save_LIBS
+ ;;
+ esac
+
+ case $lt_cv_dlopen_self in
+ yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+ *) enable_dlopen_self=unknown ;;
+ esac
+
+ case $lt_cv_dlopen_self_static in
+ yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+ *) enable_dlopen_self_static=unknown ;;
+ esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+ [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+ [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+ [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
+
+# Old name:
+dnl Obsolete alias kept so autoupdate can rewrite callers of the old name.
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+# Result is cached in the tag variable lt_cv_prog_compiler_c_o and
+# exported to the libtool script as compiler_c_o.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+ [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+ [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+ # Work inside a scratch subdirectory; everything is removed again below.
+ $RM -r conftest 2>/dev/null
+ mkdir conftest
+ cd conftest
+ mkdir out
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+ lt_compiler_flag="-o out/conftest2.$ac_objext"
+ # Insert the option either (1) after the last *FLAGS variable, or
+ # (2) before a word containing "conftest.", or (3) at the end.
+ # Note that $ac_compile itself does not contain backslashes and begins
+ # with a dollar sign (not a hyphen), so the echo should work correctly.
+ lt_compile=`echo "$ac_compile" | $SED \
+ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+ -e 's:$: $lt_compiler_flag:'`
+ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$lt_compile" 2>out/conftest.err)
+ ac_status=$?
+ cat out/conftest.err >&AS_MESSAGE_LOG_FD
+ echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+ if (exit $ac_status) && test -s out/conftest2.$ac_objext
+ then
+ # The compiler can only warn and ignore the option if not recognized
+ # So say no if there are warnings
+ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+ $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+ if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+ _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+ fi
+ fi
+ chmod u+w . 2>&AS_MESSAGE_LOG_FD
+ $RM conftest*
+ # SGI C++ compiler will create directory out/ii_files/ for
+ # template instantiation
+ test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+ $RM out/* && rmdir out
+ cd ..
+ $RM -r conftest
+ $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+ [Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+# (only when the compiler cannot do -c -o itself); sets need_locks to
+# no, warn, or leaves the user-provided value alone.
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links=nottested
+if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != "$need_locks"; then
+ # do not overwrite the value of need_locks provided by the user
+ AC_MSG_CHECKING([if we can lock with hard links])
+ hard_links=yes
+ $RM conftest*
+ # Three-step probe: the first ln must fail (conftest.a does not exist
+ # yet), the middle ln must succeed once conftest.a is created, and the
+ # last ln must fail (conftest.b already exists).  Any other outcome
+ # means hard links cannot be trusted for locking.
+ ln conftest.a conftest.b 2>/dev/null && hard_links=no
+ touch conftest.a
+ ln conftest.a conftest.b 2>&5 || hard_links=no
+ ln conftest.a conftest.b 2>/dev/null && hard_links=no
+ AC_MSG_RESULT([$hard_links])
+ if test no = "$hard_links"; then
+ AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe])
+ need_locks=warn
+ fi
+else
+ need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+# Pick the name of the directory for temporary libtool output:
+# prefer .libs, fall back to _libs where a leading dot is not allowed.
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+ lt_cv_objdir=.libs
+else
+ # MS-DOS does not allow filenames that begin with a dot.
+ lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+ [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/",
+ [Define to the sub-directory where libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.  Sets hardcode_action to one of:
+# relink (must relink at install time), immediate (can hardcode
+# non-existent directories at link time), or unsupported.  May also
+# downgrade enable_fast_install.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+ test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+ test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then
+
+ # We can hardcode non-existent directories.
+ if test no != "$_LT_TAGVAR(hardcode_direct, $1)" &&
+ # If the only mechanism to avoid hardcoding is shlibpath_var, we
+ # have to relink, otherwise we might link with an installed library
+ # when we should be linking with a yet-to-be-installed one
+ ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" &&
+ test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then
+ # Linking always hardcodes the temporary library directory.
+ _LT_TAGVAR(hardcode_action, $1)=relink
+ else
+ # We can link without hardcoding, and we can hardcode nonexisting dirs.
+ _LT_TAGVAR(hardcode_action, $1)=immediate
+ fi
+else
+ # We cannot hardcode anything, or else we can only hardcode existing
+ # directories.
+ _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test relink = "$_LT_TAGVAR(hardcode_action, $1)" ||
+ test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then
+ # Fast installation is not supported
+ enable_fast_install=no
+elif test yes = "$shlibpath_overrides_runpath" ||
+ test no = "$enable_shared"; then
+ # Fast installation is not necessary
+ enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+ [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+# Determine the commands used to strip installed libraries: GNU strip
+# (--strip-debug / --strip-unneeded) when detected, Darwin's strip -S /
+# strip -x otherwise; empty when stripping is not known to be safe.
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+ test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+ test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+ AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+ case $host_os in
+ darwin*)
+ if test -n "$STRIP"; then
+ striplib="$STRIP -x"
+ old_striplib="$STRIP -S"
+ AC_MSG_RESULT([yes])
+ else
+ AC_MSG_RESULT([no])
+ fi
+ ;;
+ *)
+ AC_MSG_RESULT([no])
+ ;;
+ esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
+
+# _LT_PREPARE_MUNGE_PATH_LIST
+# ---------------------------
+# Make sure func_munge_path_list() is defined correctly.
+# (@S|@ is the libtool quoting for a literal dollar sign.)
+m4_defun([_LT_PREPARE_MUNGE_PATH_LIST],
+[[# func_munge_path_list VARIABLE PATH
+# -----------------------------------
+# VARIABLE is name of variable containing _space_ separated list of
+# directories to be munged by the contents of PATH, which is string
+# having a format:
+# "DIR[:DIR]:"
+# string "DIR[ DIR]" will be prepended to VARIABLE
+# ":DIR[:DIR]"
+# string "DIR[ DIR]" will be appended to VARIABLE
+# "DIRP[:DIRP]::[DIRA:]DIRA"
+# string "DIRP[ DIRP]" will be prepended to VARIABLE and string
+# "DIRA[ DIRA]" will be appended to VARIABLE
+# "DIR[:DIR]"
+# VARIABLE will be replaced by "DIR[ DIR]"
+func_munge_path_list ()
+{
+ case x@S|@2 in
+ x)
+ ;;
+ *:)
+ eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\"
+ ;;
+ x:*)
+ eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\"
+ ;;
+ *::*)
+ eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\"
+ eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\"
+ ;;
+ *)
+ eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\"
+ ;;
+ esac
+}
+]])# _LT_PREPARE_MUNGE_PATH_LIST
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+ [], [
+if test yes = "$GCC"; then
+ case $host_os in
+ darwin*) lt_awk_arg='/^libraries:/,/LR/' ;;
+ *) lt_awk_arg='/^libraries:/' ;;
+ esac
+ case $host_os in
+ mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;;
+ *) lt_sed_strip_eq='s|=/|/|g' ;;
+ esac
+ lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+ case $lt_search_path_spec in
+ *\;*)
+ # if the path contains ";" then we assume it to be the separator
+ # otherwise default to the standard path separator (i.e. ":") - it is
+ # assumed that no part of a normal pathname contains ";" but that should
+ # okay in the real world where ";" in dirpaths is itself problematic.
+ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+ ;;
+ *)
+ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+ ;;
+ esac
+ # Ok, now we have the path, separated by spaces, we can step through it
+ # and add multilib dir if necessary...
+ lt_tmp_lt_search_path_spec=
+ lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+ # ...but if some path component already ends with the multilib dir we assume
+ # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer).
+ case "$lt_multi_os_dir; $lt_search_path_spec " in
+ "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*)
+ lt_multi_os_dir=
+ ;;
+ esac
+ for lt_sys_path in $lt_search_path_spec; do
+ if test -d "$lt_sys_path$lt_multi_os_dir"; then
+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir"
+ elif test -n "$lt_multi_os_dir"; then
+ test -d "$lt_sys_path" && \
+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+ fi
+ done
+ lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS = " "; FS = "/|\n";} {
+ lt_foo = "";
+ lt_count = 0;
+ for (lt_i = NF; lt_i > 0; lt_i--) {
+ if ($lt_i != "" && $lt_i != ".") {
+ if ($lt_i == "..") {
+ lt_count++;
+ } else {
+ if (lt_count == 0) {
+ lt_foo = "/" $lt_i lt_foo;
+ } else {
+ lt_count--;
+ }
+ }
+ }
+ }
+ if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+ if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+ # AWK program above erroneously prepends '/' to C:/dos/paths
+ # for these hosts.
+ case $host_os in
+ mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+ $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;;
+ esac
+ sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+ sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=.so
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+AC_ARG_VAR([LT_SYS_LIBRARY_PATH],
+[User-defined run-time library search path.])
+
+case $host_os in
+aix3*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ library_names_spec='$libname$release$shared_ext$versuffix $libname.a'
+ shlibpath_var=LIBPATH
+
+ # AIX 3 has no versioning support, so we append a major version to the name.
+ soname_spec='$libname$release$shared_ext$major'
+ ;;
+
+aix[[4-9]]*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ hardcode_into_libs=yes
+ if test ia64 = "$host_cpu"; then
+ # AIX 5 supports IA64
+ library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext'
+ shlibpath_var=LD_LIBRARY_PATH
+ else
+ # With GCC up to 2.95.x, collect2 would create an import file
+ # for dependence libraries. The import file would start with
+ # the line '#! .'. This would cause the generated library to
+ # depend on '.', always an invalid library. This was fixed in
+ # development snapshots of GCC prior to 3.0.
+ case $host_os in
+ aix4 | aix4.[[01]] | aix4.[[01]].*)
+ if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+ echo ' yes '
+ echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then
+ :
+ else
+ can_build_shared=no
+ fi
+ ;;
+ esac
+ # Using Import Files as archive members, it is possible to support
+ # filename-based versioning of shared library archives on AIX. While
+ # this would work for both with and without runtime linking, it will
+ # prevent static linking of such archives. So we do filename-based
+ # shared library versioning with .so extension only, which is used
+ # when both runtime linking and shared linking is enabled.
+ # Unfortunately, runtime linking may impact performance, so we do
+ # not want this to be the default eventually. Also, we use the
+ # versioned .so libs for executables only if there is the -brtl
+ # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only.
+ # To allow for filename-based versioning support, we need to create
+ # libNAME.so.V as an archive file, containing:
+ # *) an Import File, referring to the versioned filename of the
+ # archive as well as the shared archive member, telling the
+ # bitwidth (32 or 64) of that shared object, and providing the
+ # list of exported symbols of that shared object, eventually
+ # decorated with the 'weak' keyword
+ # *) the shared object with the F_LOADONLY flag set, to really avoid
+ # it being seen by the linker.
+ # At run time we better use the real file rather than another symlink,
+ # but for link time we create the symlink libNAME.so -> libNAME.so.V
+
+ case $with_aix_soname,$aix_use_runtimelinking in
+ # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct
+ # soname into executable. Probably we can add versioning support to
+ # collect2, so additional links can be useful in future.
+ aix,yes) # traditional libtool
+ dynamic_linker='AIX unversionable lib.so'
+ # If using run time linking (on AIX 4.2 or later) use lib.so
+ # instead of lib.a to let people know that these are not
+ # typical AIX shared libraries.
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ ;;
+ aix,no) # traditional AIX only
+ dynamic_linker='AIX lib.a[(]lib.so.V[)]'
+ # We preserve .a as extension for shared libraries through AIX4.2
+ # and later when we are not doing run time linking.
+ library_names_spec='$libname$release.a $libname.a'
+ soname_spec='$libname$release$shared_ext$major'
+ ;;
+ svr4,*) # full svr4 only
+ dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]"
+ library_names_spec='$libname$release$shared_ext$major $libname$shared_ext'
+ # We do not specify a path in Import Files, so LIBPATH fires.
+ shlibpath_overrides_runpath=yes
+ ;;
+ *,yes) # both, prefer svr4
+ dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], lib.a[(]lib.so.V[)]"
+ library_names_spec='$libname$release$shared_ext$major $libname$shared_ext'
+ # unpreferred sharedlib libNAME.a needs extra handling
+ postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"'
+ postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"'
+ # We do not specify a path in Import Files, so LIBPATH fires.
+ shlibpath_overrides_runpath=yes
+ ;;
+ *,no) # both, prefer aix
+ dynamic_linker="AIX lib.a[(]lib.so.V[)], lib.so.V[(]$shared_archive_member_spec.o[)]"
+ library_names_spec='$libname$release.a $libname.a'
+ soname_spec='$libname$release$shared_ext$major'
+ # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling
+ postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)'
+ postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"'
+ ;;
+ esac
+ shlibpath_var=LIBPATH
+ fi
+ ;;
+
+amigaos*)
+ case $host_cpu in
+ powerpc)
+ # Since July 2007 AmigaOS4 officially supports .so libraries.
+ # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ ;;
+ m68k)
+ library_names_spec='$libname.ixlibrary $libname.a'
+ # Create ${libname}_ixlibrary.a entries in /sys/libs.
+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+ ;;
+ esac
+ ;;
+
+beos*)
+ library_names_spec='$libname$shared_ext'
+ dynamic_linker="$host_os ld.so"
+ shlibpath_var=LIBRARY_PATH
+ ;;
+
+bsdi[[45]]*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+ sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+ # the default ld.so.conf also contains /usr/contrib/lib and
+ # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+ # libtool to hard-code these into programs
+ ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+ version_type=windows
+ shrext_cmds=.dll
+ need_version=no
+ need_lib_prefix=no
+
+ case $GCC,$cc_basename in
+ yes,*)
+ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \$file`~
+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~
+ dldir=$destdir/`dirname \$dlpath`~
+ test -d \$dldir || mkdir -p \$dldir~
+ $install_prog $dir/$dlname \$dldir/$dlname~
+ chmod a+x \$dldir/$dlname~
+ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+ eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+ fi'
+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+ dlpath=$dir/\$dldll~
+ $RM \$dlpath'
+ shlibpath_overrides_runpath=yes
+
+ case $host_os in
+ cygwin*)
+ # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+ soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+m4_if([$1], [],[
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+ ;;
+ mingw* | cegcc*)
+ # MinGW DLLs use traditional 'lib' prefix
+ soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+ ;;
+ pw32*)
+ # pw32 DLLs use 'pw' prefix rather than 'lib'
+ library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+ ;;
+ esac
+ dynamic_linker='Win32 ld.exe'
+ ;;
+
+ *,cl*)
+ # Native MSVC
+ libname_spec='$name'
+ soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+ library_names_spec='$libname.dll.lib'
+
+ case $build_os in
+ mingw*)
+ sys_lib_search_path_spec=
+ lt_save_ifs=$IFS
+ IFS=';'
+ for lt_path in $LIB
+ do
+ IFS=$lt_save_ifs
+ # Let DOS variable expansion print the short 8.3 style file name.
+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+ done
+ IFS=$lt_save_ifs
+ # Convert to MSYS style.
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+ ;;
+ cygwin*)
+ # Convert to unix form, then to dos form, then back to unix form
+ # but this time dos style (no spaces!) so that the unix form looks
+ # like /cygdrive/c/PROGRA~1:/cygdr...
+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+ ;;
+ *)
+ sys_lib_search_path_spec=$LIB
+ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+ # It is most probably a Windows format PATH.
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+ else
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+ fi
+ # FIXME: find the short name or the path components, as spaces are
+ # common. (e.g. "Program Files" -> "PROGRA~1")
+ ;;
+ esac
+
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \$file`~
+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~
+ dldir=$destdir/`dirname \$dlpath`~
+ test -d \$dldir || mkdir -p \$dldir~
+ $install_prog $dir/$dlname \$dldir/$dlname'
+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+ dlpath=$dir/\$dldll~
+ $RM \$dlpath'
+ shlibpath_overrides_runpath=yes
+ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
+ # Assume MSVC wrapper
+ library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib'
+ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+
+darwin* | rhapsody*)
+ dynamic_linker="$host_os dyld"
+ version_type=darwin
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$major$shared_ext $libname$shared_ext'
+ soname_spec='$libname$release$major$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+ shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+ ;;
+
+dgux*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ ;;
+
+freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ if test -x /usr/bin/objformat; then
+ objformat=`/usr/bin/objformat`
+ else
+ case $host_os in
+ freebsd[[23]].*) objformat=aout ;;
+ *) objformat=elf ;;
+ esac
+ fi
+ version_type=freebsd-$objformat
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ need_version=no
+ need_lib_prefix=no
+ ;;
+ freebsd-*)
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+ need_version=yes
+ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+ freebsd2.*)
+ shlibpath_overrides_runpath=yes
+ ;;
+ freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+ freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+ *) # from 4.6 on, and DragonFly
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+ esac
+ ;;
+
+haiku*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ dynamic_linker="$host_os runtime_loader"
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+ hardcode_into_libs=yes
+ ;;
+
+hpux9* | hpux10* | hpux11*)
+ # Give a soname corresponding to the major version so that dld.sl refuses to
+ # link against other versions.
+ version_type=sunos
+ need_lib_prefix=no
+ need_version=no
+ case $host_cpu in
+ ia64*)
+ shrext_cmds='.so'
+ hardcode_into_libs=yes
+ dynamic_linker="$host_os dld.so"
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ if test 32 = "$HPUX_IA64_MODE"; then
+ sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+ sys_lib_dlsearch_path_spec=/usr/lib/hpux32
+ else
+ sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+ sys_lib_dlsearch_path_spec=/usr/lib/hpux64
+ fi
+ ;;
+ hppa*64*)
+ shrext_cmds='.sl'
+ hardcode_into_libs=yes
+ dynamic_linker="$host_os dld.sl"
+ shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ ;;
+ *)
+ shrext_cmds='.sl'
+ dynamic_linker="$host_os dld.sl"
+ shlibpath_var=SHLIB_PATH
+ shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ ;;
+ esac
+ # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+ postinstall_cmds='chmod 555 $lib'
+ # or fails outright, so override atomically:
+ install_override_mode=555
+ ;;
+
+interix[[3-9]]*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+
+irix5* | irix6* | nonstopux*)
+ case $host_os in
+ nonstopux*) version_type=nonstopux ;;
+ *)
+ if test yes = "$lt_cv_prog_gnu_ld"; then
+ version_type=linux # correct to gnu/linux during the next big refactor
+ else
+ version_type=irix
+ fi ;;
+ esac
+ need_lib_prefix=no
+ need_version=no
+ soname_spec='$libname$release$shared_ext$major'
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext'
+ case $host_os in
+ irix5* | nonstopux*)
+ libsuff= shlibsuff=
+ ;;
+ *)
+ case $LD in # libtool.m4 will add one of these switches to LD
+ *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+ libsuff= shlibsuff= libmagic=32-bit;;
+ *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+ libsuff=32 shlibsuff=N32 libmagic=N32;;
+ *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+ libsuff=64 shlibsuff=64 libmagic=64-bit;;
+ *) libsuff= shlibsuff= libmagic=never-match;;
+ esac
+ ;;
+ esac
+ shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+ shlibpath_overrides_runpath=no
+ sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff"
+ sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff"
+ hardcode_into_libs=yes
+ ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+ dynamic_linker=no
+ ;;
+
+linux*android*)
+ version_type=none # Android doesn't support versioned libraries.
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext'
+ soname_spec='$libname$release$shared_ext'
+ finish_cmds=
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+
+ # This implies no fast_install, which is unacceptable.
+ # Some rework will be needed to allow for fast_install
+ # before this can be enabled.
+ hardcode_into_libs=yes
+
+ dynamic_linker='Android linker'
+ # Don't embed -rpath directories since the linker doesn't support them.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+
+ # Some binutils ld are patched to set DT_RUNPATH
+ AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+ [lt_cv_shlibpath_overrides_runpath=no
+ save_LDFLAGS=$LDFLAGS
+ save_libdir=$libdir
+ eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+ [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+ [lt_cv_shlibpath_overrides_runpath=yes])])
+ LDFLAGS=$save_LDFLAGS
+ libdir=$save_libdir
+ ])
+ shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+ # This implies no fast_install, which is unacceptable.
+ # Some rework will be needed to allow for fast_install
+ # before this can be enabled.
+ hardcode_into_libs=yes
+
+ # Ideally, we could use ldconfig to report *all* directores which are
+ # searched for libraries, however this is still not possible. Aside from not
+ # being certain /sbin/ldconfig is available, command
+ # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64,
+ # even though it is searched at run-time. Try to do the best guess by
+ # appending ld.so.conf contents (and includes) to the search path.
+ if test -f /etc/ld.so.conf; then
+ lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+ sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+ fi
+
+ # We used to test for /lib/ld.so.1 and disable shared libraries on
+ # powerpc, because MkLinux only supported shared libraries with the
+ # GNU dynamic linker. Since this was broken with cross compilers,
+ # most powerpc-linux boxes support dynamic linking these days and
+ # people can always --disable-shared, the test was removed, and we
+ # assume the GNU/Linux dynamic linker is in use.
+ dynamic_linker='GNU/Linux ld.so'
+ ;;
+
+netbsd*)
+ version_type=sunos
+ need_lib_prefix=no
+ need_version=no
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+ dynamic_linker='NetBSD (a.out) ld.so'
+ else
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ dynamic_linker='NetBSD ld.elf_so'
+ fi
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+
+newsos6)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ ;;
+
+*nto* | *qnx*)
+ version_type=qnx
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ dynamic_linker='ldqnx.so'
+ ;;
+
+openbsd* | bitrig*)
+ version_type=sunos
+ sys_lib_dlsearch_path_spec=/usr/lib
+ need_lib_prefix=no
+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+ need_version=no
+ else
+ need_version=yes
+ fi
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ ;;
+
+os2*)
+ libname_spec='$name'
+ version_type=windows
+ shrext_cmds=.dll
+ need_version=no
+ need_lib_prefix=no
+ # OS/2 can only load a DLL with a base name of 8 characters or less.
+ soname_spec='`test -n "$os2dllname" && libname="$os2dllname";
+ v=$($ECHO $release$versuffix | tr -d .-);
+ n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _);
+ $ECHO $n$v`$shared_ext'
+ library_names_spec='${libname}_dll.$libext'
+ dynamic_linker='OS/2 ld.exe'
+ shlibpath_var=BEGINLIBPATH
+ sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ postinstall_cmds='base_file=`basename \$file`~
+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~
+ dldir=$destdir/`dirname \$dlpath`~
+ test -d \$dldir || mkdir -p \$dldir~
+ $install_prog $dir/$dlname \$dldir/$dlname~
+ chmod a+x \$dldir/$dlname~
+ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+ eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+ fi'
+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; $ECHO \$dlname'\''`~
+ dlpath=$dir/\$dldll~
+ $RM \$dlpath'
+ ;;
+
+osf3* | osf4* | osf5*)
+ version_type=osf
+ need_lib_prefix=no
+ need_version=no
+ soname_spec='$libname$release$shared_ext$major'
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ shlibpath_var=LD_LIBRARY_PATH
+ sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ ;;
+
+rdos*)
+ dynamic_linker=no
+ ;;
+
+solaris*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ # ldd complains unless libraries are executable
+ postinstall_cmds='chmod +x $lib'
+ ;;
+
+sunos4*)
+ version_type=sunos
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+ finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ if test yes = "$with_gnu_ld"; then
+ need_lib_prefix=no
+ fi
+ need_version=yes
+ ;;
+
+sysv4 | sysv4.3*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_vendor in
+ sni)
+ shlibpath_overrides_runpath=no
+ need_lib_prefix=no
+ runpath_var=LD_RUN_PATH
+ ;;
+ siemens)
+ need_lib_prefix=no
+ ;;
+ motorola)
+ need_lib_prefix=no
+ need_version=no
+ shlibpath_overrides_runpath=no
+ sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+ ;;
+ esac
+ ;;
+
+sysv4*MP*)
+ if test -d /usr/nec; then
+ version_type=linux # correct to gnu/linux during the next big refactor
+ library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext'
+ soname_spec='$libname$shared_ext.$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ fi
+ ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+ version_type=sco
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ if test yes = "$with_gnu_ld"; then
+ sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+ else
+ sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+ case $host_os in
+ sco3.2v5*)
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+ ;;
+ esac
+ fi
+ sys_lib_dlsearch_path_spec='/usr/lib'
+ ;;
+
+tpf*)
+ # TPF is a cross-target only. Preferred cross-host = GNU/Linux.
+ version_type=linux # correct to gnu/linux during the next big refactor
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+
+uts4*)
+ version_type=linux # correct to gnu/linux during the next big refactor
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+ soname_spec='$libname$release$shared_ext$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ ;;
+
+*)
+ dynamic_linker=no
+ ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test no = "$dynamic_linker" && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test yes = "$GCC"; then
+ variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then
+ sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec
+fi
+
+if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then
+ sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec
+fi
+
+# remember unaugmented sys_lib_dlsearch_path content for libtool script decls...
+configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec
+
+# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code
+func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH"
+
+# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool
+configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+ [Variables whose values should be saved in libtool wrapper scripts and
+ restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+ [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0], [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+ [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+ [[List of archive names. First name is the real one, the rest are links.
+ The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+ [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+ [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+ [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+ [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+ [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+ [[As "finish_cmds", except a single script fragment to be evaled but
+ not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+ [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+ [Compile-time system search path for libraries])
+_LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2],
+ [Detected run-time system search path for libraries])
+_LT_DECL([], [configure_time_lt_sys_library_path], [2],
+ [Explicit LT_SYS_LIBRARY_PATH set during ./configure time])
+])# _LT_SYS_DYNAMIC_LINKER
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# Find a file program that can recognize a shared library: search $2 (default $PATH) for $1, caching the result in lt_cv_path_MAGIC_CMD.
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] | ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD=$MAGIC_CMD
+  lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word. This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS=$lt_save_ifs
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$1"; then
+      lt_cv_path_MAGIC_CMD=$ac_dir/"$1"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD=$lt_cv_path_MAGIC_CMD
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such. This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem. Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS=$lt_save_ifs
+  MAGIC_CMD=$lt_save_MAGIC_CMD
+  ;;
+esac])
+MAGIC_CMD=$lt_cv_path_MAGIC_CMD
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+  [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name: kept as an autoupdate alias so configure.ac files using the pre-libtool-2.x name keep working.
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# Find a file program that can recognize a shared library: try the host-prefixed tool first, then plain 'file'; fall back to ':' (no-op) when neither is found.
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+ [AS_HELP_STRING([--with-gnu-ld],
+ [assume the C compiler uses GNU ld @<:@default=no@:>@])],
+ [test no = "$withval" || with_gnu_ld=yes],
+ [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test yes = "$GCC"; then
+ # Check if gcc -print-prog-name=ld gives a path.
+ AC_MSG_CHECKING([for ld used by $CC])
+ case $host in
+ *-*-mingw*)
+ # gcc leaves a trailing carriage return, which upsets mingw
+ ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+ *)
+ ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+ esac
+ case $ac_prog in
+ # Accept absolute paths.
+ [[\\/]]* | ?:[[\\/]]*)
+ re_direlt='/[[^/]][[^/]]*/\.\./'
+ # Canonicalize the pathname of ld
+ ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+ while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+ ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+ done
+ test -z "$LD" && LD=$ac_prog
+ ;;
+ "")
+ # If it fails, then pretend we aren't using GCC.
+ ac_prog=ld
+ ;;
+ *)
+ # If it is relative, then search for the first ld in PATH.
+ with_gnu_ld=unknown
+ ;;
+ esac
+elif test yes = "$with_gnu_ld"; then
+ AC_MSG_CHECKING([for GNU ld])
+else
+ AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+ lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
+ for ac_dir in $PATH; do
+ IFS=$lt_save_ifs
+ test -z "$ac_dir" && ac_dir=.
+ if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+ lt_cv_path_LD=$ac_dir/$ac_prog
+ # Check to see if the program is GNU ld. I'd rather use --version,
+ # but apparently some variants of GNU ld only accept -v.
+ # Break only if it was the GNU/non-GNU ld that we prefer.
+ case `"$lt_cv_path_LD" -v 2>&1 &1 conftest.i
+cat conftest.i conftest.i >conftest2.i
+: ${lt_DD:=$DD}
+AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd],
+[if "$ac_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then
+ cmp -s conftest.i conftest.out \
+ && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=:
+fi])
+rm -f conftest.i conftest2.i conftest.out])
+])# _LT_PATH_DD
+
+
+# _LT_CMD_TRUNCATE
+# ----------------
+# Find a command to truncate a binary pipe: prefer 'dd bs=4096 count=1', fall back to '$SED -e 4q'.
+m4_defun([_LT_CMD_TRUNCATE],
+[m4_require([_LT_PATH_DD])
+AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin],
+[printf 0123456789abcdef0123456789abcdef >conftest.i
+cat conftest.i conftest.i >conftest2.i
+lt_cv_truncate_bin=
+if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then
+  cmp -s conftest.i conftest.out \
+  && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1"
+fi
+rm -f conftest.i conftest2.i conftest.out
+test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"])
+_LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1],
+  [Command to truncate a binary pipe])
+])# _LT_CMD_TRUNCATE
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# Determine how to check for library dependencies, per host OS; sets $deplibs_check_method and friends.
+# -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# that responds to the $file_magic_cmd with a given extended regex.
+# If you have 'file' or equivalent on your system and you're not sure
+# whether 'pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  if ( file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd* | bitrig*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+os2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+  [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+  [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+  [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+  [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
+
+
# LT_PATH_NM
# ----------
# find the pathname to a BSD- or MS-compatible name lister
# Search $PATH plus a few historical vendor directories for an nm that
# accepts BSD-style flags; cache the result in lt_cv_path_NM and export
# it as NM.  If no BSD-compatible nm exists, fall back to MS dumpbin
# (or "link -dump").  Finally, probe which output dialect the chosen
# tool speaks and cache that in lt_cv_nm_interface.
AC_DEFUN([LT_PATH_NM],
[AC_REQUIRE([AC_PROG_CC])dnl
AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
[if test -n "$NM"; then
  # Let the user override the test.
  lt_cv_path_NM=$NM
else
  # Prefer the host-prefixed tool; also try plain nm for native builds.
  lt_nm_to_check=${ac_tool_prefix}nm
  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
    lt_nm_to_check="$lt_nm_to_check nm"
  fi
  for lt_tmp_nm in $lt_nm_to_check; do
    lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
      IFS=$lt_save_ifs
      test -z "$ac_dir" && ac_dir=.
      tmp_nm=$ac_dir/$lt_tmp_nm
      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then
	# Check to see if the nm accepts a BSD-compat flag.
	# Adding the 'sed 1q' prevents false positives on HP-UX, which says:
	# nm: unknown option "B" ignored
	# Tru64's nm complains that /dev/null is an invalid object file
	# MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty
	case $build_os in
	mingw*) lt_bad_file=conftest.nm/nofile ;;
	*) lt_bad_file=/dev/null ;;
	esac
	# A complaint naming the bad file means the -B flag itself was
	# accepted; any other diagnostic means it was not.
	case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in
	*$lt_bad_file* | *'Invalid file or object type'*)
	  lt_cv_path_NM="$tmp_nm -B"
	  break 2
	  ;;
	*)
	  # No -B support; try the equivalent POSIX -p flag next.
	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
	  */dev/null*)
	    lt_cv_path_NM="$tmp_nm -p"
	    break 2
	    ;;
	  *)
	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
	    continue # so that we can try to find one that supports BSD flags
	    ;;
	  esac
	  ;;
	esac
      fi
    done
    IFS=$lt_save_ifs
  done
  : ${lt_cv_path_NM=no}
fi])
if test no != "$lt_cv_path_NM"; then
  NM=$lt_cv_path_NM
else
  # Didn't find any BSD compatible name lister, look for dumpbin.
  if test -n "$DUMPBIN"; then :
    # Let the user override the test.
  else
    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
    # Only trust dumpbin if it recognizes COFF input; otherwise disable it.
    case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in
    *COFF*)
      DUMPBIN="$DUMPBIN -symbols -headers"
      ;;
    *)
      DUMPBIN=:
      ;;
    esac
  fi
  AC_SUBST([DUMPBIN])
  if test : != "$DUMPBIN"; then
    NM=$DUMPBIN
  fi
fi
test -z "$NM" && NM=nm
AC_SUBST([NM])
_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl

# Compile a one-variable test program and inspect the lister's output
# format: dumpbin prints an "External" column that BSD nm does not.
AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
  [lt_cv_nm_interface="BSD nm"
  echo "int some_variable = 0;" > conftest.$ac_ext
  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
  (eval "$ac_compile" 2>conftest.err)
  cat conftest.err >&AS_MESSAGE_LOG_FD
  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
  cat conftest.err >&AS_MESSAGE_LOG_FD
  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
  cat conftest.out >&AS_MESSAGE_LOG_FD
  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
    lt_cv_nm_interface="MS dumpbin"
  fi
  rm -f conftest*])
])# LT_PATH_NM
+
# Old names:
# Keep the historical macro names working; AU_ALIAS lets autoupdate
# rewrite them to LT_PATH_NM in configure.ac files that still use them.
AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_PROG_NM], [])
dnl AC_DEFUN([AC_PROG_NM], [])
+
# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
# --------------------------------
# how to determine the name of the shared library
# associated with a specific link library.
#  -- PORTME fill in with the dynamic library characteristics
# Result: sharedlib_from_linklib_cmd names either a shell function from
# ltmain.sh (Windows-style hosts) or $ECHO (everywhere else, where the
# link library and the shared library are the same file).
m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
[m4_require([_LT_DECL_EGREP])
m4_require([_LT_DECL_OBJDUMP])
m4_require([_LT_DECL_DLLTOOL])
AC_CACHE_CHECK([how to associate runtime and link libraries],
lt_cv_sharedlib_from_linklib_cmd,
[lt_cv_sharedlib_from_linklib_cmd='unknown'

case $host_os in
cygwin* | mingw* | pw32* | cegcc*)
  # two different shell functions defined in ltmain.sh;
  # decide which one to use based on capabilities of $DLLTOOL
  case `$DLLTOOL --help 2>&1` in
  *--identify-strict*)
    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
    ;;
  *)
    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
    ;;
  esac
  ;;
*)
  # fallback: assume linklib IS sharedlib
  lt_cv_sharedlib_from_linklib_cmd=$ECHO
  ;;
esac
])
sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO

_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
    [Command to associate shared and link libraries])
])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+
+
# _LT_PATH_MANIFEST_TOOL
# ----------------------
# locate the manifest tool
# Find mt (the Windows manifest tool) and verify it really is one by
# checking its usage text; otherwise MANIFEST_TOOL is set to ':'.
# NOTE: the cache variable is spelled 'mainfest' throughout this macro;
# the misspelling is preserved deliberately, since renaming it would
# invalidate existing config.cache entries and user presets.
m4_defun([_LT_PATH_MANIFEST_TOOL],
[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
  [lt_cv_path_mainfest_tool=no
  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
  cat conftest.err >&AS_MESSAGE_LOG_FD
  # A real manifest tool advertises itself as such in its help output.
  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
    lt_cv_path_mainfest_tool=yes
  fi
  rm -f conftest*])
if test yes != "$lt_cv_path_mainfest_tool"; then
  MANIFEST_TOOL=:
fi
_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
])# _LT_PATH_MANIFEST_TOOL
+
+
# _LT_DLL_DEF_P([FILE])
# ---------------------
# True iff FILE is a Windows DLL '.def' file.
# Keep in sync with func_dll_def_p in the libtool script
# Expands to a shell 'test' that succeeds when the first non-comment,
# non-blank line of FILE starts with EXPORTS or LIBRARY.  The '\'' runs
# below re-quote the sed expressions so they survive m4 expansion.
AC_DEFUN([_LT_DLL_DEF_P],
[dnl
  test DEF = "`$SED -n dnl
    -e '\''s/^[[ ]]*//'\'' dnl Strip leading whitespace
    -e '\''/^\(;.*\)*$/d'\'' dnl Delete empty lines and comments
    -e '\''s/^\(EXPORTS\|LIBRARY\)\([[ ]].*\)*$/DEF/p'\'' dnl
    -e q dnl Only consider the first "real" line
    $1`" dnl
])# _LT_DLL_DEF_P
+
+
# LT_LIB_M
# --------
# check for math library
# Sets and AC_SUBSTs LIBM to the linker flags needed for the math
# library on this host: empty, -lm, or -lmw -lm.
AC_DEFUN([LT_LIB_M],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
LIBM=
case $host in
*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
  # These systems don't have a separate libm, or don't need it
  ;;
*-ncr-sysv4.3*)
  # NCR SysV needs libmw for _mwvalidcheckl in addition to libm.
  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw)
  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
  ;;
*)
  AC_CHECK_LIB(m, cos, LIBM=-lm)
  ;;
esac
AC_SUBST([LIBM])
])# LT_LIB_M
+
# Old name:
# Historical spelling, rewritten by autoupdate to LT_LIB_M.
AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
+
# _LT_COMPILER_NO_RTTI([TAGNAME])
# -------------------------------
# For GCC-family compilers, compute the flag that disables builtin
# functions (and, where accepted, RTTI and exceptions) and store it in
# the per-tag variable lt_prog_compiler_no_builtin_flag.  Non-GCC
# compilers get an empty flag.
m4_defun([_LT_COMPILER_NO_RTTI],
[m4_require([_LT_TAG_COMPILER])dnl

_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=

if test yes = "$GCC"; then
  case $cc_basename in
  nvcc*)
    # nvcc forwards host-compiler flags via -Xcompiler.
    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
  *)
    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
  esac

  # Append -fno-rtti -fno-exceptions only if the compiler accepts them.
  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
    lt_cv_prog_compiler_rtti_exceptions,
    [-fno-rtti -fno-exceptions], [],
    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
fi
_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
    [Compiler flag to turn off builtin functions])
])# _LT_COMPILER_NO_RTTI
+
+
# _LT_CMD_GLOBAL_SYMBOLS
# ----------------------
# Determine how to extract the list of exported global symbols from an
# object file produced by $compiler.  The result is a pipeline cached in
# lt_cv_sys_global_symbol_pipe whose output is lines of the form
#   CODE SYMBOL C-SYMBOL
# plus a family of sed transforms that turn those lines into C extern
# declarations and name/address table entries.  The pipeline is verified
# by compiling a probe program and checking that its symbols appear.
m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
AC_REQUIRE([AC_PROG_CC])dnl
AC_REQUIRE([AC_PROG_AWK])dnl
AC_REQUIRE([LT_PATH_NM])dnl
AC_REQUIRE([LT_PATH_LD])dnl
m4_require([_LT_DECL_SED])dnl
m4_require([_LT_DECL_EGREP])dnl
m4_require([_LT_TAG_COMPILER])dnl

# Check for command to grab the raw symbol name followed by C symbol from nm.
AC_MSG_CHECKING([command to parse $NM output from $compiler object])
AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
[
# These are sane defaults that work on at least a few old systems.
# [They come from Ultrix. What could be older than Ultrix?!! ;)]

# Character class describing NM global symbol codes.
symcode='[[BCDEGRST]]'

# Regexp to match symbols that can be accessed directly from C.
sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'

# Define system-specific variables.
# Each branch widens or narrows symcode to the set of nm type letters
# that denote exported symbols on that platform.
case $host_os in
aix*)
  symcode='[[BCDT]]'
  ;;
cygwin* | mingw* | pw32* | cegcc*)
  symcode='[[ABCDGISTW]]'
  ;;
hpux*)
  if test ia64 = "$host_cpu"; then
    symcode='[[ABCDEGRST]]'
  fi
  ;;
irix* | nonstopux*)
  symcode='[[BCDEGRST]]'
  ;;
osf*)
  symcode='[[BCDEGQRST]]'
  ;;
solaris*)
  symcode='[[BDRT]]'
  ;;
sco3.2v5*)
  symcode='[[DT]]'
  ;;
sysv4.2uw2*)
  symcode='[[DT]]'
  ;;
sysv5* | sco5v6* | unixware* | OpenUNIX*)
  symcode='[[ABDT]]'
  ;;
sysv4)
  symcode='[[DFNSTU]]'
  ;;
esac

# If we're using GNU nm, then use its standard symbol codes.
case `$NM -V 2>&1` in
*GNU* | *'with BFD'*)
  symcode='[[ABCDGIRSTW]]' ;;
esac

if test "$lt_cv_nm_interface" = "MS dumpbin"; then
  # Gets list of data symbols to import.
  lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'"
  # Adjust the below global symbol transforms to fixup imported variables.
  lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'"
  lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'"
  lt_c_name_lib_hook="\
  -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\
  -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'"
else
  # Disable hooks by default.
  lt_cv_sys_global_symbol_to_import=
  lt_cdecl_hook=
  lt_c_name_hook=
  lt_c_name_lib_hook=
fi

# Transform an extracted symbol line into a proper C declaration.
# Some systems (esp. on ia64) link data and code symbols differently,
# so use this general approach.
lt_cv_sys_global_symbol_to_cdecl="sed -n"\
$lt_cdecl_hook\
" -e 's/^T .* \(.*\)$/extern int \1();/p'"\
" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'"

# Transform an extracted symbol line into symbol name and symbol address
lt_cv_sys_global_symbol_to_c_name_address="sed -n"\
$lt_c_name_hook\
" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\
" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'"

# Transform an extracted symbol line into symbol name with lib prefix and
# symbol address.
lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\
$lt_c_name_lib_hook\
" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\
" -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\
" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'"

# Handle CRLF in mingw tool chain
opt_cr=
case $build_os in
mingw*)
  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
  ;;
esac

# Try without a prefix underscore, then with it.
for ac_symprfx in "" "_"; do

  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
  symxfrm="\\1 $ac_symprfx\\2 \\2"

  # Write the raw and C identifiers.
  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
    # Fake it for dumpbin and say T for any non-static function,
    # D for any global variable and I for any imported variable.
    # Also find C++ and __fastcall symbols from MSVC++,
    # which start with @ or ?.
    # The awk program below is wrapped in m4 quotes so its single
    # brackets survive expansion; the \$ escapes keep awk field
    # references out of shell expansion.
    lt_cv_sys_global_symbol_pipe="$AWK ['"\
" {last_section=section; section=\$ 3};"\
" /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
" /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\
" /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\
" /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\
" \$ 0!~/External *\|/{next};"\
" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
" {if(hide[section]) next};"\
" {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\
" {split(\$ 0,a,/\||\r/); split(a[2],s)};"\
" s[1]~/^[@?]/{print f,s[1],s[1]; next};"\
" s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\
" ' prfx=^$ac_symprfx]"
  else
    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
  fi
  # GCC LTO emits internal __gnu_lto markers; drop them from the list.
  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"

  # Check to see that the pipe works correctly.
  pipe_works=no

  rm -f conftest*
  cat > conftest.$ac_ext <<_LT_EOF
#ifdef __cplusplus
extern "C" {
#endif
char nm_test_var;
void nm_test_func(void);
void nm_test_func(void){}
#ifdef __cplusplus
}
#endif
int main(){nm_test_var='a';nm_test_func();return(0);}
_LT_EOF

  if AC_TRY_EVAL(ac_compile); then
    # Now try to grab the symbols.
    nlist=conftest.nm
    if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
      # Try sorting and uniquifying the output.
      if sort "$nlist" | uniq > "$nlist"T; then
	mv -f "$nlist"T "$nlist"
      else
	rm -f "$nlist"T
      fi

      # Make sure that we snagged all the symbols we need.
      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
	  cat <<_LT_EOF > conftest.$ac_ext
/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE
/* DATA imports from DLLs on WIN32 can't be const, because runtime
   relocations are performed -- see ld's documentation on pseudo-relocs. */
# define LT@&t@_DLSYM_CONST
#elif defined __osf__
/* This system does not cope well with relocations in const data. */
# define LT@&t@_DLSYM_CONST
#else
# define LT@&t@_DLSYM_CONST const
#endif

#ifdef __cplusplus
extern "C" {
#endif

_LT_EOF
	  # Now generate the symbol file.
	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'

	  cat <<_LT_EOF >> conftest.$ac_ext

/* The mapping between symbol names and symbols. */
LT@&t@_DLSYM_CONST struct {
  const char *name;
  void *address;
}
lt__PROGRAM__LTX_preloaded_symbols[[]] =
{
  { "@PROGRAM@", (void *) 0 },
_LT_EOF
	  $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
	  cat <<\_LT_EOF >> conftest.$ac_ext
  {0, (void *) 0}
};

/* This works around a problem in FreeBSD linker */
#ifdef FREEBSD_WORKAROUND
static const void *lt_preloaded_setup() {
  return lt__PROGRAM__LTX_preloaded_symbols;
}
#endif

#ifdef __cplusplus
}
#endif
_LT_EOF
	  # Now try linking the two files.
	  mv conftest.$ac_objext conftstm.$ac_objext
	  lt_globsym_save_LIBS=$LIBS
	  lt_globsym_save_CFLAGS=$CFLAGS
	  LIBS=conftstm.$ac_objext
	  CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
	  if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then
	    pipe_works=yes
	  fi
	  LIBS=$lt_globsym_save_LIBS
	  CFLAGS=$lt_globsym_save_CFLAGS
	else
	  echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
	fi
      else
	echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
      fi
    else
      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
    fi
  else
    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
    cat conftest.$ac_ext >&5
  fi
  rm -rf conftest* conftst*

  # Do not use the global_symbol_pipe unless it works.
  if test yes = "$pipe_works"; then
    break
  else
    lt_cv_sys_global_symbol_pipe=
  fi
done
])
if test -z "$lt_cv_sys_global_symbol_pipe"; then
  lt_cv_sys_global_symbol_to_cdecl=
fi
if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
  AC_MSG_RESULT(failed)
else
  AC_MSG_RESULT(ok)
fi

# Response file support.
if test "$lt_cv_nm_interface" = "MS dumpbin"; then
  nm_file_list_spec='@'
elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
  nm_file_list_spec='@'
fi

_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
    [Take the output of nm and produce a listing of raw symbols and C names])
_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
    [Transform the output of nm in a proper C declaration])
_LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1],
    [Transform the output of nm into a list of symbols to manually relocate])
_LT_DECL([global_symbol_to_c_name_address],
    [lt_cv_sys_global_symbol_to_c_name_address], [1],
    [Transform the output of nm in a C name address pair])
_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
    [Transform the output of nm in a C name address pair when lib prefix is needed])
_LT_DECL([nm_interface], [lt_cv_nm_interface], [1],
    [The name lister interface])
_LT_DECL([], [nm_file_list_spec], [1],
    [Specify filename containing input files for $NM])
]) # _LT_CMD_GLOBAL_SYMBOLS
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+m4_if([$1], [CXX], [
+ # C++ specific cases for pic, static, wl, etc.
+ if test yes = "$GXX"; then
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+ case $host_os in
+ aix*)
+ # All AIX code is PIC.
+ if test ia64 = "$host_cpu"; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ m68k)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the '-m68020' flag to GCC prevents building anything better,
+ # like '-m68040'.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+ ;;
+ esac
+ ;;
+
+ beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+ # PIC is the default for these OSes.
+ ;;
+ mingw* | cygwin* | os2* | pw32* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ # Although the cygwin gcc ignores -fPIC, still need this for old-style
+ # (--disable-auto-import) libraries
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ case $host_os in
+ os2*)
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+ ;;
+ esac
+ ;;
+ darwin* | rhapsody*)
+ # PIC is the default on this platform
+ # Common symbols not allowed in MH_DYLIB files
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+ ;;
+ *djgpp*)
+ # DJGPP does not support shared libraries at all
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ ;;
+ haiku*)
+ # PIC is the default for Haiku.
+ # The "-static" flag exists, but is broken.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)=
+ ;;
+ interix[[3-9]]*)
+ # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+ # Instead, we relocate shared libraries at runtime.
+ ;;
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+ fi
+ ;;
+ hpux*)
+ # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+ # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
+ # sets the default TLS model and affects inlining.
+ case $host_cpu in
+ hppa*64*)
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ ;;
+ *qnx* | *nto*)
+ # QNX uses GNU C++, but need to define -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ else
+ case $host_os in
+ aix[[4-9]]*)
+ # All AIX code is PIC.
+ if test ia64 = "$host_cpu"; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ else
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+ fi
+ ;;
+ chorus*)
+ case $cc_basename in
+ cxch68*)
+ # Green Hills C++ Compiler
+ # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+ ;;
+ esac
+ ;;
+ mingw* | cygwin* | os2* | pw32* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ ;;
+ ghcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ freebsd* | dragonfly*)
+ # FreeBSD uses GNU C++
+ ;;
+ hpux9* | hpux10* | hpux11*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+ if test ia64 != "$host_cpu"; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ fi
+ ;;
+ aCC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+ case $host_cpu in
+ hppa*64*|ia64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ ;;
+ esac
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ interix*)
+ # This is c89, which is MS Visual C++ (no shared libs)
+ # Anyone wants to do a port?
+ ;;
+ irix5* | irix6* | nonstopux*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ # CC pic flag -KPIC is the default.
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+ case $cc_basename in
+ KCC*)
+ # KAI C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ ecpc* )
+ # old Intel C++ for x86_64, which still supported -KPIC.
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ icpc* )
+ # Intel C++, used to be incompatible with GCC.
+ # ICC 10 doesn't accept -KPIC any more.
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ pgCC* | pgcpp*)
+ # Portland Group C++ compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ cxx*)
+ # Compaq C++
+ # Make sure the PIC flag is empty. It appears that all Alpha
+ # Linux and Compaq Tru64 Unix objects are PIC.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+ # IBM XL 8.0, 9.0 on PPC and BlueGene
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*)
+ # Sun C++ 5.9
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+ lynxos*)
+ ;;
+ m88k*)
+ ;;
+ mvs*)
+ case $cc_basename in
+ cxx*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ netbsd*)
+ ;;
+ *qnx* | *nto*)
+ # QNX uses GNU C++, but need to define -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+ osf3* | osf4* | osf5*)
+ case $cc_basename in
+ KCC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+ ;;
+ RCC*)
+ # Rational C++ 2.4.1
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ cxx*)
+ # Digital/Compaq C++
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # Make sure the PIC flag is empty. It appears that all Alpha
+ # Linux and Compaq Tru64 Unix objects are PIC.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ psos*)
+ ;;
+ solaris*)
+ case $cc_basename in
+ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ ;;
+ gcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ sunos4*)
+ case $cc_basename in
+ CC*)
+ # Sun C++ 4.x
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ lcc*)
+ # Lucid
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ esac
+ ;;
+ tandem*)
+ case $cc_basename in
+ NCC*)
+ # NonStop-UX NCC 3.20
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ vxworks*)
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+ esac
+ fi
+],
+[
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+ case $host_os in
+ aix*)
+ # All AIX code is PIC.
+ if test ia64 = "$host_cpu"; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ m68k)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the '-m68020' flag to GCC prevents building anything better,
+ # like '-m68040'.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+ ;;
+ esac
+ ;;
+
+ beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+ # PIC is the default for these OSes.
+ ;;
+
+ mingw* | cygwin* | pw32* | os2* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ # Although the cygwin gcc ignores -fPIC, still need this for old-style
+ # (--disable-auto-import) libraries
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ case $host_os in
+ os2*)
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+ ;;
+ esac
+ ;;
+
+ darwin* | rhapsody*)
+ # PIC is the default on this platform
+ # Common symbols not allowed in MH_DYLIB files
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+ ;;
+
+ haiku*)
+ # PIC is the default for Haiku.
+ # The "-static" flag exists, but is broken.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)=
+ ;;
+
+ hpux*)
+ # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+ # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
+ # sets the default TLS model and affects inlining.
+ case $host_cpu in
+ hppa*64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ ;;
+
+ interix[[3-9]]*)
+ # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+ # Instead, we relocate shared libraries at runtime.
+ ;;
+
+ msdosdjgpp*)
+ # Just because we use GCC doesn't mean we suddenly get shared libraries
+ # on systems that don't support them.
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ enable_shared=no
+ ;;
+
+ *nto* | *qnx*)
+ # QNX uses GNU C++, but need to define -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+ fi
+ ;;
+
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+
+ case $cc_basename in
+ nvcc*) # Cuda Compiler Driver 2.2
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+ if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)"
+ fi
+ ;;
+ esac
+ else
+ # PORTME Check for flag to pass linker flags through the system compiler.
+ case $host_os in
+ aix*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ if test ia64 = "$host_cpu"; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ else
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+ fi
+ ;;
+
+ darwin* | rhapsody*)
+ # PIC is the default on this platform
+ # Common symbols not allowed in MH_DYLIB files
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+ case $cc_basename in
+ nagfor*)
+ # NAG Fortran compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ esac
+ ;;
+
+ mingw* | cygwin* | pw32* | os2* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ case $host_os in
+ os2*)
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+ ;;
+ esac
+ ;;
+
+ hpux9* | hpux10* | hpux11*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+ # not for PA HP-UX.
+ case $host_cpu in
+ hppa*64*|ia64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ ;;
+ esac
+ # Is there a better lt_prog_compiler_static that works with the bundled CC?
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+ ;;
+
+ irix5* | irix6* | nonstopux*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # PIC (with -KPIC) is the default.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+ case $cc_basename in
+ # old Intel for x86_64, which still supported -KPIC.
+ ecc*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ # icc used to be incompatible with GCC.
+ # ICC 10 doesn't accept -KPIC any more.
+ icc* | ifort*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ # Lahey Fortran 8.1.
+ lf95*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+ ;;
+ nagfor*)
+ # NAG Fortran compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ tcc*)
+ # Fabrice Bellard et al's Tiny C Compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ ccc*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # All Alpha code is PIC.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ xl* | bgxl* | bgf* | mpixl*)
+ # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*)
+ # Sun Fortran 8.3 passes all unrecognized flags to the linker
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+ ;;
+ *Sun\ F* | *Sun*Fortran*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ ;;
+ *Sun\ C*)
+ # Sun C 5.9
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ ;;
+ *Intel*\ [[CF]]*Compiler*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ *Portland\ Group*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+
+ newsos6)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ *nto* | *qnx*)
+ # QNX uses GNU C++, but need to define -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+
+ osf3* | osf4* | osf5*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # All OSF/1 code is PIC.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ rdos*)
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ solaris*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ case $cc_basename in
+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+ esac
+ ;;
+
+ sunos4*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ sysv4 | sysv4.2uw2* | sysv4.3*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ ;;
+
+ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ unicos*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+
+ uts4*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ *)
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+ esac
+ fi
+])
+case $host_os in
+ # For platforms that do not support PIC, -DPIC is meaningless:
+ *djgpp*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+ ;;
+esac
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+ _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+ [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+ [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+ "" | " "*) ;;
+ *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+ esac],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+ [Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+ [How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+ _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+ $lt_tmp_static_flag,
+ [],
+ [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+ [Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+ case $host_os in
+ aix[[4-9]]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to GNU nm, but means don't demangle to AIX nm.
+ # Without the "-l" option, or with the "-B" option, AIX nm treats
+ # weak defined symbols like other global defined symbols, whereas
+ # GNU nm marks them as "W".
+ # While the 'weak' keyword is ignored in the Export File, we need
+ # it in the Import File for the 'aix-soname' feature, so we have
+ # to replace the "-B" option with "-P" for AIX nm.
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+ _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ ;;
+ pw32*)
+ _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds
+ ;;
+ cygwin* | mingw* | cegcc*)
+ case $cc_basename in
+ cl*)
+ _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+ ;;
+ *)
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+ ;;
+ esac
+ ;;
+ *)
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ ;;
+ esac
+], [
+ runpath_var=
+ _LT_TAGVAR(allow_undefined_flag, $1)=
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(archive_cmds, $1)=
+ _LT_TAGVAR(archive_expsym_cmds, $1)=
+ _LT_TAGVAR(compiler_needs_object, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(hardcode_automatic, $1)=no
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ _LT_TAGVAR(hardcode_minus_L, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+ _LT_TAGVAR(inherit_rpath, $1)=no
+ _LT_TAGVAR(link_all_deplibs, $1)=unknown
+ _LT_TAGVAR(module_cmds, $1)=
+ _LT_TAGVAR(module_expsym_cmds, $1)=
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+ _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+ _LT_TAGVAR(thread_safe_flag_spec, $1)=
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ # include_expsyms should be a list of space-separated symbols to be *always*
+ # included in the symbol list
+ _LT_TAGVAR(include_expsyms, $1)=
+ # exclude_expsyms can be an extended regexp of symbols to exclude
+ # it will be wrapped by ' (' and ')$', so one must not match beginning or
+ # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc',
+ # as well as any symbol that contains 'd'.
+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+ # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+ # platforms (ab)use it in PIC code, but their linkers get confused if
+ # the symbol is explicitly referenced. Since portable code cannot
+ # rely on this symbol name, it's probably fine to never include it in
+ # preloaded symbol tables.
+ # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+ extract_expsyms_cmds=
+
+ case $host_os in
+ cygwin* | mingw* | pw32* | cegcc*)
+ # FIXME: the MSVC++ port hasn't been tested in a loooong time
+ # When not using gcc, we currently assume that we are using
+ # Microsoft Visual C++.
+ if test yes != "$GCC"; then
+ with_gnu_ld=no
+ fi
+ ;;
+ interix*)
+ # we just hope/assume this is gcc and not c89 (= MSVC++)
+ with_gnu_ld=yes
+ ;;
+ openbsd* | bitrig*)
+ with_gnu_ld=no
+ ;;
+ esac
+
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+
+ # On some targets, GNU ld is compatible enough with the native linker
+ # that we're better off using the native interface for both.
+ lt_use_gnu_ld_interface=no
+ if test yes = "$with_gnu_ld"; then
+ case $host_os in
+ aix*)
+ # The AIX port of GNU ld has always aspired to compatibility
+ # with the native linker. However, as the warning in the GNU ld
+ # block says, versions before 2.19.5* couldn't really create working
+ # shared libraries, regardless of the interface used.
+ case `$LD -v 2>&1` in
+ *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+ *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+ *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+ *)
+ lt_use_gnu_ld_interface=yes
+ ;;
+ esac
+ ;;
+ *)
+ lt_use_gnu_ld_interface=yes
+ ;;
+ esac
+ fi
+
+ if test yes = "$lt_use_gnu_ld_interface"; then
+ # If archive_cmds runs LD, not CC, wlarc should be empty
+ wlarc='$wl'
+
+ # Set some defaults for GNU ld with shared library support. These
+ # are reset later if shared libraries are not supported. Putting them
+ # here allows them to be overridden if necessary.
+ runpath_var=LD_RUN_PATH
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+ # ancient GNU ld didn't support --whole-archive et. al.
+ if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ supports_anon_versioning=no
+ case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in
+ *GNU\ gold*) supports_anon_versioning=yes ;;
+ *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+ *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+ *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+ *\ 2.11.*) ;; # other 2.11 versions
+ *) supports_anon_versioning=yes ;;
+ esac
+
+ # See if GNU ld supports shared libraries.
+ case $host_os in
+ aix[[3-9]]*)
+ # On AIX/PPC, the GNU linker is very broken
+ if test ia64 != "$host_cpu"; then
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support. If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)=''
+ ;;
+ m68k)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ ;;
+
+ beos*)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Joseph Beckenbach says some releases of gcc
+ # support --undefined. This deserves some investigation. FIXME
+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+ # as there is no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols'
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ # If the export-symbols file already is a .def file, use it as
+ # is; otherwise, prepend EXPORTS...
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+ cp $export_symbols $output_objdir/$soname.def;
+ else
+ echo EXPORTS > $output_objdir/$soname.def;
+ cat $export_symbols >> $output_objdir/$soname.def;
+ fi~
+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ haiku*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      shrext_cmds=.dll
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+        $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+        $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+        $ECHO EXPORTS >> $output_objdir/$libname.def~
+        emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+        $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+        emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+        $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+        $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+        $ECHO EXPORTS >> $output_objdir/$libname.def~
+        prefix_cmds="$SED"~
+        if test EXPORTS = "`$SED 1q $export_symbols`"; then
+          prefix_cmds="$prefix_cmds -e 1d";
+        fi~
+        prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+        cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+        $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+        emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' # NOTE(review): was misspelled 'old_archive_From_new_cmds' (capital F), a dead variable; the tag variable initialized/consumed elsewhere is lowercase, so the emximp import-lib step was silently lost on OS/2
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      ;;
+
+ interix[[3-9]]*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+ # Instead, shared libraries are loaded at an image base (0x10000000 by
+ # default) and relocated if they conflict, which is a slow very memory
+ # consuming and fragmenting process. To avoid this, we pick a random,
+ # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+ # time. Moving up from 0x10000000 also allows more sbrk(2) space.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ ;;
+
+ gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+ tmp_diet=no
+ if test linux-dietlibc = "$host_os"; then
+ case $cc_basename in
+ diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn)
+ esac
+ fi
+ if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+ && test no = "$tmp_diet"
+ then
+ tmp_addflag=' $pic_flag'
+ tmp_sharedflag='-shared'
+ case $cc_basename,$host_cpu in
+ pgcc*) # Portland Group C compiler
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ tmp_addflag=' $pic_flag'
+ ;;
+ pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group f77 and f90 compilers
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ tmp_addflag=' $pic_flag -Mnomain' ;;
+ ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64
+ tmp_addflag=' -i_dynamic' ;;
+ efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64
+ tmp_addflag=' -i_dynamic -nofor_main' ;;
+ ifc* | ifort*) # Intel Fortran compiler
+ tmp_addflag=' -nofor_main' ;;
+ lf95*) # Lahey Fortran 8.1
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ tmp_sharedflag='--shared' ;;
+ nagfor*) # NAGFOR 5.3
+ tmp_sharedflag='-Wl,-shared' ;;
+ xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+ tmp_sharedflag='-qmkshrobj'
+ tmp_addflag= ;;
+ nvcc*) # Cuda Compiler Driver 2.2
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+ ;;
+ esac
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*) # Sun C 5.9
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+ tmp_sharedflag='-G' ;;
+ *Sun\ F*) # Sun Fortran 8.3
+ tmp_sharedflag='-G' ;;
+ esac
+ _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+
+ if test yes = "$supports_anon_versioning"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
+ fi
+
+ case $cc_basename in
+ tcc*)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic'
+ ;;
+ xlf* | bgf* | bgxlf* | mpixlf*)
+ # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test yes = "$supports_anon_versioning"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+ solaris*)
+ if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems. Therefore, libtool
+*** is disabling shared libraries support. We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer. Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+ case `$LD -v 2>&1` in
+ *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems. Therefore, libtool
+*** is disabling shared libraries support. We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer. Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+ ;;
+ *)
+ # For security reasons, it is highly recommended that you always
+ # use absolute paths for naming shared libraries, and exclude the
+ # DT_RUNPATH tag from executables and libraries. But doing so
+ # requires that you compile everything twice, which is a pain.
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ sunos4*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+ wlarc=
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+
+ if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then
+ runpath_var=
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ else
+ # PORTME fill in a description of your system's linker (not GNU ld)
+ case $host_os in
+ aix3*)
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+ # Note: this linker hardcodes the directories in LIBPATH if there
+ # are no directories specified by -L.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then
+ # Neither direct hardcoding nor static linking is supported with a
+ # broken collect2.
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ fi
+ ;;
+
+ aix[[4-9]]*)
+ if test ia64 = "$host_cpu"; then
+ # On IA64, the linker does run time linking by default, so we don't
+ # have to do anything special.
+ aix_use_runtimelinking=no
+ exp_sym_flag='-Bexport'
+ no_entry_flag=
+ else
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to GNU nm, but means don't demangle to AIX nm.
+ # Without the "-l" option, or with the "-B" option, AIX nm treats
+ # weak defined symbols like other global defined symbols, whereas
+ # GNU nm marks them as "W".
+ # While the 'weak' keyword is ignored in the Export File, we need
+ # it in the Import File for the 'aix-soname' feature, so we have
+ # to replace the "-B" option with "-P" for AIX nm.
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+ _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ aix_use_runtimelinking=no
+
+ # Test if we are trying to use run time linking or normal
+ # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+ # have runtime linking enabled, and use it for executables.
+ # For shared libraries, we enable/disable runtime linking
+ # depending on the kind of the shared library created -
+ # when "with_aix_soname,aix_use_runtimelinking" is:
+ # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables
+ # "aix,yes" lib.so shared, rtl:yes, for executables
+ # lib.a static archive
+ # "both,no" lib.so.V(shr.o) shared, rtl:yes
+ # lib.a(lib.so.V) shared, rtl:no, for executables
+ # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables
+ # lib.a(lib.so.V) shared, rtl:no
+ # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables
+ # lib.a static archive
+ case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+ for ld_flag in $LDFLAGS; do
+ if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then
+ aix_use_runtimelinking=yes
+ break
+ fi
+ done
+ if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then
+ # With aix-soname=svr4, we create the lib.so.V shared archives only,
+ # so we don't have lib.a shared libs to link our executables.
+ # We have to force runtime linking in this case.
+ aix_use_runtimelinking=yes
+ LDFLAGS="$LDFLAGS -Wl,-brtl"
+ fi
+ ;;
+ esac
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
+ fi
+
+ # When large executables or shared objects are built, AIX ld can
+ # have problems creating the table of contents. If linking a library
+ # or program results in "error TOC overflow" add -mminimal-toc to
+ # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
+ # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+ _LT_TAGVAR(archive_cmds, $1)=''
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='$wl-f,'
+ case $with_aix_soname,$aix_use_runtimelinking in
+ aix,*) ;; # traditional, no import file
+ svr4,* | *,yes) # use import file
+ # The Import File defines what to hardcode.
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+ ;;
+ esac
+
+ if test yes = "$GCC"; then
+ case $host_os in aix4.[[012]]|aix4.[[012]].*)
+ # We only want to do this on AIX 4.2 and lower, the check
+ # below for broken collect2 doesn't work under 4.3+
+ collect2name=`$CC -print-prog-name=collect2`
+ if test -f "$collect2name" &&
+ strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+ then
+ # We have reworked collect2
+ :
+ else
+ # We have old collect2
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ # It fails to find uninstalled libraries when the uninstalled
+ # path is not listed in the libpath. Setting hardcode_minus_L
+ # to unsupported forces relinking
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ fi
+ ;;
+ esac
+ shared_flag='-shared'
+ if test yes = "$aix_use_runtimelinking"; then
+ shared_flag="$shared_flag "'$wl-G'
+ fi
+ # Need to ensure runtime linking is disabled for the traditional
+ # shared library, or the linker may eventually find shared libraries
+ # /with/ Import File - we do not want to mix them.
+ shared_flag_aix='-shared'
+ shared_flag_svr4='-shared $wl-G'
+ else
+ # not using gcc
+ if test ia64 = "$host_cpu"; then
+ # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+ # chokes on -Wl,-G. The following line is correct:
+ shared_flag='-G'
+ else
+ if test yes = "$aix_use_runtimelinking"; then
+ shared_flag='$wl-G'
+ else
+ shared_flag='$wl-bM:SRE'
+ fi
+ shared_flag_aix='$wl-bM:SRE'
+ shared_flag_svr4='$wl-G'
+ fi
+ fi
+
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall'
+ # It seems that -bexpall does not export symbols beginning with
+ # underscore (_), so it is better to generate a list of symbols to export.
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
+ else
+ if test ia64 = "$host_cpu"; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib'
+ _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols"
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok'
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok'
+ if test yes = "$with_gnu_ld"; then
+ # We only use this code for GNU lds that support --whole-archive.
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+ else
+ # Exported symbols can be pulled into shared objects from archives
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d'
+ # -brtl affects multiple linker settings, -berok does not and is overridden later
+ compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`'
+ if test svr4 != "$with_aix_soname"; then
+ # This is similar to how AIX traditionally builds its shared libraries.
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname'
+ fi
+ if test aix != "$with_aix_soname"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp'
+ else
+ # used by -dlpreopen to get the symbols
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir'
+ fi
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d'
+ fi
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)=''
+ ;;
+ m68k)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ ;;
+
+ bsdi[[45]]*)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ # When not using gcc, we currently assume that we are using
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+ case $cc_basename in
+ cl*)
+ # Native MSVC
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='@'
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=.dll
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames='
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+ cp "$export_symbols" "$output_objdir/$soname.def";
+ echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp";
+ else
+ $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp;
+ fi~
+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+ linknames='
+ # The linker will not automatically build a static lib if we build a DLL.
+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+ # Don't use ranlib
+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+ lt_tool_outputfile="@TOOL_OUTPUT@"~
+ case $lt_outputfile in
+ *.exe|*.EXE) ;;
+ *)
+ lt_outputfile=$lt_outputfile.exe
+ lt_tool_outputfile=$lt_tool_outputfile.exe
+ ;;
+ esac~
+ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then
+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+ $RM "$lt_outputfile.manifest";
+ fi'
+ ;;
+ *)
+ # Assume MSVC wrapper
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=.dll
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+ # The linker will automatically build a .lib file if we build a DLL.
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ # FIXME: Should let the user specify the lib program.
+ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ ;;
+ esac
+ ;;
+
+ darwin* | rhapsody*)
+ _LT_DARWIN_LINKER_FEATURES($1)
+ ;;
+
+ dgux*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+ # support. Future versions do this automatically, but an explicit c++rt0.o
+ # does not break anything, and helps significantly (at the cost of a little
+ # extra space).
+ freebsd2.2*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+ freebsd2.*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ hpux9*)
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ ;;
+
+ hpux10*)
+ if test yes,no = "$GCC,$with_gnu_ld"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+ if test no = "$with_gnu_ld"; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ fi
+ ;;
+
+ hpux11*)
+ if test yes,no = "$GCC,$with_gnu_ld"; then
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ m4_if($1, [], [
+ # Older versions of the 11.00 compiler do not understand -b yet
+ # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+ _LT_LINKER_OPTION([if $CC understands -b],
+ _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+ [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+ ;;
+ esac
+ fi
+ if test no = "$with_gnu_ld"; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ case $host_cpu in
+ hppa*64*|ia64*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+ *)
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ fi
+ ;;
+
+ irix5* | irix6* | nonstopux*)
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+ # This should be the same for all languages, so no per-tag cache variable.
+ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+ [lt_cv_irix_exported_symbol],
+ [save_LDFLAGS=$LDFLAGS
+ LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null"
+ AC_LINK_IFELSE(
+ [AC_LANG_SOURCE(
+ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+ [C++], [[int foo (void) { return 0; }]],
+ [Fortran 77], [[
+ subroutine foo
+ end]],
+ [Fortran], [[
+ subroutine foo
+ end]])])],
+ [lt_cv_irix_exported_symbol=yes],
+ [lt_cv_irix_exported_symbol=no])
+ LDFLAGS=$save_LDFLAGS])
+ if test yes = "$lt_cv_irix_exported_symbol"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib'
+ fi
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(inherit_rpath, $1)=yes
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ linux*)
+ case $cc_basename in
+ tcc*)
+ # Fabrice Bellard et al's Tiny C Compiler
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ newsos6)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *nto* | *qnx*)
+ ;;
+
+ openbsd* | bitrig*)
+ if test -f /usr/libexec/ld.so; then
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ fi
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ os2*)
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ shrext_cmds=.dll
+ _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+ $ECHO EXPORTS >> $output_objdir/$libname.def~
+ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+ emximp -o $lib $output_objdir/$libname.def'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+ $ECHO EXPORTS >> $output_objdir/$libname.def~
+ prefix_cmds="$SED"~
+ if test EXPORTS = "`$SED 1q $export_symbols`"; then
+ prefix_cmds="$prefix_cmds -e 1d";
+ fi~
+ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+ emximp -o $lib $output_objdir/$libname.def'
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ ;;
+
+ osf3*)
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ else
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ ;;
+
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ else
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+ $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp'
+
+ # Both c and cxx compiler support -rpath directly
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ ;;
+
+ solaris*)
+ _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+ if test yes = "$GCC"; then
+ wlarc='$wl'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+ wlarc=''
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+ ;;
+ *)
+ wlarc='$wl'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ ;;
+ esac
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ # The compiler driver will combine and reorder linker options,
+ # but understands '-z linker_flag'. GCC discards it without '$wl',
+ # but is careful enough not to reorder.
+ # Supported since Solaris 2.6 (maybe 2.5.1?)
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+ fi
+ ;;
+ esac
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ sunos4*)
+ if test sequent = "$host_vendor"; then
+ # Use $CC to link under sequent, because it throws in some extra .o
+ # files that make .init and .fini sections work.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ sysv4)
+ case $host_vendor in
+ sni)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+ ;;
+ siemens)
+ ## LD is ld; it makes a PLAMLIB
+ ## CC just makes a GrossModule.
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ ;;
+ motorola)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie
+ ;;
+ esac
+ runpath_var='LD_RUN_PATH'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ sysv4.3*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var=LD_RUN_PATH
+ hardcode_runpath_var=yes
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ fi
+ ;;
+
+ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+ _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var='LD_RUN_PATH'
+
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ fi
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6*)
+ # Note: We CANNOT use -z defs as we might desire, because we do not
+ # link with -lc, and that would cause any symbols used from libc to
+ # always be unresolved, which means just about no library would
+ # ever link correctly. If we're not using GNU ld we use -z text
+ # though, which does catch some bad symbols but isn't as heavy-handed
+ # as -z defs.
+ _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+ _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport'
+ runpath_var='LD_RUN_PATH'
+
+ if test yes = "$GCC"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ fi
+ ;;
+
+ uts4*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *)
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+
+ if test sni = "$host_vendor"; then
+ case $host in
+ sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym'
+ ;;
+ esac
+ fi
+ fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+ [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+ # Assume -lc should be added
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+ if test yes,yes = "$GCC,$enable_shared"; then
+ case $_LT_TAGVAR(archive_cmds, $1) in
+ *'~'*)
+ # FIXME: we may have to deal with multi-command sequences.
+ ;;
+ '$CC '*)
+ # Test whether the compiler implicitly links with -lc since on some
+ # systems, -lgcc has to come before -lc. If gcc already passes -lc
+ # to ld, don't add -lc before -lgcc.
+ AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+ [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+ [$RM conftest*
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+ if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+ soname=conftest
+ lib=conftest
+ libobjs=conftest.$ac_objext
+ deplibs=
+ wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+ pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+ compiler_flags=-v
+ linker_flags=-v
+ verstring=
+ output_objdir=.
+ libname=conftest
+ lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+ _LT_TAGVAR(allow_undefined_flag, $1)=
+ if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+ then
+ lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ else
+ lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ fi
+ _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+ else
+ cat conftest.err 1>&5
+ fi
+ $RM conftest*
+ ])
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+ ;;
+ esac
+ fi
+ ;;
+esac
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+ [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+ [enable_shared_with_static_runtimes], [0],
+ [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+ [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+ [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+ [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+ [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+ [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+ [Commands used to build a loadable module if different from building
+ a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+ [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+ [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+ [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+ [Flag to hardcode $libdir into a binary during linking.
+ This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+ [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+ [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
+ DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+ [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
+ DIR into the resulting binary and the resulting library dependency is
+ "absolute", i.e impossible to change by setting $shlibpath_var if the
+ library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+ [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+ into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+ [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+ into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+ [Set to "yes" if building a shared library automatically hardcodes DIR
+ into the library and all subsequent libraries and executables linked
+ against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+ [Set to yes if linker adds runtime paths of dependent libraries
+ to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+ [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+ [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+ [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+ [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+ [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+ [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+ [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+ [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined. These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+# Remember the user's CC on entry; it is restored at the bottom of this
+# macro because the tag probes below may overwrite $CC.
+lt_save_CC=$CC
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Run the compiler/linker feature probes only when _LT_TAG_COMPILER
+# actually found a working C compiler.
+if test -n "$compiler"; then
+ _LT_COMPILER_NO_RTTI($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+ LT_SYS_DLOPEN_SELF
+ _LT_CMD_STRIPLIB
+
+ # Report what library types will actually be built
+ AC_MSG_CHECKING([if libtool supports shared libraries])
+ AC_MSG_RESULT([$can_build_shared])
+
+ AC_MSG_CHECKING([whether to build shared libraries])
+ # Shared libraries are disabled outright when the probes above decided
+ # they cannot be built on this platform.
+ test no = "$can_build_shared" && enable_shared=no
+
+ # On AIX, shared libraries and static libraries use the same namespace, and
+ # are all built from PIC.
+ case $host_os in
+ aix3*)
+ test yes = "$enable_shared" && enable_static=no
+ if test -n "$RANLIB"; then
+ archive_cmds="$archive_cmds~\$RANLIB \$lib"
+ postinstall_cmds='$RANLIB $lib'
+ fi
+ ;;
+
+ aix[[4-9]]*)
+ # On non-ia64 AIX, whether static libs can coexist with shared ones
+ # depends on the chosen soname style and runtime-linking mode.
+ if test ia64 != "$host_cpu"; then
+ case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
+ yes,aix,yes) ;; # shared object as lib.so file only
+ yes,svr4,*) ;; # shared object as lib.so archive member only
+ yes,*) enable_static=no ;; # shared object in lib.a archive as well
+ esac
+ fi
+ ;;
+ esac
+ AC_MSG_RESULT([$enable_shared])
+
+ AC_MSG_CHECKING([whether to build static libraries])
+ # Make sure either enable_shared or enable_static is yes.
+ test yes = "$enable_shared" || enable_static=yes
+ AC_MSG_RESULT([$enable_static])
+
+ # Write this tag's configuration into the generated 'libtool' script.
+ _LT_CONFIG($1)
+fi
+AC_LANG_POP
+# Restore the compiler saved on entry.
+CC=$lt_save_CC
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined. These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test no != "$CXX" &&
+ ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) ||
+ (test g++ != "$CXX"))); then
+ AC_PROG_CXXCPP
+else
+ _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working. Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test yes != "$_lt_caught_CXX_error"; then
+ # Code to be used in simple compile tests
+ lt_simple_compile_test_code="int some_variable = 0;"
+
+ # Code to be used in simple link tests
+ lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+ # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+ _LT_TAG_COMPILER
+
+ # save warnings/boilerplate of simple test code
+ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
+ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+ lt_save_with_gnu_ld=$with_gnu_ld
+ lt_save_path_LD=$lt_cv_path_LD
+ if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+ lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+ else
+ $as_unset lt_cv_prog_gnu_ld
+ fi
+ if test -n "${lt_cv_path_LDCXX+set}"; then
+ lt_cv_path_LD=$lt_cv_path_LDCXX
+ else
+ $as_unset lt_cv_path_LD
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
+ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+
+ if test -n "$compiler"; then
+ # We don't want -fno-exception when compiling C++ code, so set the
+ # no_builtin_flag separately
+ if test yes = "$GXX"; then
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+ else
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+ fi
+
+ if test yes = "$GXX"; then
+ # Set up default GNU C++ configuration
+
+ LT_PATH_LD
+
+ # Check if GNU C++ uses GNU ld as the underlying linker, since the
+ # archiving commands below assume that GNU ld is being used.
+ if test yes = "$with_gnu_ld"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+
+ # If archive_cmds runs LD, not CC, wlarc should be empty
+ # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+ # investigate it a little bit more. (MM)
+ wlarc='$wl'
+
+ # ancient GNU ld didn't support --whole-archive et. al.
+ if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+ $GREP 'no-whole-archive' > /dev/null; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ else
+ with_gnu_ld=no
+ wlarc=
+
+ # A generic and very simple default shared library creation
+ # command for GNU C++ for the case where it uses the native
+ # linker, instead of GNU ld. If possible, this setting should
+ # overridden to take advantage of the native linker features on
+ # the platform it is being used on.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+ fi
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+ else
+ GXX=no
+ with_gnu_ld=no
+ wlarc=
+ fi
+
+ # PORTME: fill in a description of your system's C++ link characteristics
+ AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ case $host_os in
+ aix3*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aix[[4-9]]*)
+ if test ia64 = "$host_cpu"; then
+ # On IA64, the linker does run time linking by default, so we don't
+ # have to do anything special.
+ aix_use_runtimelinking=no
+ exp_sym_flag='-Bexport'
+ no_entry_flag=
+ else
+ aix_use_runtimelinking=no
+
+ # Test if we are trying to use run time linking or normal
+ # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+ # have runtime linking enabled, and use it for executables.
+ # For shared libraries, we enable/disable runtime linking
+ # depending on the kind of the shared library created -
+ # when "with_aix_soname,aix_use_runtimelinking" is:
+ # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables
+ # "aix,yes" lib.so shared, rtl:yes, for executables
+ # lib.a static archive
+ # "both,no" lib.so.V(shr.o) shared, rtl:yes
+ # lib.a(lib.so.V) shared, rtl:no, for executables
+ # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables
+ # lib.a(lib.so.V) shared, rtl:no
+ # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables
+ # lib.a static archive
+ case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+ for ld_flag in $LDFLAGS; do
+ case $ld_flag in
+ *-brtl*)
+ aix_use_runtimelinking=yes
+ break
+ ;;
+ esac
+ done
+ if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then
+ # With aix-soname=svr4, we create the lib.so.V shared archives only,
+ # so we don't have lib.a shared libs to link our executables.
+ # We have to force runtime linking in this case.
+ aix_use_runtimelinking=yes
+ LDFLAGS="$LDFLAGS -Wl,-brtl"
+ fi
+ ;;
+ esac
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
+ fi
+
+ # When large executables or shared objects are built, AIX ld can
+ # have problems creating the table of contents. If linking a library
+ # or program results in "error TOC overflow" add -mminimal-toc to
+ # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
+ # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+ _LT_TAGVAR(archive_cmds, $1)=''
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='$wl-f,'
+ case $with_aix_soname,$aix_use_runtimelinking in
+ aix,*) ;; # no import file
+ svr4,* | *,yes) # use import file
+ # The Import File defines what to hardcode.
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+ ;;
+ esac
+
+ if test yes = "$GXX"; then
+ case $host_os in aix4.[[012]]|aix4.[[012]].*)
+ # We only want to do this on AIX 4.2 and lower, the check
+ # below for broken collect2 doesn't work under 4.3+
+ collect2name=`$CC -print-prog-name=collect2`
+ if test -f "$collect2name" &&
+ strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+ then
+ # We have reworked collect2
+ :
+ else
+ # We have old collect2
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ # It fails to find uninstalled libraries when the uninstalled
+ # path is not listed in the libpath. Setting hardcode_minus_L
+ # to unsupported forces relinking
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ fi
+ esac
+ shared_flag='-shared'
+ if test yes = "$aix_use_runtimelinking"; then
+ shared_flag=$shared_flag' $wl-G'
+ fi
+ # Need to ensure runtime linking is disabled for the traditional
+ # shared library, or the linker may eventually find shared libraries
+ # /with/ Import File - we do not want to mix them.
+ shared_flag_aix='-shared'
+ shared_flag_svr4='-shared $wl-G'
+ else
+ # not using gcc
+ if test ia64 = "$host_cpu"; then
+ # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+ # chokes on -Wl,-G. The following line is correct:
+ shared_flag='-G'
+ else
+ if test yes = "$aix_use_runtimelinking"; then
+ shared_flag='$wl-G'
+ else
+ shared_flag='$wl-bM:SRE'
+ fi
+ shared_flag_aix='$wl-bM:SRE'
+ shared_flag_svr4='$wl-G'
+ fi
+ fi
+
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall'
+ # It seems that -bexpall does not export symbols beginning with
+ # underscore (_), so it is better to generate a list of symbols to
+ # export.
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+ # The "-G" linker flag allows undefined symbols.
+ _LT_TAGVAR(no_undefined_flag, $1)='-bernotok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
+ else
+ if test ia64 = "$host_cpu"; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib'
+ _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols"
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok'
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok'
+ if test yes = "$with_gnu_ld"; then
+ # We only use this code for GNU lds that support --whole-archive.
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+ else
+ # Exported symbols can be pulled into shared objects from archives
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d'
+ # -brtl affects multiple linker settings, -berok does not and is overridden later
+ compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`'
+ if test svr4 != "$with_aix_soname"; then
+ # This is similar to how AIX traditionally builds its shared
+ # libraries. Need -bnortl late, we may have -brtl in LDFLAGS.
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname'
+ fi
+ if test aix != "$with_aix_soname"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp'
+ else
+ # used by -dlpreopen to get the symbols
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir'
+ fi
+ _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d'
+ fi
+ fi
+ ;;
+
+ beos*)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Joseph Beckenbach says some releases of gcc
+ # support --undefined. This deserves some investigation. FIXME
+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ chorus*)
+ case $cc_basename in
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ case $GXX,$cc_basename in
+ ,cl* | no,cl*)
+ # Native MSVC
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='@'
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=.dll
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames='
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+ cp "$export_symbols" "$output_objdir/$soname.def";
+ echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp";
+ else
+ $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp;
+ fi~
+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+ linknames='
+ # The linker will not automatically build a static lib if we build a DLL.
+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ # Don't use ranlib
+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+ lt_tool_outputfile="@TOOL_OUTPUT@"~
+ case $lt_outputfile in
+ *.exe|*.EXE) ;;
+ *)
+ lt_outputfile=$lt_outputfile.exe
+ lt_tool_outputfile=$lt_tool_outputfile.exe
+ ;;
+ esac~
+ func_to_tool_file "$lt_outputfile"~
+ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then
+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+ $RM "$lt_outputfile.manifest";
+ fi'
+ ;;
+ *)
+ # g++
+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+ # as there is no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols'
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ # If the export-symbols file already is a .def file, use it as
+ # is; otherwise, prepend EXPORTS...
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+ cp $export_symbols $output_objdir/$soname.def;
+ else
+ echo EXPORTS > $output_objdir/$soname.def;
+ cat $export_symbols >> $output_objdir/$soname.def;
+ fi~
+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+ darwin* | rhapsody*)
+ _LT_DARWIN_LINKER_FEATURES($1)
+ ;;
+
+ os2*)
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ shrext_cmds=.dll
+ _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+ $ECHO EXPORTS >> $output_objdir/$libname.def~
+ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+ emximp -o $lib $output_objdir/$libname.def'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+ $ECHO EXPORTS >> $output_objdir/$libname.def~
+ prefix_cmds="$SED"~
+ if test EXPORTS = "`$SED 1q $export_symbols`"; then
+ prefix_cmds="$prefix_cmds -e 1d";
+ fi~
+ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+ emximp -o $lib $output_objdir/$libname.def'
+ _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ ;;
+
+ dgux*)
+ case $cc_basename in
+ ec++*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ ghcx*)
+ # Green Hills C++ Compiler
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ freebsd2.*)
+ # C++ shared libraries reported to be fairly broken before
+ # switch to ELF
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ freebsd-elf*)
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ ;;
+
+ freebsd* | dragonfly*)
+ # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+ # conventions
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ ;;
+
+ haiku*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ hpux9*)
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+ # but as the default
+ # location of the library.
+
+ case $cc_basename in
+ CC*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aCC*)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test yes = "$GXX"; then
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ hpux10*|hpux11*)
+ if test no = "$with_gnu_ld"; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ case $host_cpu in
+ hppa*64*|ia64*)
+ ;;
+ *)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ ;;
+ esac
+ fi
+ case $host_cpu in
+ hppa*64*|ia64*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+ *)
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+ # but as the default
+ # location of the library.
+ ;;
+ esac
+
+ case $cc_basename in
+ CC*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aCC*)
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test yes = "$GXX"; then
+ if test no = "$with_gnu_ld"; then
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ interix[[3-9]]*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+ # Instead, shared libraries are loaded at an image base (0x10000000 by
+ # default) and relocated if they conflict, which is a slow very memory
+ # consuming and fragmenting process. To avoid this, we pick a random,
+ # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+ # time. Moving up from 0x10000000 also allows more sbrk(2) space.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ ;;
+ irix5* | irix6*)
+ case $cc_basename in
+ CC*)
+ # SGI C++
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+
+ # Archives containing C++ object files must be created using
+ # "CC -ar", where "CC" is the IRIX C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+ ;;
+ *)
+ if test yes = "$GXX"; then
+ if test no = "$with_gnu_ld"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib'
+ fi
+ fi
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+ esac
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(inherit_rpath, $1)=yes
+ ;;
+
+ linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+ case $cc_basename in
+ KCC*)
+ # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+ # KCC will only create a shared library if the output file
+ # ends with ".so" (or ".sl" for HP-UX), so rename the library
+ # to its proper name (with version) after linking.
+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib'
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+
+ # Archives containing C++ object files must be created using
+ # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+ ;;
+ icpc* | ecpc* )
+ # Intel C++
+ with_gnu_ld=yes
+ # version 8.0 and above of icpc choke on multiply defined symbols
+ # if we add $predep_objects and $postdep_objects, however 7.1 and
+ # earlier do not add the objects themselves.
+ case `$CC -V 2>&1` in
+ *"Version 7."*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ *) # Version 8.0 or newer
+ tmp_idyn=
+ case $host_cpu in
+ ia64*) tmp_idyn=' -i_dynamic';;
+ esac
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ esac
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+ ;;
+ pgCC* | pgcpp*)
+ # Portland Group C++ compiler
+ case `$CC -V` in
+ *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+ _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+ _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+ $RANLIB $oldlib'
+ _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ *) # Version 6 and above use weak symbols
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ ;;
+ cxx*)
+ # Compaq C++
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols'
+
+ runpath_var=LD_RUN_PATH
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+ ;;
+ xl* | mpixl* | bgxl*)
+ # IBM XL 8.0 on PPC, with GNU ld
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+ if test yes = "$supports_anon_versioning"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*)
+ # Sun C++ 5.9
+ _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+ # Not sure whether something based on
+ # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+ # would be better.
+ output_verbose_link_cmd='func_echo_all'
+
+ # Archives containing C++ object files must be created using
+ # "CC -xar", where "CC" is the Sun C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+
+ lynxos*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ m88k*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ mvs*)
+ case $cc_basename in
+ cxx*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+ wlarc=
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ fi
+ # Workaround some broken pre-1.5 toolchains
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+ ;;
+
+ *nto* | *qnx*)
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ ;;
+
+ openbsd* | bitrig*)
+ if test -f /usr/libexec/ld.so; then
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+ fi
+ output_verbose_link_cmd=func_echo_all
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ osf3* | osf4* | osf5*)
+ case $cc_basename in
+ KCC*)
+ # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+ # KCC will only create a shared library if the output file
+ # ends with ".so" (or ".sl" for HP-UX), so rename the library
+ # to its proper name (with version) after linking.
+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Archives containing C++ object files must be created using
+ # the KAI C++ compiler.
+ case $host in
+ osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+ *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+ esac
+ ;;
+ RCC*)
+ # Rational C++ 2.4.1
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ cxx*)
+ case $host in
+ osf3*)
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ ;;
+ *)
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+ echo "-hidden">> $lib.exp~
+ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~
+ $RM $lib.exp'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test yes,no = "$GXX,$with_gnu_ld"; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+ case $host in
+ osf3*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ psos*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ sunos4*)
+ case $cc_basename in
+ CC*)
+ # Sun C++ 4.x
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ lcc*)
+ # Lucid
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ solaris*)
+ case $cc_basename in
+ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+ _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ # The compiler driver will combine and reorder linker options,
+ # but understands '-z linker_flag'.
+ # Supported since Solaris 2.6 (maybe 2.5.1?)
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+ ;;
+ esac
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+ output_verbose_link_cmd='func_echo_all'
+
+ # Archives containing C++ object files must be created using
+ # "CC -xar", where "CC" is the Sun C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+ ;;
+ gcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib'
+
+ # The C++ compiler must be used to create the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+ ;;
+ *)
+ # GNU C++ compiler with Solaris linker
+ if test yes,no = "$GXX,$with_gnu_ld"; then
+ _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+ else
+ # g++ 2.7 appears to require '-G' NOT '-shared' on this
+ # platform.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+ fi
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir'
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract'
+ ;;
+ esac
+ fi
+ ;;
+ esac
+ ;;
+
+ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+ _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var='LD_RUN_PATH'
+
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6*)
+ # Note: We CANNOT use -z defs as we might desire, because we do not
+ # link with -lc, and that would cause any symbols used from libc to
+ # always be unresolved, which means just about no library would
+ # ever link correctly. If we're not using GNU ld we use -z text
+ # though, which does catch some bad symbols but isn't as heavy-handed
+ # as -z defs.
+ _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+ _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport'
+ runpath_var='LD_RUN_PATH'
+
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+ '"$_LT_TAGVAR(old_archive_cmds, $1)"
+ _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+ '"$_LT_TAGVAR(reload_cmds, $1)"
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ ;;
+
+ tandem*)
+ case $cc_basename in
+ NCC*)
+ # NonStop-UX NCC 3.20
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ vxworks*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+
+ AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+ test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no
+
+ _LT_TAGVAR(GCC, $1)=$GXX
+ _LT_TAGVAR(LD, $1)=$LD
+
+ ## CAVEAT EMPTOR:
+ ## There is no encapsulation within the following macros, do not change
+ ## the running order or otherwise move them around unless you know exactly
+ ## what you are doing...
+ _LT_SYS_HIDDEN_LIBDEPS($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
+ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+ with_gnu_ld=$lt_save_with_gnu_ld
+ lt_cv_path_LDCXX=$lt_cv_path_LD
+ lt_cv_path_LD=$lt_save_path_LD
+ lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+ lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test yes != "$_lt_caught_CXX_error"
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
# _LT_FUNC_STRIPNAME_CNF
# ----------------------
# func_stripname_cnf prefix suffix name
# strip PREFIX and SUFFIX off of NAME.
# PREFIX and SUFFIX must not contain globbing or regex special
# characters, hashes, percent signs, but SUFFIX may contain a leading
# dot (in which case that matches only a dot).
#
# This function is identical to the (non-XSI) version of func_stripname,
# except this one can be used by m4 code that may be executed by configure,
# rather than the libtool script.
m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
AC_REQUIRE([_LT_DECL_SED])
AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
# NOTE: @S|@ is libtool's m4 quoting for a literal '$', so @S|@1..@S|@3
# become the shell positional parameters $1 (prefix), $2 (suffix) and
# $3 (name) in the configure script this macro expands into.  The result
# is left in the global shell variable func_stripname_result.
func_stripname_cnf ()
{
  case @S|@2 in
  # A suffix that starts with '.' gets the dot backslash-escaped so sed
  # matches a literal dot rather than "any character".
  .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%\\\\@S|@2\$%%"`;;
  *) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;;
  esac
} # func_stripname_cnf
])# _LT_FUNC_STRIPNAME_CNF
+
+
# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
# ---------------------------------
# Figure out "hidden" library dependencies from verbose
# compiler output when linking a shared library.
# Parse the compiler output and extract the necessary
# objects, libraries and library flags.
m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
# Dependencies to place before and after the object being linked:
_LT_TAGVAR(predep_objects, $1)=
_LT_TAGVAR(postdep_objects, $1)=
_LT_TAGVAR(predeps, $1)=
_LT_TAGVAR(postdeps, $1)=
_LT_TAGVAR(compiler_lib_search_path, $1)=

dnl we can't use the lt_simple_compile_test_code here,
dnl because it contains code intended for an executable,
dnl not a library.  It's possible we should let each
dnl tag define a new lt_????_link_test_code variable,
dnl but it's only used here...
m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
int a;
void foo (void) { a = 0; }
_LT_EOF
], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
class Foo
{
public:
  Foo (void) { a = 0; }
private:
  int a;
};
_LT_EOF
], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer*4 a
      a=0
      return
      end
_LT_EOF
], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer a
      a=0
      return
      end
_LT_EOF
], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
public class foo {
  private int a;
  public void bar (void) {
    a = 0;
  }
};
_LT_EOF
], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
package foo
func foo() {
}
_LT_EOF
])

# Link-time-optimization modes would emit bytecode instead of a regular
# object file and confuse the output parsing below, so force them off for
# this one probe and restore the user's CFLAGS afterwards.
_lt_libdeps_save_CFLAGS=$CFLAGS
case "$CC $CFLAGS " in #(
*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
esac

dnl Parse the compiler output and extract the necessary
dnl objects, libraries and library flags.
if AC_TRY_EVAL(ac_compile); then
  # Parse the compiler output and extract the necessary
  # objects, libraries and library flags.

  # Sentinel used to keep track of whether or not we are before
  # the conftest object file.
  pre_test_object_deps_done=no

  for p in `eval "$output_verbose_link_cmd"`; do
    case $prev$p in

    -L* | -R* | -l*)
       # Some compilers place space between "-{L,R}" and the path.
       # Remove the space.
       if test x-L = "$p" ||
          test x-R = "$p"; then
	 prev=$p
	 continue
       fi

       # Expand the sysroot to ease extracting the directories later.
       if test -z "$prev"; then
         case $p in
         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
         esac
       fi
       case $p in
       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
       esac
       if test no = "$pre_test_object_deps_done"; then
	 case $prev in
	 -L | -R)
	   # Internal compiler library paths should come after those
	   # provided by the user.  The postdeps already come after the
	   # user supplied libs so there is no need to process them.
	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
	     _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p
	   else
	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p"
	   fi
	   ;;
	 # The "-l" case would never come before the object being
	 # linked, so don't bother handling this case.
	 esac
       else
	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
	   _LT_TAGVAR(postdeps, $1)=$prev$p
	 else
	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p"
	 fi
       fi
       prev=
       ;;

    *.lto.$objext) ;; # Ignore GCC LTO objects
    *.$objext)
       # This assumes that the test object file only shows up
       # once in the compiler output.
       if test "$p" = "conftest.$objext"; then
	 pre_test_object_deps_done=yes
	 continue
       fi

       if test no = "$pre_test_object_deps_done"; then
	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
	   _LT_TAGVAR(predep_objects, $1)=$p
	 else
	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
	 fi
       else
	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
	   _LT_TAGVAR(postdep_objects, $1)=$p
	 else
	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
	 fi
       fi
       ;;

    *) ;; # Ignore the rest.

    esac
  done

  # Clean up.
  rm -f a.out a.exe
else
  echo "libtool.m4: error: problem compiling $1 test program"
fi

# Remove the test object file too.  (Fixed: this previously said
# 'confest.$objext', a typo that left conftest.$objext behind.)
$RM -f conftest.$objext
CFLAGS=$_lt_libdeps_save_CFLAGS

# PORTME: override above test on systems where it is broken
m4_if([$1], [CXX],
[case $host_os in
interix[[3-9]]*)
  # Interix 3.5 installs completely hosed .la files for C++, so rather than
  # hack all around it, let's just trust "g++" to DTRT.
  _LT_TAGVAR(predep_objects,$1)=
  _LT_TAGVAR(postdep_objects,$1)=
  _LT_TAGVAR(postdeps,$1)=
  ;;
esac
])

case " $_LT_TAGVAR(postdeps, $1) " in
*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
esac
_LT_TAGVAR(compiler_lib_search_dirs, $1)=
if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
  _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! !g' -e 's!^ !!'`
fi
_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
    [The directories searched by this compiler when creating a shared library])
_LT_TAGDECL([], [predep_objects], [1],
    [Dependencies to place before and after the objects being linked to
    create a shared library])
_LT_TAGDECL([], [postdep_objects], [1])
_LT_TAGDECL([], [predeps], [1])
_LT_TAGDECL([], [postdeps], [1])
_LT_TAGDECL([], [compiler_lib_search_path], [1],
    [The library search path used internally by the compiler when linking
    a shared library])
])# _LT_SYS_HIDDEN_LIBDEPS
+
+
# _LT_LANG_F77_CONFIG([TAG])
# --------------------------
# Ensure that the configuration variables for a Fortran 77 compiler are
# suitably defined.  These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_F77_CONFIG],
[AC_LANG_PUSH(Fortran 77)
# Disable this tag entirely when no usable F77 compiler was found ($F77
# empty, or the literal 'no' placeholder).
if test -z "$F77" || test no = "$F77"; then
  _lt_disable_F77=yes
fi

# Reset all tag-specific link characteristics to a clean baseline; the
# probe macros invoked below fill them in for the F77 tag.
_LT_TAGVAR(archive_cmds_need_lc, $1)=no
_LT_TAGVAR(allow_undefined_flag, $1)=
_LT_TAGVAR(always_export_symbols, $1)=no
_LT_TAGVAR(archive_expsym_cmds, $1)=
_LT_TAGVAR(export_dynamic_flag_spec, $1)=
_LT_TAGVAR(hardcode_direct, $1)=no
_LT_TAGVAR(hardcode_direct_absolute, $1)=no
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
_LT_TAGVAR(hardcode_libdir_separator, $1)=
_LT_TAGVAR(hardcode_minus_L, $1)=no
_LT_TAGVAR(hardcode_automatic, $1)=no
_LT_TAGVAR(inherit_rpath, $1)=no
_LT_TAGVAR(module_cmds, $1)=
_LT_TAGVAR(module_expsym_cmds, $1)=
_LT_TAGVAR(link_all_deplibs, $1)=unknown
_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
_LT_TAGVAR(reload_flag, $1)=$reload_flag
_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
_LT_TAGVAR(no_undefined_flag, $1)=
_LT_TAGVAR(whole_archive_flag_spec, $1)=
_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no

# Source file extension for f77 test sources.
ac_ext=f

# Object file extension for compiled f77 test sources.
objext=o
_LT_TAGVAR(objext, $1)=$objext

# No sense in running all these tests if we already determined that
# the F77 compiler isn't working.  Some variables (like enable_shared)
# are currently assumed to apply to all compilers on this platform,
# and will be corrupted by setting them based on a non-working compiler.
if test yes != "$_lt_disable_F77"; then
  # Code to be used in simple compile tests
  lt_simple_compile_test_code="\
      subroutine t
      return
      end
"

  # Code to be used in simple link tests
  lt_simple_link_test_code="\
      program t
      end
"

  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
  _LT_TAG_COMPILER

  # save warnings/boilerplate of simple test code
  _LT_COMPILER_BOILERPLATE
  _LT_LINKER_BOILERPLATE

  # Allow CC to be a program name with arguments.
  # Masquerade the Fortran 77 compiler as $CC/$CFLAGS for the generic
  # probes; the originals are restored at the bottom of this macro.
  lt_save_CC=$CC
  lt_save_GCC=$GCC
  lt_save_CFLAGS=$CFLAGS
  CC=${F77-"f77"}
  CFLAGS=$FFLAGS
  compiler=$CC
  _LT_TAGVAR(compiler, $1)=$CC
  _LT_CC_BASENAME([$compiler])
  # The generic probes key off $GCC; expose "the compiler is g77" there.
  GCC=$G77
  if test -n "$compiler"; then
    AC_MSG_CHECKING([if libtool supports shared libraries])
    AC_MSG_RESULT([$can_build_shared])

    AC_MSG_CHECKING([whether to build shared libraries])
    test no = "$can_build_shared" && enable_shared=no

    # On AIX, shared libraries and static libraries use the same namespace, and
    # are all built from PIC.
    case $host_os in
    aix3*)
      test yes = "$enable_shared" && enable_static=no
      if test -n "$RANLIB"; then
        archive_cmds="$archive_cmds~\$RANLIB \$lib"
        postinstall_cmds='$RANLIB $lib'
      fi
      ;;
    aix[[4-9]]*)
      if test ia64 != "$host_cpu"; then
        case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
        yes,aix,yes) ;;  # shared object as lib.so file only
        yes,svr4,*) ;;   # shared object as lib.so archive member only
        yes,*) enable_static=no ;; # shared object in lib.a archive as well
        esac
      fi
      ;;
    esac
    AC_MSG_RESULT([$enable_shared])

    AC_MSG_CHECKING([whether to build static libraries])
    # Make sure either enable_shared or enable_static is yes.
    test yes = "$enable_shared" || enable_static=yes
    AC_MSG_RESULT([$enable_static])

    _LT_TAGVAR(GCC, $1)=$G77
    _LT_TAGVAR(LD, $1)=$LD

    ## CAVEAT EMPTOR:
    ## There is no encapsulation within the following macros, do not change
    ## the running order or otherwise move them around unless you know exactly
    ## what you are doing...
    _LT_COMPILER_PIC($1)
    _LT_COMPILER_C_O($1)
    _LT_COMPILER_FILE_LOCKS($1)
    _LT_LINKER_SHLIBS($1)
    _LT_SYS_DYNAMIC_LINKER($1)
    _LT_LINKER_HARDCODE_LIBPATH($1)

    _LT_CONFIG($1)
  fi # test -n "$compiler"

  # Restore the C compiler settings saved above.
  GCC=$lt_save_GCC
  CC=$lt_save_CC
  CFLAGS=$lt_save_CFLAGS
fi # test yes != "$_lt_disable_F77"

AC_LANG_POP
])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test no = "$FC"; then
+ _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working. Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test yes != "$_lt_disable_FC"; then
+ # Code to be used in simple compile tests
+ lt_simple_compile_test_code="\
+ subroutine t
+ return
+ end
+"
+
+ # Code to be used in simple link tests
+ lt_simple_link_test_code="\
+ program t
+ end
+"
+
+ # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+ _LT_TAG_COMPILER
+
+ # save warnings/boilerplate of simple test code
+ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
+ lt_save_GCC=$GCC
+ lt_save_CFLAGS=$CFLAGS
+ CC=${FC-"f95"}
+ CFLAGS=$FCFLAGS
+ compiler=$CC
+ GCC=$ac_cv_fc_compiler_gnu
+
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+
+ if test -n "$compiler"; then
+ AC_MSG_CHECKING([if libtool supports shared libraries])
+ AC_MSG_RESULT([$can_build_shared])
+
+ AC_MSG_CHECKING([whether to build shared libraries])
+ test no = "$can_build_shared" && enable_shared=no
+
+ # On AIX, shared libraries and static libraries use the same namespace, and
+ # are all built from PIC.
+ case $host_os in
+ aix3*)
+ test yes = "$enable_shared" && enable_static=no
+ if test -n "$RANLIB"; then
+ archive_cmds="$archive_cmds~\$RANLIB \$lib"
+ postinstall_cmds='$RANLIB $lib'
+ fi
+ ;;
+ aix[[4-9]]*)
+ if test ia64 != "$host_cpu"; then
+ case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
+ yes,aix,yes) ;; # shared object as lib.so file only
+ yes,svr4,*) ;; # shared object as lib.so archive member only
+ yes,*) enable_static=no ;; # shared object in lib.a archive as well
+ esac
+ fi
+ ;;
+ esac
+ AC_MSG_RESULT([$enable_shared])
+
+ AC_MSG_CHECKING([whether to build static libraries])
+ # Make sure either enable_shared or enable_static is yes.
+ test yes = "$enable_shared" || enable_static=yes
+ AC_MSG_RESULT([$enable_static])
+
+ _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu
+ _LT_TAGVAR(LD, $1)=$LD
+
+ ## CAVEAT EMPTOR:
+ ## There is no encapsulation within the following macros, do not change
+ ## the running order or otherwise move them around unless you know exactly
+ ## what you are doing...
+ _LT_SYS_HIDDEN_LIBDEPS($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+ fi # test -n "$compiler"
+
+ GCC=$lt_save_GCC
+ CC=$lt_save_CC
+ CFLAGS=$lt_save_CFLAGS
+fi # test yes != "$_lt_disable_FC"
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler
+# are suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)=$LD
+_LT_CC_BASENAME([$compiler])
+
+# GCJ postdates the GCC versions that did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+if test -n "$compiler"; then
+ _LT_COMPILER_NO_RTTI($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_GO_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the GNU Go compiler
+# are suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_GO_CONFIG],
+[AC_REQUIRE([LT_PROG_GO])dnl
+AC_LANG_SAVE
+
+# Source file extension for Go test sources.
+ac_ext=go
+
+# Object file extension for compiled Go test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="package main; func main() { }"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='package main; func main() { }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GOC-"gccgo"}
+CFLAGS=$GOFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)=$LD
+_LT_CC_BASENAME([$compiler])
+
+# Go postdates the GCC versions that did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+if test -n "$compiler"; then
+ _LT_COMPILER_NO_RTTI($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GO_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code=$lt_simple_compile_test_code
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+ :
+ _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+ [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+ [AC_CHECK_TOOL(GCJ, gcj,)
+ test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2"
+ AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_GO
+# ----------
+AC_DEFUN([LT_PROG_GO],
+[AC_CHECK_TOOL(GOC, gccgo,)
+])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully-functional sed program, that truncates
+# as few characters as possible. Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+ [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+# NOTE: This macro has been submitted for inclusion into #
+# GNU Autoconf as AC_PROG_SED. When it is available in #
+# a released version of Autoconf we should remove this #
+# macro and use it instead. #
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed's as ones to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for lt_ac_prog in sed gsed; do
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+ lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+ fi
+ done
+ done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+ test ! -f "$lt_ac_sed" && continue
+ cat /dev/null > conftest.in
+ lt_ac_count=0
+ echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+ # Check for GNU sed and select it if it is found.
+ if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+ lt_cv_path_SED=$lt_ac_sed
+ break
+ fi
+ while true; do
+ cat conftest.in conftest.in >conftest.tmp
+ mv conftest.tmp conftest.in
+ cp conftest.in conftest.nl
+ echo >>conftest.nl
+ $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+ cmp -s conftest.out conftest.nl || break
+ # 10000 chars as input seems more than enough
+ test 10 -lt "$lt_ac_count" && break
+ lt_ac_count=`expr $lt_ac_count + 1`
+ if test "$lt_ac_count" -gt "$lt_ac_max"; then
+ lt_ac_max=$lt_ac_count
+ lt_cv_path_SED=$lt_ac_sed
+ fi
+ done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+ lt_unset=unset
+else
+ lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+ # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+ lt_SP2NL='tr \040 \012'
+ lt_NL2SP='tr \015\012 \040\040'
+ ;;
+ *) # EBCDIC based system
+ lt_SP2NL='tr \100 \n'
+ lt_NL2SP='tr \r\n \100\100'
+ ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine what file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path). These are needed
+# for certain cross-compile configurations and native mingw.
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+ *-*-mingw* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+ ;;
+ *-*-cygwin* )
+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+ ;;
+ * ) # otherwise, assume *nix
+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+ ;;
+ esac
+ ;;
+ *-*-cygwin* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+ ;;
+ *-*-cygwin* )
+ lt_cv_to_host_file_cmd=func_convert_file_noop
+ ;;
+ * ) # otherwise, assume *nix
+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+ ;;
+ esac
+ ;;
+ * ) # unhandled hosts (and "normal" native builds)
+ lt_cv_to_host_file_cmd=func_convert_file_noop
+ ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+ [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[# assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+ *-*-mingw* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+ ;;
+ esac
+ ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+ [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
+
+# Helper functions for option handling. -*- Autoconf -*-
+#
+# Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software
+# Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 8 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it. Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+ _LT_MANGLE_DEFUN([$1], [$2]),
+ [m4_warning([Unknown $1 option '$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+ [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+ [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME. If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+ [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+ dnl
+ dnl Simply set some default values (i.e off) if boolean options were not
+ dnl specified:
+ _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+ ])
+ _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+ ])
+ dnl
+ dnl If no reference was made to various pairs of opposing options, then
+ dnl we run the default mode handler for the pair. For example, if neither
+ dnl 'shared' nor 'disable-shared' was passed, we enable building of shared
+ dnl archives by default:
+ _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+ _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+ _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+ _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+ [_LT_ENABLE_FAST_INSTALL])
+ _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4],
+ [_LT_WITH_AIX_SONAME([aix])])
+ ])
+])# _LT_SET_OPTIONS
+
+
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+ AC_CHECK_TOOL(AS, as, false)
+ AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+ AC_CHECK_TOOL(OBJDUMP, objdump, false)
+ ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS], [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the 'shared' and
+# 'disable-shared' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+ [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+ [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_shared=yes ;;
+ no) enable_shared=no ;;
+ *)
+ enable_shared=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+ for pkg in $enableval; do
+ IFS=$lt_save_ifs
+ if test "X$pkg" = "X$p"; then
+ enable_shared=yes
+ fi
+ done
+ IFS=$lt_save_ifs
+ ;;
+ esac],
+ [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+ _LT_DECL([build_libtool_libs], [enable_shared], [0],
+ [Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the 'static' and
+# 'disable-static' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+ [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+ [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_static=yes ;;
+ no) enable_static=no ;;
+ *)
+ enable_static=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+ for pkg in $enableval; do
+ IFS=$lt_save_ifs
+ if test "X$pkg" = "X$p"; then
+ enable_static=yes
+ fi
+ done
+ IFS=$lt_save_ifs
+ ;;
+ esac],
+ [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+ _LT_DECL([build_old_libs], [enable_static], [0],
+ [Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the 'fast-install'
+# and 'disable-fast-install' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+ [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+ [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_fast_install=yes ;;
+ no) enable_fast_install=no ;;
+ *)
+ enable_fast_install=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+ for pkg in $enableval; do
+ IFS=$lt_save_ifs
+ if test "X$pkg" = "X$p"; then
+ enable_fast_install=yes
+ fi
+ done
+ IFS=$lt_save_ifs
+ ;;
+ esac],
+ [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+ [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the 'fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the 'disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_AIX_SONAME([DEFAULT])
+# ------------------------------
+# implement the --with-aix-soname flag, and support the 'aix-soname=aix'
+# and 'aix-soname=both' and 'aix-soname=svr4' LT_INIT options.  DEFAULT
+# is either 'aix', 'both' or 'svr4'.  If omitted, it defaults to 'aix'.
+m4_define([_LT_WITH_AIX_SONAME],
+[m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl
+shared_archive_member_spec=
+case $host,$enable_shared in
+power*-*-aix[[5-9]]*,yes)
+ AC_MSG_CHECKING([which variant of shared library versioning to provide])
+ AC_ARG_WITH([aix-soname],
+ [AS_HELP_STRING([--with-aix-soname=aix|svr4|both],
+ [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])],
+ [case $withval in
+ aix|svr4|both)
+ ;;
+ *)
+ AC_MSG_ERROR([Unknown argument to --with-aix-soname])
+ ;;
+ esac
+ lt_cv_with_aix_soname=$with_aix_soname],
+ [AC_CACHE_VAL([lt_cv_with_aix_soname],
+ [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT)
+ with_aix_soname=$lt_cv_with_aix_soname])
+ AC_MSG_RESULT([$with_aix_soname])
+ if test aix != "$with_aix_soname"; then
+ # For the AIX way of multilib, we name the shared archive member
+ # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o',
+ # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File.
+ # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag,
+ # the AIX toolchain works better with OBJECT_MODE set (default 32).
+ if test 64 = "${OBJECT_MODE-32}"; then
+ shared_archive_member_spec=shr_64
+ else
+ shared_archive_member_spec=shr
+ fi
+ fi
+ ;;
+*)
+ with_aix_soname=aix
+ ;;
+esac
+
+_LT_DECL([], [shared_archive_member_spec], [0],
+ [Shared archive member basename, for filename based shared library versioning on AIX])dnl
+])# _LT_WITH_AIX_SONAME
+
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])])
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])])
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the 'pic-only' and 'no-pic'
+# LT_INIT options.
+# MODE is either 'yes' or 'no'. If omitted, it defaults to 'both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+ [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
+ [try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+ [lt_p=${PACKAGE-default}
+ case $withval in
+ yes|no) pic_mode=$withval ;;
+ *)
+ pic_mode=default
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+ for lt_pkg in $withval; do
+ IFS=$lt_save_ifs
+ if test "X$lt_pkg" = "X$lt_p"; then
+ pic_mode=yes
+ fi
+ done
+ IFS=$lt_save_ifs
+ ;;
+ esac],
+ [pic_mode=m4_default([$1], [default])])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+ [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+ [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+ [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+ [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+ [m4_define([_LTDL_TYPE], [convenience])])
+
+# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*-
+#
+# Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software
+# Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+ [$#], [2], [[$2]],
+ [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+ [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf-2.59, which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+ [$#], 1, [],
+ [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (different
+# than defined and empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+ m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+ [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+ [m4_foreach([_Lt_suffix],
+ ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+ [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+ [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+ [lt_append([$1], [$2], [$3])$4],
+ [$5])],
+ [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+ m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+ m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+ [$5],
+ [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+ [lt_join(m4_quote(m4_default([$4], [[, ]])),
+ lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+ [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
+
+# ltversion.m4 -- version numbers -*- Autoconf -*-
+#
+# Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc.
+# Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 4179 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4.6])
+m4_define([LT_PACKAGE_REVISION], [2.4.6])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4.6'
+macro_revision='2.4.6'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
+
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
+#
+# Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software
+# Foundation, Inc.
+# Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN),
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else. This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])])
+
diff --git a/BotZone2.8v1 Android/sqlcipher/art/sqlite370.eps b/BotZone2.8v1 Android/sqlcipher/art/sqlite370.eps
new file mode 100644
index 0000000..7701756
--- /dev/null
+++ b/BotZone2.8v1 Android/sqlcipher/art/sqlite370.eps
@@ -0,0 +1,5333 @@
+%!PS-Adobe-3.1 EPSF-3.0
+%ADO_DSC_Encoding: MacOS Roman
+%%Title: logo_color_eps.eps
+%%Creator: Adobe Illustrator(R) 13.0
+%%For: Mary Diarte
+%%CreationDate: 5/25/10
+%%BoundingBox: 0 0 372 212
+%%HiResBoundingBox: 0 0 372 211.3042
+%%CropBox: 0 0 372 211.3042
+%%LanguageLevel: 2
+%%DocumentData: Clean7Bit
+%ADOBeginClientInjection: DocumentHeader "AI11EPS"
+%%AI8_CreatorVersion: 13.0.2
+%AI9_PrintingDataBegin
+%AI3_Cropmarks: 36.0000 36.0000 336.0000 175.3042
+%ADO_BuildNumber: Adobe Illustrator(R) 13.0.2 x434 R agm 4.4379 ct 5.1039
+%ADO_ContainsXMP: MainFirst
+%AI7_Thumbnail: 128 76 8
+%%BeginData: 6312 Hex Bytes
+%0000330000660000990000CC0033000033330033660033990033CC0033FF
+%0066000066330066660066990066CC0066FF009900009933009966009999
+%0099CC0099FF00CC0000CC3300CC6600CC9900CCCC00CCFF00FF3300FF66
+%00FF9900FFCC3300003300333300663300993300CC3300FF333300333333
+%3333663333993333CC3333FF3366003366333366663366993366CC3366FF
+%3399003399333399663399993399CC3399FF33CC0033CC3333CC6633CC99
+%33CCCC33CCFF33FF0033FF3333FF6633FF9933FFCC33FFFF660000660033
+%6600666600996600CC6600FF6633006633336633666633996633CC6633FF
+%6666006666336666666666996666CC6666FF669900669933669966669999
+%6699CC6699FF66CC0066CC3366CC6666CC9966CCCC66CCFF66FF0066FF33
+%66FF6666FF9966FFCC66FFFF9900009900339900669900999900CC9900FF
+%9933009933339933669933999933CC9933FF996600996633996666996699
+%9966CC9966FF9999009999339999669999999999CC9999FF99CC0099CC33
+%99CC6699CC9999CCCC99CCFF99FF0099FF3399FF6699FF9999FFCC99FFFF
+%CC0000CC0033CC0066CC0099CC00CCCC00FFCC3300CC3333CC3366CC3399
+%CC33CCCC33FFCC6600CC6633CC6666CC6699CC66CCCC66FFCC9900CC9933
+%CC9966CC9999CC99CCCC99FFCCCC00CCCC33CCCC66CCCC99CCCCCCCCCCFF
+%CCFF00CCFF33CCFF66CCFF99CCFFCCCCFFFFFF0033FF0066FF0099FF00CC
+%FF3300FF3333FF3366FF3399FF33CCFF33FFFF6600FF6633FF6666FF6699
+%FF66CCFF66FFFF9900FF9933FF9966FF9999FF99CCFF99FFFFCC00FFCC33
+%FFCC66FFCC99FFCCCCFFCCFFFFFF33FFFF66FFFF99FFFFCC110000001100
+%000011111111220000002200000022222222440000004400000044444444
+%550000005500000055555555770000007700000077777777880000008800
+%000088888888AA000000AA000000AAAAAAAABB000000BB000000BBBBBBBB
+%DD000000DD000000DDDDDDDDEE000000EE000000EEEEEEEE0000000000FF
+%00FF0000FFFFFF0000FF00FFFFFF00FFFFFF
+%524C45FDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFD4AFFA90C0D
+%0C35A8FD79FF3505060506050CA8FD5AFF84360E0E0D140E0E0D140E0E0D
+%140E0E0D140E0E0D140E0E0D140E0E0D0D0506050C05060535FD59FF7E0E
+%3685608560856085608560856085608560856085608560856085600D0506
+%05060506050605FD59FF365A856085618560856185608561856085618560
+%856185608561856035050605060506050605067EFD58FF0D615A615A615A
+%615A615A615A615A615A615A615A615A615A615A5A050605060506050605
+%060584FD58FF146185608561856085618560856185608561856085618560
+%856161060C0506050C0606050C05067EFD58FF0D615A615A615A615A615A
+%615A615A615A615A615A615A615A610D0605060506060C050605060584FD
+%58FF0E5A855A615A855A615A855A615A855A615A855A615A855A61360605
+%060506062F050605060506A8FD58FF0D5A5A615A5A5A615A5A5A615A5A5A
+%615A5A5A615A5A5A615A0C05060506055905060506050605FD59FF0E5A61
+%5A615A615A615A615A615A615A615A615A615A615A5A050C0506052F0C06
+%050C05060535FD59FF0D5A365B5A5A365B5A5A365B5A5A365B5A5A365B5A
+%5A365B0C0605060506590605060506050659FD59FF145A5B5A615A5B5A61
+%5A5B5A615A5B5A615A5B5A615A5B360605060506350C05060506050606FD
+%5AFF0D5A365A365A365A365A365A365A365A365A365A365A360D05060506
+%05840506050605060535FD5AFF14365B365B365B365B365B365B365B365B
+%365B365B365B050C0506055A0C06050C0506050CA8FD5AFF0D5A3636365A
+%3636365A3636365A3636365A3636365A0D060506050C5906050605060506
+%0CFD5BFF0E365A365A365A365A365A365A365A365A365A365A360D050605
+%06590C05060506050605A9FD5BFF0D363036303630363036303630363036
+%3036303630360506050605A90506050605060535FD5CFF0EFD14360D0C05
+%06055A2F06050C0506050CAFFD5CFF0D360E3630360E3630360E3630360E
+%3630360E36300D0506050684060506050605062FFD5DFF1436360E363636
+%0E3636360E3636360E3636360E360C0605060C5905060506050606FD5EFF
+%0D360E360D360E360D360E360D360E360D360E360D0C0506058406060506
+%05060CA9FD5EFF140E360E360E360E360E360E360E360E360E360E0D0506
+%050CA806050C062F2FA9FFFF2F0C050C06350C84FD07FF5A0C0C060D050C
+%2FFD06FF0D0506050C0CFD3AFF0D360E140D360E140D360E140D360E140D
+%360E14060605062E3505060506050CFFFF0C0659FFFFA806065AFD05FF7E
+%0C0535A8FFFFFF590C0559FD05FF7E05062EFD3BFF0E0E360E140E360E14
+%0E360E140E360E140E360E0C0506058406060506050684FF84062FFD05FF
+%0584FD04FF590605A9FD06FFA80C052FFD04FF84060584FD3BFF0D0E0D14
+%0D0E0D140D0E0D140D0E0D140D0E0D0D050605067E06050605062FFFFF35
+%055AFD05FF2F59FFFFFF7E060559FD08FF84060535FD04FF05065AFD09FF
+%060CFD30FF0E0E140E140E140E140E140E140E140E140E140D06050C0C5A
+%050C05060CFFFFFF2F0C0CFD05FFA8FD04FF0D050CFD0AFF2F0605AFFFFF
+%FF0C0584FD09FF0D05AFFD05FFA9FD29FF0D0E0D140D0E0D140D0E0D140D
+%0E0D140D0E0D0D050605590606050605AFFFFFFF5905062FFD08FF840506
+%2FFD0AFFA905062EFFFFFF05065AFD0AFFA9FD05FF5A06FD29FF140E140D
+%140E140D140E140D140E140D140E140D0C05065906050606A9FD04FFA80C
+%05060CFD07FF2F060584FD0BFF0C050CFFFFFF060584FD10FF060CFD29FF
+%0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0606055A05062FFD07FF
+%2F0605060559FD05FF0C0506FD0CFF060605FFFFFF050659FD07FFA85A2E
+%0C06FFFF350506050C0684FD04FF0C06050C59FD1DFF140E140D140E140D
+%140E140D140E0E0D140E0E0D0E050C2F0C84FD09FF350C0506050DAFFFFF
+%FF0C0605FD0CFF0D0506FFFFFF060584FD07FFA92F0C050DFFFF7E5A050C
+%7E84AFFFFFFF060C2F0C050CA9FD1CFF0D0E0D0E0D0E0D0E0D0E0D0E0D0E
+%0D0E0D0E0D0E0D060C0C59FD0BFF840C0506050CA8FFFF0C0506FD0CFF06
+%0605FFFFA9050659FD09FF840606FFFFFF590606FD05FF060C84FF59060C
+%FD1DFF0E0D140E0E0D140E0E0D140E0E0D140E0E0D140E0E06590CFD0EFF
+%5A0606050DFFFF0C060584FD0BFF0C050CFFFF7E060584FD0AFF050CFFFF
+%FF84050DFD04FF840635FFFF2F05FD1EFF0D0E0D0E0D0E0D0E0D0E0D0E0D
+%0E0D0E0D0E0D0E0D0C2E84FD0FFF84050605FFFF59050659FD0BFF05060C
+%FFFF8405065AFD09FF590606FFFFFF5A0606FD04FF59055A590C0584FD1E
+%FF0E0D140E0E0D140E0E0D140E0E0D140E0E0D140E0E0684FD0AFF59FD06
+%FF5A050C7EFFFF0C052FFD0AFF59060584FFFF7E0C0584FD09FF84050DFF
+%FFFF84050DFD04FF2F0606355AFD20FF0D0E0D0E0D0E0D0E0D0E0D0E0D0E
+%0D0E0D0E0D0E0D0C59FD0AFF0C59FD05FFA80605A9FFFF59060584FD09FF
+%0C050CFFFFFF8405065AFD09FF590606FFFFFF5A062EFD04FF350559FD05
+%FFA9FD1CFF140E0E0D140E0E0D140E0E0D140E0E0D140E0E0D140684FD0A
+%FF0C2FFD05FF84050CFD04FF2F0606FD08FF35050CAFFFFFFF7E060584FD
+%05FF8459FFFF84050DFFFFFF840535FFFF84847E0606FD04FF8406FD1CFF
+%5A0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0E0D0C84FD0AFF0C0559FD
+%04FF060659FD05FF2E0606A8FD04FFA82F050CA8FD04FF5A05062EFD05FF
+%0559FFFF5A0605FFFFFF590605842F0C7EFF0506065A592F055AFD1DFF5A
+%0E0D140E0E0D140E0E0D140E0E0D140E0E0D140CA8FD0AFF0C0C060D2F2F
+%050C59FD07FF7E0D052F2F590C0C065AFD05FF350506050C060D0C0C0506
+%7EFF2F0C050C0C5AFFFF0C06050C84FFFFA90C06050C0635FD1FFFA98584
+%A9848584A9848584A9848584A9848584355AFD0AFF84A8FF7E5A2F5AA8FD
+%0AFFA80C0605062EFD07FF7E845A8459845A8459845AAFFD0AFF5A84FD06
+%FFA88484FD34FF3584FD1EFF0C06050659FD5BFF842FFD1FFF0C0605067E
+%FD5AFFA935FD20FF2F06050C59FD5AFF06FD21FF59060506062F2F5AFD7A
+%FF8435592F5984FDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFC
+%FFFDD1FFFF
+%%EndData
+%ADOEndClientInjection: DocumentHeader "AI11EPS"
+%%Pages: 1
+%%DocumentNeededResources:
+%%DocumentSuppliedResources: procset Adobe_AGM_Image 1.0 0
+%%+ procset Adobe_CoolType_Utility_T42 1.0 0
+%%+ procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
+%%+ procset Adobe_CoolType_Core 2.31 0
+%%+ procset Adobe_AGM_Core 2.0 0
+%%+ procset Adobe_AGM_Utils 1.0 0
+%%DocumentFonts:
+%%DocumentNeededFonts:
+%%DocumentNeededFeatures:
+%%DocumentSuppliedFeatures:
+%%DocumentProcessColors:
+%%DocumentCustomColors: (PANTONE 2905 U)
+%%+ (PANTONE 309 U)
+%%+ (PANTONE 3005 U)
+%%CMYKCustomColor: 0.4100 0.0200 0 0 (PANTONE 2905 U)
+%%+ 1 0 0.0900 0.7200 (PANTONE 309 U)
+%%+ 1 0.3400 0 0.0200 (PANTONE 3005 U)
+%%RGBCustomColor:
+%%EndComments
+
+
+
+
+
+
+%%BeginDefaults
+%%ViewingOrientation: 1 0 0 1
+%%EndDefaults
+%%BeginProlog
+%%BeginResource: procset Adobe_AGM_Utils 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{currentpacking true setpacking}if
+userdict/Adobe_AGM_Utils 73 dict dup begin put
+/bdf
+{bind def}bind def
+/nd{null def}bdf
+/xdf
+{exch def}bdf
+/ldf
+{load def}bdf
+/ddf
+{put}bdf
+/xddf
+{3 -1 roll put}bdf
+/xpt
+{exch put}bdf
+/ndf
+{
+ exch dup where{
+ pop pop pop
+ }{
+ xdf
+ }ifelse
+}def
+/cdndf
+{
+ exch dup currentdict exch known{
+ pop pop
+ }{
+ exch def
+ }ifelse
+}def
+/gx
+{get exec}bdf
+/ps_level
+ /languagelevel where{
+ pop systemdict/languagelevel gx
+ }{
+ 1
+ }ifelse
+def
+/level2
+ ps_level 2 ge
+def
+/level3
+ ps_level 3 ge
+def
+/ps_version
+ {version cvr}stopped{-1}if
+def
+/set_gvm
+{currentglobal exch setglobal}bdf
+/reset_gvm
+{setglobal}bdf
+/makereadonlyarray
+{
+ /packedarray where{pop packedarray
+ }{
+ array astore readonly}ifelse
+}bdf
+/map_reserved_ink_name
+{
+ dup type/stringtype eq{
+ dup/Red eq{
+ pop(_Red_)
+ }{
+ dup/Green eq{
+ pop(_Green_)
+ }{
+ dup/Blue eq{
+ pop(_Blue_)
+ }{
+ dup()cvn eq{
+ pop(Process)
+ }if
+ }ifelse
+ }ifelse
+ }ifelse
+ }if
+}bdf
+/AGMUTIL_GSTATE 22 dict def
+/get_gstate
+{
+ AGMUTIL_GSTATE begin
+ /AGMUTIL_GSTATE_clr_spc currentcolorspace def
+ /AGMUTIL_GSTATE_clr_indx 0 def
+ /AGMUTIL_GSTATE_clr_comps 12 array def
+ mark currentcolor counttomark
+ {AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 3 -1 roll put
+ /AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 add def}repeat pop
+ /AGMUTIL_GSTATE_fnt rootfont def
+ /AGMUTIL_GSTATE_lw currentlinewidth def
+ /AGMUTIL_GSTATE_lc currentlinecap def
+ /AGMUTIL_GSTATE_lj currentlinejoin def
+ /AGMUTIL_GSTATE_ml currentmiterlimit def
+ currentdash/AGMUTIL_GSTATE_do xdf/AGMUTIL_GSTATE_da xdf
+ /AGMUTIL_GSTATE_sa currentstrokeadjust def
+ /AGMUTIL_GSTATE_clr_rnd currentcolorrendering def
+ /AGMUTIL_GSTATE_op currentoverprint def
+ /AGMUTIL_GSTATE_bg currentblackgeneration cvlit def
+ /AGMUTIL_GSTATE_ucr currentundercolorremoval cvlit def
+ currentcolortransfer cvlit/AGMUTIL_GSTATE_gy_xfer xdf cvlit/AGMUTIL_GSTATE_b_xfer xdf
+ cvlit/AGMUTIL_GSTATE_g_xfer xdf cvlit/AGMUTIL_GSTATE_r_xfer xdf
+ /AGMUTIL_GSTATE_ht currenthalftone def
+ /AGMUTIL_GSTATE_flt currentflat def
+ end
+}def
+/set_gstate
+{
+ AGMUTIL_GSTATE begin
+ AGMUTIL_GSTATE_clr_spc setcolorspace
+ AGMUTIL_GSTATE_clr_indx{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 1 sub get
+ /AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 sub def}repeat setcolor
+ AGMUTIL_GSTATE_fnt setfont
+ AGMUTIL_GSTATE_lw setlinewidth
+ AGMUTIL_GSTATE_lc setlinecap
+ AGMUTIL_GSTATE_lj setlinejoin
+ AGMUTIL_GSTATE_ml setmiterlimit
+ AGMUTIL_GSTATE_da AGMUTIL_GSTATE_do setdash
+ AGMUTIL_GSTATE_sa setstrokeadjust
+ AGMUTIL_GSTATE_clr_rnd setcolorrendering
+ AGMUTIL_GSTATE_op setoverprint
+ AGMUTIL_GSTATE_bg cvx setblackgeneration
+ AGMUTIL_GSTATE_ucr cvx setundercolorremoval
+ AGMUTIL_GSTATE_r_xfer cvx AGMUTIL_GSTATE_g_xfer cvx AGMUTIL_GSTATE_b_xfer cvx
+ AGMUTIL_GSTATE_gy_xfer cvx setcolortransfer
+ AGMUTIL_GSTATE_ht/HalftoneType get dup 9 eq exch 100 eq or
+ {
+ currenthalftone/HalftoneType get AGMUTIL_GSTATE_ht/HalftoneType get ne
+ {
+ mark AGMUTIL_GSTATE_ht{sethalftone}stopped cleartomark
+ }if
+ }{
+ AGMUTIL_GSTATE_ht sethalftone
+ }ifelse
+ AGMUTIL_GSTATE_flt setflat
+ end
+}def
+/get_gstate_and_matrix
+{
+ AGMUTIL_GSTATE begin
+ /AGMUTIL_GSTATE_ctm matrix currentmatrix def
+ end
+ get_gstate
+}def
+/set_gstate_and_matrix
+{
+ set_gstate
+ AGMUTIL_GSTATE begin
+ AGMUTIL_GSTATE_ctm setmatrix
+ end
+}def
+/AGMUTIL_str256 256 string def
+/AGMUTIL_src256 256 string def
+/AGMUTIL_dst64 64 string def
+/AGMUTIL_srcLen nd
+/AGMUTIL_ndx nd
+/AGMUTIL_cpd nd
+/capture_cpd{
+ //Adobe_AGM_Utils/AGMUTIL_cpd currentpagedevice ddf
+}def
+/thold_halftone
+{
+ level3
+ {sethalftone currenthalftone}
+ {
+ dup/HalftoneType get 3 eq
+ {
+ sethalftone currenthalftone
+ }{
+ begin
+ Width Height mul{
+ Thresholds read{pop}if
+ }repeat
+ end
+ currenthalftone
+ }ifelse
+ }ifelse
+}def
+/rdcmntline
+{
+ currentfile AGMUTIL_str256 readline pop
+ (%)anchorsearch{pop}if
+}bdf
+/filter_cmyk
+{
+ dup type/filetype ne{
+ exch()/SubFileDecode filter
+ }{
+ exch pop
+ }
+ ifelse
+ [
+ exch
+ {
+ AGMUTIL_src256 readstring pop
+ dup length/AGMUTIL_srcLen exch def
+ /AGMUTIL_ndx 0 def
+ AGMCORE_plate_ndx 4 AGMUTIL_srcLen 1 sub{
+ 1 index exch get
+ AGMUTIL_dst64 AGMUTIL_ndx 3 -1 roll put
+ /AGMUTIL_ndx AGMUTIL_ndx 1 add def
+ }for
+ pop
+ AGMUTIL_dst64 0 AGMUTIL_ndx getinterval
+ }
+ bind
+ /exec cvx
+ ]cvx
+}bdf
+/filter_indexed_devn
+{
+ cvi Names length mul names_index add Lookup exch get
+}bdf
+/filter_devn
+{
+ 4 dict begin
+ /srcStr xdf
+ /dstStr xdf
+ dup type/filetype ne{
+ 0()/SubFileDecode filter
+ }if
+ [
+ exch
+ [
+ /devicen_colorspace_dict/AGMCORE_gget cvx/begin cvx
+ currentdict/srcStr get/readstring cvx/pop cvx
+ /dup cvx/length cvx 0/gt cvx[
+ Adobe_AGM_Utils/AGMUTIL_ndx 0/ddf cvx
+ names_index Names length currentdict/srcStr get length 1 sub{
+ 1/index cvx/exch cvx/get cvx
+ currentdict/dstStr get/AGMUTIL_ndx/load cvx 3 -1/roll cvx/put cvx
+ Adobe_AGM_Utils/AGMUTIL_ndx/AGMUTIL_ndx/load cvx 1/add cvx/ddf cvx
+ }for
+ currentdict/dstStr get 0/AGMUTIL_ndx/load cvx/getinterval cvx
+ ]cvx/if cvx
+ /end cvx
+ ]cvx
+ bind
+ /exec cvx
+ ]cvx
+ end
+}bdf
+/AGMUTIL_imagefile nd
+/read_image_file
+{
+ AGMUTIL_imagefile 0 setfileposition
+ 10 dict begin
+ /imageDict xdf
+ /imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+ /imbufIdx 0 def
+ /origDataSource imageDict/DataSource get def
+ /origMultipleDataSources imageDict/MultipleDataSources get def
+ /origDecode imageDict/Decode get def
+ /dstDataStr imageDict/Width get colorSpaceElemCnt mul string def
+ imageDict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+ {
+ /imbufCnt imageDict/DataSource get length def
+ /imbufs imbufCnt array def
+ 0 1 imbufCnt 1 sub{
+ /imbufIdx xdf
+ imbufs imbufIdx imbufLen string put
+ imageDict/DataSource get imbufIdx[AGMUTIL_imagefile imbufs imbufIdx get/readstring cvx/pop cvx]cvx put
+ }for
+ DeviceN_PS2{
+ imageDict begin
+ /DataSource[DataSource/devn_sep_datasource cvx]cvx def
+ /MultipleDataSources false def
+ /Decode[0 1]def
+ end
+ }if
+ }{
+ /imbuf imbufLen string def
+ Indexed_DeviceN level3 not and DeviceN_NoneName or{
+ /srcDataStrs[imageDict begin
+ currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+ {
+ Width Decode length 2 div mul cvi string
+ }repeat
+ end]def
+ imageDict begin
+ /DataSource[AGMUTIL_imagefile Decode BitsPerComponent false 1/filter_indexed_devn load dstDataStr srcDataStrs devn_alt_datasource/exec cvx]cvx def
+ /Decode[0 1]def
+ end
+ }{
+ imageDict/DataSource[1 string dup 0 AGMUTIL_imagefile Decode length 2 idiv string/readstring cvx/pop cvx names_index/get cvx/put cvx]cvx put
+ imageDict/Decode[0 1]put
+ }ifelse
+ }ifelse
+ imageDict exch
+ load exec
+ imageDict/DataSource origDataSource put
+ imageDict/MultipleDataSources origMultipleDataSources put
+ imageDict/Decode origDecode put
+ end
+}bdf
+/write_image_file
+{
+ begin
+ {(AGMUTIL_imagefile)(w+)file}stopped{
+ false
+ }{
+ Adobe_AGM_Utils/AGMUTIL_imagefile xddf
+ 2 dict begin
+ /imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+ MultipleDataSources{DataSource 0 get}{DataSource}ifelse type/filetype eq{
+ /imbuf imbufLen string def
+ }if
+ 1 1 Height MultipleDataSources not{Decode length 2 idiv mul}if{
+ pop
+ MultipleDataSources{
+ 0 1 DataSource length 1 sub{
+ DataSource type dup
+ /arraytype eq{
+ pop DataSource exch gx
+ }{
+ /filetype eq{
+ DataSource exch get imbuf readstring pop
+ }{
+ DataSource exch get
+ }ifelse
+ }ifelse
+ AGMUTIL_imagefile exch writestring
+ }for
+ }{
+ DataSource type dup
+ /arraytype eq{
+ pop DataSource exec
+ }{
+ /filetype eq{
+ DataSource imbuf readstring pop
+ }{
+ DataSource
+ }ifelse
+ }ifelse
+ AGMUTIL_imagefile exch writestring
+ }ifelse
+ }for
+ end
+ true
+ }ifelse
+ end
+}bdf
+/close_image_file
+{
+ AGMUTIL_imagefile closefile(AGMUTIL_imagefile)deletefile
+}def
+statusdict/product known userdict/AGMP_current_show known not and{
+ /pstr statusdict/product get def
+ pstr(HP LaserJet 2200)eq
+ pstr(HP LaserJet 4000 Series)eq or
+ pstr(HP LaserJet 4050 Series )eq or
+ pstr(HP LaserJet 8000 Series)eq or
+ pstr(HP LaserJet 8100 Series)eq or
+ pstr(HP LaserJet 8150 Series)eq or
+ pstr(HP LaserJet 5000 Series)eq or
+ pstr(HP LaserJet 5100 Series)eq or
+ pstr(HP Color LaserJet 4500)eq or
+ pstr(HP Color LaserJet 4600)eq or
+ pstr(HP LaserJet 5Si)eq or
+ pstr(HP LaserJet 1200 Series)eq or
+ pstr(HP LaserJet 1300 Series)eq or
+ pstr(HP LaserJet 4100 Series)eq or
+ {
+ userdict/AGMP_current_show/show load put
+ userdict/show{
+ currentcolorspace 0 get
+ /Pattern eq
+ {false charpath f}
+ {AGMP_current_show}ifelse
+ }put
+ }if
+ currentdict/pstr undef
+}if
+/consumeimagedata
+{
+ begin
+ AGMIMG_init_common
+ currentdict/MultipleDataSources known not
+ {/MultipleDataSources false def}if
+ MultipleDataSources
+ {
+ DataSource 0 get type
+ dup/filetype eq
+ {
+ 1 dict begin
+ /flushbuffer Width cvi string def
+ 1 1 Height cvi
+ {
+ pop
+ 0 1 DataSource length 1 sub
+ {
+ DataSource exch get
+ flushbuffer readstring pop pop
+ }for
+ }for
+ end
+ }if
+ dup/arraytype eq exch/packedarraytype eq or DataSource 0 get xcheck and
+ {
+ Width Height mul cvi
+ {
+ 0 1 DataSource length 1 sub
+ {dup DataSource exch gx length exch 0 ne{pop}if}for
+ dup 0 eq
+ {pop exit}if
+ sub dup 0 le
+ {exit}if
+ }loop
+ pop
+ }if
+ }
+ {
+ /DataSource load type
+ dup/filetype eq
+ {
+ 1 dict begin
+ /flushbuffer Width Decode length 2 idiv mul cvi string def
+ 1 1 Height{pop DataSource flushbuffer readstring pop pop}for
+ end
+ }if
+ dup/arraytype eq exch/packedarraytype eq or/DataSource load xcheck and
+ {
+ Height Width BitsPerComponent mul 8 BitsPerComponent sub add 8 idiv Decode length 2 idiv mul mul
+ {
+ DataSource length dup 0 eq
+ {pop exit}if
+ sub dup 0 le
+ {exit}if
+ }loop
+ pop
+ }if
+ }ifelse
+ end
+}bdf
+/addprocs
+{
+ 2{/exec load}repeat
+ 3 1 roll
+ [5 1 roll]bind cvx
+}def
+/modify_halftone_xfer
+{
+ currenthalftone dup length dict copy begin
+ currentdict 2 index known{
+ 1 index load dup length dict copy begin
+ currentdict/TransferFunction known{
+ /TransferFunction load
+ }{
+ currenttransfer
+ }ifelse
+ addprocs/TransferFunction xdf
+ currentdict end def
+ currentdict end sethalftone
+ }{
+ currentdict/TransferFunction known{
+ /TransferFunction load
+ }{
+ currenttransfer
+ }ifelse
+ addprocs/TransferFunction xdf
+ currentdict end sethalftone
+ pop
+ }ifelse
+}def
+/clonearray
+{
+ dup xcheck exch
+ dup length array exch
+ Adobe_AGM_Core/AGMCORE_tmp -1 ddf
+ {
+ Adobe_AGM_Core/AGMCORE_tmp 2 copy get 1 add ddf
+ dup type/dicttype eq
+ {
+ Adobe_AGM_Core/AGMCORE_tmp get
+ exch
+ clonedict
+ Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf
+ }if
+ dup type/arraytype eq
+ {
+ Adobe_AGM_Core/AGMCORE_tmp get exch
+ clonearray
+ Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf
+ }if
+ exch dup
+ Adobe_AGM_Core/AGMCORE_tmp get 4 -1 roll put
+ }forall
+ exch{cvx}if
+}bdf
+/clonedict
+{
+ dup length dict
+ begin
+ {
+ dup type/dicttype eq
+ {clonedict}if
+ dup type/arraytype eq
+ {clonearray}if
+ def
+ }forall
+ currentdict
+ end
+}bdf
+/DeviceN_PS2
+{
+ /currentcolorspace AGMCORE_gget 0 get/DeviceN eq level3 not and
+}bdf
+/Indexed_DeviceN
+{
+ /indexed_colorspace_dict AGMCORE_gget dup null ne{
+ dup/CSDBase known{
+ /CSDBase get/CSD get_res/Names known
+ }{
+ pop false
+ }ifelse
+ }{
+ pop false
+ }ifelse
+}bdf
+/DeviceN_NoneName
+{
+ /Names where{
+ pop
+ false Names
+ {
+ (None)eq or
+ }forall
+ }{
+ false
+ }ifelse
+}bdf
+/DeviceN_PS2_inRip_seps
+{
+ /AGMCORE_in_rip_sep where
+ {
+ pop dup type dup/arraytype eq exch/packedarraytype eq or
+ {
+ dup 0 get/DeviceN eq level3 not and AGMCORE_in_rip_sep and
+ {
+ /currentcolorspace exch AGMCORE_gput
+ false
+ }{
+ true
+ }ifelse
+ }{
+ true
+ }ifelse
+ }{
+ true
+ }ifelse
+}bdf
+/base_colorspace_type
+{
+ dup type/arraytype eq{0 get}if
+}bdf
+/currentdistillerparams where{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+{
+ /pdfmark_5{cleartomark}bind def
+}{
+ /pdfmark_5{pdfmark}bind def
+}ifelse
+/ReadBypdfmark_5
+{
+ currentfile exch 0 exch/SubFileDecode filter
+ /currentdistillerparams where
+ {pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+ {flushfile cleartomark}
+ {/PUT pdfmark}ifelse
+}bdf
+/xpdfm
+{
+ {
+ dup 0 get/Label eq
+ {
+ aload length[exch 1 add 1 roll/PAGELABEL
+ }{
+ aload pop
+ [{ThisPage}<<5 -2 roll>>/PUT
+ }ifelse
+ pdfmark_5
+ }forall
+}bdf
+/ds{
+ Adobe_AGM_Utils begin
+}bdf
+/dt{
+ currentdict Adobe_AGM_Utils eq{
+ end
+ }if
+}bdf
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_AGM_Core 2.0 0
+%%Version: 2.0 0
+%%Copyright: Copyright(C)1997-2007 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+ currentpacking
+ true setpacking
+}if
+userdict/Adobe_AGM_Core 209 dict dup begin put
+/Adobe_AGM_Core_Id/Adobe_AGM_Core_2.0_0 def
+/AGMCORE_str256 256 string def
+/AGMCORE_save nd
+/AGMCORE_graphicsave nd
+/AGMCORE_c 0 def
+/AGMCORE_m 0 def
+/AGMCORE_y 0 def
+/AGMCORE_k 0 def
+/AGMCORE_cmykbuf 4 array def
+/AGMCORE_screen[currentscreen]cvx def
+/AGMCORE_tmp 0 def
+/AGMCORE_&setgray nd
+/AGMCORE_&setcolor nd
+/AGMCORE_&setcolorspace nd
+/AGMCORE_&setcmykcolor nd
+/AGMCORE_cyan_plate nd
+/AGMCORE_magenta_plate nd
+/AGMCORE_yellow_plate nd
+/AGMCORE_black_plate nd
+/AGMCORE_plate_ndx nd
+/AGMCORE_get_ink_data nd
+/AGMCORE_is_cmyk_sep nd
+/AGMCORE_host_sep nd
+/AGMCORE_avoid_L2_sep_space nd
+/AGMCORE_distilling nd
+/AGMCORE_composite_job nd
+/AGMCORE_producing_seps nd
+/AGMCORE_ps_level -1 def
+/AGMCORE_ps_version -1 def
+/AGMCORE_environ_ok nd
+/AGMCORE_CSD_cache 0 dict def
+/AGMCORE_currentoverprint false def
+/AGMCORE_deltaX nd
+/AGMCORE_deltaY nd
+/AGMCORE_name nd
+/AGMCORE_sep_special nd
+/AGMCORE_err_strings 4 dict def
+/AGMCORE_cur_err nd
+/AGMCORE_current_spot_alias false def
+/AGMCORE_inverting false def
+/AGMCORE_feature_dictCount nd
+/AGMCORE_feature_opCount nd
+/AGMCORE_feature_ctm nd
+/AGMCORE_ConvertToProcess false def
+/AGMCORE_Default_CTM matrix def
+/AGMCORE_Default_PageSize nd
+/AGMCORE_Default_flatness nd
+/AGMCORE_currentbg nd
+/AGMCORE_currentucr nd
+/AGMCORE_pattern_paint_type 0 def
+/knockout_unitsq nd
+currentglobal true setglobal
+[/CSA/Gradient/Procedure]
+{
+ /Generic/Category findresource dup length dict copy/Category defineresource pop
+}forall
+setglobal
+/AGMCORE_key_known
+{
+ where{
+ /Adobe_AGM_Core_Id known
+ }{
+ false
+ }ifelse
+}ndf
+/flushinput
+{
+ save
+ 2 dict begin
+ /CompareBuffer 3 -1 roll def
+ /readbuffer 256 string def
+ mark
+ {
+ currentfile readbuffer{readline}stopped
+ {cleartomark mark}
+ {
+ not
+ {pop exit}
+ if
+ CompareBuffer eq
+ {exit}
+ if
+ }ifelse
+ }loop
+ cleartomark
+ end
+ restore
+}bdf
+/getspotfunction
+{
+ AGMCORE_screen exch pop exch pop
+ dup type/dicttype eq{
+ dup/HalftoneType get 1 eq{
+ /SpotFunction get
+ }{
+ dup/HalftoneType get 2 eq{
+ /GraySpotFunction get
+ }{
+ pop
+ {
+ abs exch abs 2 copy add 1 gt{
+ 1 sub dup mul exch 1 sub dup mul add 1 sub
+ }{
+ dup mul exch dup mul add 1 exch sub
+ }ifelse
+ }bind
+ }ifelse
+ }ifelse
+ }if
+}def
+/np
+{newpath}bdf
+/clp_npth
+{clip np}def
+/eoclp_npth
+{eoclip np}def
+/npth_clp
+{np clip}def
+/graphic_setup
+{
+ /AGMCORE_graphicsave save store
+ concat
+ 0 setgray
+ 0 setlinecap
+ 0 setlinejoin
+ 1 setlinewidth
+ []0 setdash
+ 10 setmiterlimit
+ np
+ false setoverprint
+ false setstrokeadjust
+ //Adobe_AGM_Core/spot_alias gx
+ /Adobe_AGM_Image where{
+ pop
+ Adobe_AGM_Image/spot_alias 2 copy known{
+ gx
+ }{
+ pop pop
+ }ifelse
+ }if
+ /sep_colorspace_dict null AGMCORE_gput
+ 100 dict begin
+ /dictstackcount countdictstack def
+ /showpage{}def
+ mark
+}def
+/graphic_cleanup
+{
+ cleartomark
+ dictstackcount 1 countdictstack 1 sub{end}for
+ end
+ AGMCORE_graphicsave restore
+}def
+/compose_error_msg
+{
+ grestoreall initgraphics
+ /Helvetica findfont 10 scalefont setfont
+ /AGMCORE_deltaY 100 def
+ /AGMCORE_deltaX 310 def
+ clippath pathbbox np pop pop 36 add exch 36 add exch moveto
+ 0 AGMCORE_deltaY rlineto AGMCORE_deltaX 0 rlineto
+ 0 AGMCORE_deltaY neg rlineto AGMCORE_deltaX neg 0 rlineto closepath
+ 0 AGMCORE_&setgray
+ gsave 1 AGMCORE_&setgray fill grestore
+ 1 setlinewidth gsave stroke grestore
+ currentpoint AGMCORE_deltaY 15 sub add exch 8 add exch moveto
+ /AGMCORE_deltaY 12 def
+ /AGMCORE_tmp 0 def
+ AGMCORE_err_strings exch get
+ {
+ dup 32 eq
+ {
+ pop
+ AGMCORE_str256 0 AGMCORE_tmp getinterval
+ stringwidth pop currentpoint pop add AGMCORE_deltaX 28 add gt
+ {
+ currentpoint AGMCORE_deltaY sub exch pop
+ clippath pathbbox pop pop pop 44 add exch moveto
+ }if
+ AGMCORE_str256 0 AGMCORE_tmp getinterval show( )show
+ 0 1 AGMCORE_str256 length 1 sub
+ {
+ AGMCORE_str256 exch 0 put
+ }for
+ /AGMCORE_tmp 0 def
+ }{
+ AGMCORE_str256 exch AGMCORE_tmp xpt
+ /AGMCORE_tmp AGMCORE_tmp 1 add def
+ }ifelse
+ }forall
+}bdf
+/AGMCORE_CMYKDeviceNColorspaces[
+ [/Separation/None/DeviceCMYK{0 0 0}]
+ [/Separation(Black)/DeviceCMYK{0 0 0 4 -1 roll}bind]
+ [/Separation(Yellow)/DeviceCMYK{0 0 3 -1 roll 0}bind]
+ [/DeviceN[(Yellow)(Black)]/DeviceCMYK{0 0 4 2 roll}bind]
+ [/Separation(Magenta)/DeviceCMYK{0 exch 0 0}bind]
+ [/DeviceN[(Magenta)(Black)]/DeviceCMYK{0 3 1 roll 0 exch}bind]
+ [/DeviceN[(Magenta)(Yellow)]/DeviceCMYK{0 3 1 roll 0}bind]
+ [/DeviceN[(Magenta)(Yellow)(Black)]/DeviceCMYK{0 4 1 roll}bind]
+ [/Separation(Cyan)/DeviceCMYK{0 0 0}]
+ [/DeviceN[(Cyan)(Black)]/DeviceCMYK{0 0 3 -1 roll}bind]
+ [/DeviceN[(Cyan)(Yellow)]/DeviceCMYK{0 exch 0}bind]
+ [/DeviceN[(Cyan)(Yellow)(Black)]/DeviceCMYK{0 3 1 roll}bind]
+ [/DeviceN[(Cyan)(Magenta)]/DeviceCMYK{0 0}]
+ [/DeviceN[(Cyan)(Magenta)(Black)]/DeviceCMYK{0 exch}bind]
+ [/DeviceN[(Cyan)(Magenta)(Yellow)]/DeviceCMYK{0}]
+ [/DeviceCMYK]
+]def
+/ds{
+ Adobe_AGM_Core begin
+ /currentdistillerparams where
+ {
+ pop currentdistillerparams/CoreDistVersion get 5000 lt
+ {<>setdistillerparams}if
+ }if
+ /AGMCORE_ps_version xdf
+ /AGMCORE_ps_level xdf
+ errordict/AGM_handleerror known not{
+ errordict/AGM_handleerror errordict/handleerror get put
+ errordict/handleerror{
+ Adobe_AGM_Core begin
+ $error/newerror get AGMCORE_cur_err null ne and{
+ $error/newerror false put
+ AGMCORE_cur_err compose_error_msg
+ }if
+ $error/newerror true put
+ end
+ errordict/AGM_handleerror get exec
+ }bind put
+ }if
+ /AGMCORE_environ_ok
+ ps_level AGMCORE_ps_level ge
+ ps_version AGMCORE_ps_version ge and
+ AGMCORE_ps_level -1 eq or
+ def
+ AGMCORE_environ_ok not
+ {/AGMCORE_cur_err/AGMCORE_bad_environ def}if
+ /AGMCORE_&setgray systemdict/setgray get def
+ level2{
+ /AGMCORE_&setcolor systemdict/setcolor get def
+ /AGMCORE_&setcolorspace systemdict/setcolorspace get def
+ }if
+ /AGMCORE_currentbg currentblackgeneration def
+ /AGMCORE_currentucr currentundercolorremoval def
+ /AGMCORE_Default_flatness currentflat def
+ /AGMCORE_distilling
+ /product where{
+ pop systemdict/setdistillerparams known product(Adobe PostScript Parser)ne and
+ }{
+ false
+ }ifelse
+ def
+ /AGMCORE_GSTATE AGMCORE_key_known not{
+ /AGMCORE_GSTATE 21 dict def
+ /AGMCORE_tmpmatrix matrix def
+ /AGMCORE_gstack 32 array def
+ /AGMCORE_gstackptr 0 def
+ /AGMCORE_gstacksaveptr 0 def
+ /AGMCORE_gstackframekeys 14 def
+ /AGMCORE_&gsave/gsave ldf
+ /AGMCORE_&grestore/grestore ldf
+ /AGMCORE_&grestoreall/grestoreall ldf
+ /AGMCORE_&save/save ldf
+ /AGMCORE_&setoverprint/setoverprint ldf
+ /AGMCORE_gdictcopy{
+ begin
+ {def}forall
+ end
+ }def
+ /AGMCORE_gput{
+ AGMCORE_gstack AGMCORE_gstackptr get
+ 3 1 roll
+ put
+ }def
+ /AGMCORE_gget{
+ AGMCORE_gstack AGMCORE_gstackptr get
+ exch
+ get
+ }def
+ /gsave{
+ AGMCORE_&gsave
+ AGMCORE_gstack AGMCORE_gstackptr get
+ AGMCORE_gstackptr 1 add
+ dup 32 ge{limitcheck}if
+ /AGMCORE_gstackptr exch store
+ AGMCORE_gstack AGMCORE_gstackptr get
+ AGMCORE_gdictcopy
+ }def
+ /grestore{
+ AGMCORE_&grestore
+ AGMCORE_gstackptr 1 sub
+ dup AGMCORE_gstacksaveptr lt{1 add}if
+ dup AGMCORE_gstack exch get dup/AGMCORE_currentoverprint known
+ {/AGMCORE_currentoverprint get setoverprint}{pop}ifelse
+ /AGMCORE_gstackptr exch store
+ }def
+ /grestoreall{
+ AGMCORE_&grestoreall
+ /AGMCORE_gstackptr AGMCORE_gstacksaveptr store
+ }def
+ /save{
+ AGMCORE_&save
+ AGMCORE_gstack AGMCORE_gstackptr get
+ AGMCORE_gstackptr 1 add
+ dup 32 ge{limitcheck}if
+ /AGMCORE_gstackptr exch store
+ /AGMCORE_gstacksaveptr AGMCORE_gstackptr store
+ AGMCORE_gstack AGMCORE_gstackptr get
+ AGMCORE_gdictcopy
+ }def
+ /setoverprint{
+ dup/AGMCORE_currentoverprint exch AGMCORE_gput AGMCORE_&setoverprint
+ }def
+ 0 1 AGMCORE_gstack length 1 sub{
+ AGMCORE_gstack exch AGMCORE_gstackframekeys dict put
+ }for
+ }if
+ level3/AGMCORE_&sysshfill AGMCORE_key_known not and
+ {
+ /AGMCORE_&sysshfill systemdict/shfill get def
+ /AGMCORE_&sysmakepattern systemdict/makepattern get def
+ /AGMCORE_&usrmakepattern/makepattern load def
+ }if
+ /currentcmykcolor[0 0 0 0]AGMCORE_gput
+ /currentstrokeadjust false AGMCORE_gput
+ /currentcolorspace[/DeviceGray]AGMCORE_gput
+ /sep_tint 0 AGMCORE_gput
+ /devicen_tints[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]AGMCORE_gput
+ /sep_colorspace_dict null AGMCORE_gput
+ /devicen_colorspace_dict null AGMCORE_gput
+ /indexed_colorspace_dict null AGMCORE_gput
+ /currentcolor_intent()AGMCORE_gput
+ /customcolor_tint 1 AGMCORE_gput
+ /absolute_colorimetric_crd null AGMCORE_gput
+ /relative_colorimetric_crd null AGMCORE_gput
+ /saturation_crd null AGMCORE_gput
+ /perceptual_crd null AGMCORE_gput
+ currentcolortransfer cvlit/AGMCore_gray_xfer xdf cvlit/AGMCore_b_xfer xdf
+ cvlit/AGMCore_g_xfer xdf cvlit/AGMCore_r_xfer xdf
+ <<
+ /MaxPatternItem currentsystemparams/MaxPatternCache get
+ >>
+ setuserparams
+ end
+}def
+/ps
+{
+ /setcmykcolor where{
+ pop
+ Adobe_AGM_Core/AGMCORE_&setcmykcolor/setcmykcolor load put
+ }if
+ Adobe_AGM_Core begin
+ /setcmykcolor
+ {
+ 4 copy AGMCORE_cmykbuf astore/currentcmykcolor exch AGMCORE_gput
+ 1 sub 4 1 roll
+ 3{
+ 3 index add neg dup 0 lt{
+ pop 0
+ }if
+ 3 1 roll
+ }repeat
+ setrgbcolor pop
+ }ndf
+ /currentcmykcolor
+ {
+ /currentcmykcolor AGMCORE_gget aload pop
+ }ndf
+ /setoverprint
+ {pop}ndf
+ /currentoverprint
+ {false}ndf
+ /AGMCORE_cyan_plate 1 0 0 0 test_cmyk_color_plate def
+ /AGMCORE_magenta_plate 0 1 0 0 test_cmyk_color_plate def
+ /AGMCORE_yellow_plate 0 0 1 0 test_cmyk_color_plate def
+ /AGMCORE_black_plate 0 0 0 1 test_cmyk_color_plate def
+ /AGMCORE_plate_ndx
+ AGMCORE_cyan_plate{
+ 0
+ }{
+ AGMCORE_magenta_plate{
+ 1
+ }{
+ AGMCORE_yellow_plate{
+ 2
+ }{
+ AGMCORE_black_plate{
+ 3
+ }{
+ 4
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ def
+ /AGMCORE_have_reported_unsupported_color_space false def
+ /AGMCORE_report_unsupported_color_space
+ {
+ AGMCORE_have_reported_unsupported_color_space false eq
+ {
+ (Warning: Job contains content that cannot be separated with on-host methods. This content appears on the black plate, and knocks out all other plates.)==
+ Adobe_AGM_Core/AGMCORE_have_reported_unsupported_color_space true ddf
+ }if
+ }def
+ /AGMCORE_composite_job
+ AGMCORE_cyan_plate AGMCORE_magenta_plate and AGMCORE_yellow_plate and AGMCORE_black_plate and def
+ /AGMCORE_in_rip_sep
+ /AGMCORE_in_rip_sep where{
+ pop AGMCORE_in_rip_sep
+ }{
+ AGMCORE_distilling
+ {
+ false
+ }{
+ userdict/Adobe_AGM_OnHost_Seps known{
+ false
+ }{
+ level2{
+ currentpagedevice/Separations 2 copy known{
+ get
+ }{
+ pop pop false
+ }ifelse
+ }{
+ false
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ def
+ /AGMCORE_producing_seps AGMCORE_composite_job not AGMCORE_in_rip_sep or def
+ /AGMCORE_host_sep AGMCORE_producing_seps AGMCORE_in_rip_sep not and def
+ /AGM_preserve_spots
+ /AGM_preserve_spots where{
+ pop AGM_preserve_spots
+ }{
+ AGMCORE_distilling AGMCORE_producing_seps or
+ }ifelse
+ def
+ /AGM_is_distiller_preserving_spotimages
+ {
+ currentdistillerparams/PreserveOverprintSettings known
+ {
+ currentdistillerparams/PreserveOverprintSettings get
+ {
+ currentdistillerparams/ColorConversionStrategy known
+ {
+ currentdistillerparams/ColorConversionStrategy get
+ /sRGB ne
+ }{
+ true
+ }ifelse
+ }{
+ false
+ }ifelse
+ }{
+ false
+ }ifelse
+ }def
+ /convert_spot_to_process where{pop}{
+ /convert_spot_to_process
+ {
+ //Adobe_AGM_Core begin
+ dup map_alias{
+ /Name get exch pop
+ }if
+ dup dup(None)eq exch(All)eq or
+ {
+ pop false
+ }{
+ AGMCORE_host_sep
+ {
+ gsave
+ 1 0 0 0 setcmykcolor currentgray 1 exch sub
+ 0 1 0 0 setcmykcolor currentgray 1 exch sub
+ 0 0 1 0 setcmykcolor currentgray 1 exch sub
+ 0 0 0 1 setcmykcolor currentgray 1 exch sub
+ add add add 0 eq
+ {
+ pop false
+ }{
+ false setoverprint
+ current_spot_alias false set_spot_alias
+ 1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+ set_spot_alias
+ currentgray 1 ne
+ }ifelse
+ grestore
+ }{
+ AGMCORE_distilling
+ {
+ pop AGM_is_distiller_preserving_spotimages not
+ }{
+ //Adobe_AGM_Core/AGMCORE_name xddf
+ false
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 0 eq
+ AGMUTIL_cpd/OverrideSeparations known and
+ {
+ AGMUTIL_cpd/OverrideSeparations get
+ {
+ /HqnSpots/ProcSet resourcestatus
+ {
+ pop pop pop true
+ }if
+ }if
+ }if
+ {
+ AGMCORE_name/HqnSpots/ProcSet findresource/TestSpot gx not
+ }{
+ gsave
+ [/Separation AGMCORE_name/DeviceGray{}]AGMCORE_&setcolorspace
+ false
+ AGMUTIL_cpd/SeparationColorNames 2 copy known
+ {
+ get
+ {AGMCORE_name eq or}forall
+ not
+ }{
+ pop pop pop true
+ }ifelse
+ grestore
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ end
+ }def
+ }ifelse
+ /convert_to_process where{pop}{
+ /convert_to_process
+ {
+ dup length 0 eq
+ {
+ pop false
+ }{
+ AGMCORE_host_sep
+ {
+ dup true exch
+ {
+ dup(Cyan)eq exch
+ dup(Magenta)eq 3 -1 roll or exch
+ dup(Yellow)eq 3 -1 roll or exch
+ dup(Black)eq 3 -1 roll or
+ {pop}
+ {convert_spot_to_process and}ifelse
+ }
+ forall
+ {
+ true exch
+ {
+ dup(Cyan)eq exch
+ dup(Magenta)eq 3 -1 roll or exch
+ dup(Yellow)eq 3 -1 roll or exch
+ (Black)eq or and
+ }forall
+ not
+ }{pop false}ifelse
+ }{
+ false exch
+ {
+ /PhotoshopDuotoneList where{pop false}{true}ifelse
+ {
+ dup(Cyan)eq exch
+ dup(Magenta)eq 3 -1 roll or exch
+ dup(Yellow)eq 3 -1 roll or exch
+ dup(Black)eq 3 -1 roll or
+ {pop}
+ {convert_spot_to_process or}ifelse
+ }
+ {
+ convert_spot_to_process or
+ }
+ ifelse
+ }
+ forall
+ }ifelse
+ }ifelse
+ }def
+ }ifelse
+ /AGMCORE_avoid_L2_sep_space
+ version cvr 2012 lt
+ level2 and
+ AGMCORE_producing_seps not and
+ def
+ /AGMCORE_is_cmyk_sep
+ AGMCORE_cyan_plate AGMCORE_magenta_plate or AGMCORE_yellow_plate or AGMCORE_black_plate or
+ def
+ /AGM_avoid_0_cmyk where{
+ pop AGM_avoid_0_cmyk
+ }{
+ AGM_preserve_spots
+ userdict/Adobe_AGM_OnHost_Seps known
+ userdict/Adobe_AGM_InRip_Seps known or
+ not and
+ }ifelse
+ {
+ /setcmykcolor[
+ {
+ 4 copy add add add 0 eq currentoverprint and{
+ pop 0.0005
+ }if
+ }/exec cvx
+ /AGMCORE_&setcmykcolor load dup type/operatortype ne{
+ /exec cvx
+ }if
+ ]cvx def
+ }if
+ /AGMCORE_IsSeparationAProcessColor
+ {
+ dup(Cyan)eq exch dup(Magenta)eq exch dup(Yellow)eq exch(Black)eq or or or
+ }def
+ AGMCORE_host_sep{
+ /setcolortransfer
+ {
+ AGMCORE_cyan_plate{
+ pop pop pop
+ }{
+ AGMCORE_magenta_plate{
+ 4 3 roll pop pop pop
+ }{
+ AGMCORE_yellow_plate{
+ 4 2 roll pop pop pop
+ }{
+ 4 1 roll pop pop pop
+ }ifelse
+ }ifelse
+ }ifelse
+ settransfer
+ }
+ def
+ /AGMCORE_get_ink_data
+ AGMCORE_cyan_plate{
+ {pop pop pop}
+ }{
+ AGMCORE_magenta_plate{
+ {4 3 roll pop pop pop}
+ }{
+ AGMCORE_yellow_plate{
+ {4 2 roll pop pop pop}
+ }{
+ {4 1 roll pop pop pop}
+ }ifelse
+ }ifelse
+ }ifelse
+ def
+ /AGMCORE_RemoveProcessColorNames
+ {
+ 1 dict begin
+ /filtername
+ {
+ dup/Cyan eq 1 index(Cyan)eq or
+ {pop(_cyan_)}if
+ dup/Magenta eq 1 index(Magenta)eq or
+ {pop(_magenta_)}if
+ dup/Yellow eq 1 index(Yellow)eq or
+ {pop(_yellow_)}if
+ dup/Black eq 1 index(Black)eq or
+ {pop(_black_)}if
+ }def
+ dup type/arraytype eq
+ {[exch{filtername}forall]}
+ {filtername}ifelse
+ end
+ }def
+ level3{
+ /AGMCORE_IsCurrentColor
+ {
+ dup AGMCORE_IsSeparationAProcessColor
+ {
+ AGMCORE_plate_ndx 0 eq
+ {dup(Cyan)eq exch/Cyan eq or}if
+ AGMCORE_plate_ndx 1 eq
+ {dup(Magenta)eq exch/Magenta eq or}if
+ AGMCORE_plate_ndx 2 eq
+ {dup(Yellow)eq exch/Yellow eq or}if
+ AGMCORE_plate_ndx 3 eq
+ {dup(Black)eq exch/Black eq or}if
+ AGMCORE_plate_ndx 4 eq
+ {pop false}if
+ }{
+ gsave
+ false setoverprint
+ current_spot_alias false set_spot_alias
+ 1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+ set_spot_alias
+ currentgray 1 ne
+ grestore
+ }ifelse
+ }def
+ /AGMCORE_filter_functiondatasource
+ {
+ 5 dict begin
+ /data_in xdf
+ data_in type/stringtype eq
+ {
+ /ncomp xdf
+ /comp xdf
+ /string_out data_in length ncomp idiv string def
+ 0 ncomp data_in length 1 sub
+ {
+ string_out exch dup ncomp idiv exch data_in exch ncomp getinterval comp get 255 exch sub put
+ }for
+ string_out
+ }{
+ string/string_in xdf
+ /string_out 1 string def
+ /component xdf
+ [
+ data_in string_in/readstring cvx
+ [component/get cvx 255/exch cvx/sub cvx string_out/exch cvx 0/exch cvx/put cvx string_out]cvx
+ [/pop cvx()]cvx/ifelse cvx
+ ]cvx/ReusableStreamDecode filter
+ }ifelse
+ end
+ }def
+ /AGMCORE_separateShadingFunction
+ {
+ 2 dict begin
+ /paint? xdf
+ /channel xdf
+ dup type/dicttype eq
+ {
+ begin
+ FunctionType 0 eq
+ {
+ /DataSource channel Range length 2 idiv DataSource AGMCORE_filter_functiondatasource def
+ currentdict/Decode known
+ {/Decode Decode channel 2 mul 2 getinterval def}if
+ paint? not
+ {/Decode[1 1]def}if
+ }if
+ FunctionType 2 eq
+ {
+ paint?
+ {
+ /C0[C0 channel get 1 exch sub]def
+ /C1[C1 channel get 1 exch sub]def
+ }{
+ /C0[1]def
+ /C1[1]def
+ }ifelse
+ }if
+ FunctionType 3 eq
+ {
+ /Functions[Functions{channel paint? AGMCORE_separateShadingFunction}forall]def
+ }if
+ currentdict/Range known
+ {/Range[0 1]def}if
+ currentdict
+ end}{
+ channel get 0 paint? AGMCORE_separateShadingFunction
+ }ifelse
+ end
+ }def
+ /AGMCORE_separateShading
+ {
+ 3 -1 roll begin
+ currentdict/Function known
+ {
+ currentdict/Background known
+ {[1 index{Background 3 index get 1 exch sub}{1}ifelse]/Background xdf}if
+ Function 3 1 roll AGMCORE_separateShadingFunction/Function xdf
+ /ColorSpace[/DeviceGray]def
+ }{
+ ColorSpace dup type/arraytype eq{0 get}if/DeviceCMYK eq
+ {
+ /ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+ }{
+ ColorSpace dup 1 get AGMCORE_RemoveProcessColorNames 1 exch put
+ }ifelse
+ ColorSpace 0 get/Separation eq
+ {
+ {
+ [1/exch cvx/sub cvx]cvx
+ }{
+ [/pop cvx 1]cvx
+ }ifelse
+ ColorSpace 3 3 -1 roll put
+ pop
+ }{
+ {
+ [exch ColorSpace 1 get length 1 sub exch sub/index cvx 1/exch cvx/sub cvx ColorSpace 1 get length 1 add 1/roll cvx ColorSpace 1 get length{/pop cvx}repeat]cvx
+ }{
+ pop[ColorSpace 1 get length{/pop cvx}repeat cvx 1]cvx
+ }ifelse
+ ColorSpace 3 3 -1 roll bind put
+ }ifelse
+ ColorSpace 2/DeviceGray put
+ }ifelse
+ end
+ }def
+ /AGMCORE_separateShadingDict
+ {
+ dup/ColorSpace get
+ dup type/arraytype ne
+ {[exch]}if
+ dup 0 get/DeviceCMYK eq
+ {
+ exch begin
+ currentdict
+ AGMCORE_cyan_plate
+ {0 true}if
+ AGMCORE_magenta_plate
+ {1 true}if
+ AGMCORE_yellow_plate
+ {2 true}if
+ AGMCORE_black_plate
+ {3 true}if
+ AGMCORE_plate_ndx 4 eq
+ {0 false}if
+ dup not currentoverprint and
+ {/AGMCORE_ignoreshade true def}if
+ AGMCORE_separateShading
+ currentdict
+ end exch
+ }if
+ dup 0 get/Separation eq
+ {
+ exch begin
+ ColorSpace 1 get dup/None ne exch/All ne and
+ {
+ ColorSpace 1 get AGMCORE_IsCurrentColor AGMCORE_plate_ndx 4 lt and ColorSpace 1 get AGMCORE_IsSeparationAProcessColor not and
+ {
+ ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq
+ {
+ /ColorSpace
+ [
+ /Separation
+ ColorSpace 1 get
+ /DeviceGray
+ [
+ ColorSpace 3 get/exec cvx
+ 4 AGMCORE_plate_ndx sub -1/roll cvx
+ 4 1/roll cvx
+ 3[/pop cvx]cvx/repeat cvx
+ 1/exch cvx/sub cvx
+ ]cvx
+ ]def
+ }{
+ AGMCORE_report_unsupported_color_space
+ AGMCORE_black_plate not
+ {
+ currentdict 0 false AGMCORE_separateShading
+ }if
+ }ifelse
+ }{
+ currentdict ColorSpace 1 get AGMCORE_IsCurrentColor
+ 0 exch
+ dup not currentoverprint and
+ {/AGMCORE_ignoreshade true def}if
+ AGMCORE_separateShading
+ }ifelse
+ }if
+ currentdict
+ end exch
+ }if
+ dup 0 get/DeviceN eq
+ {
+ exch begin
+ ColorSpace 1 get convert_to_process
+ {
+ ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq
+ {
+ /ColorSpace
+ [
+ /DeviceN
+ ColorSpace 1 get
+ /DeviceGray
+ [
+ ColorSpace 3 get/exec cvx
+ 4 AGMCORE_plate_ndx sub -1/roll cvx
+ 4 1/roll cvx
+ 3[/pop cvx]cvx/repeat cvx
+ 1/exch cvx/sub cvx
+ ]cvx
+ ]def
+ }{
+ AGMCORE_report_unsupported_color_space
+ AGMCORE_black_plate not
+ {
+ currentdict 0 false AGMCORE_separateShading
+ /ColorSpace[/DeviceGray]def
+ }if
+ }ifelse
+ }{
+ currentdict
+ false -1 ColorSpace 1 get
+ {
+ AGMCORE_IsCurrentColor
+ {
+ 1 add
+ exch pop true exch exit
+ }if
+ 1 add
+ }forall
+ exch
+ dup not currentoverprint and
+ {/AGMCORE_ignoreshade true def}if
+ AGMCORE_separateShading
+ }ifelse
+ currentdict
+ end exch
+ }if
+ dup 0 get dup/DeviceCMYK eq exch dup/Separation eq exch/DeviceN eq or or not
+ {
+ exch begin
+ ColorSpace dup type/arraytype eq
+ {0 get}if
+ /DeviceGray ne
+ {
+ AGMCORE_report_unsupported_color_space
+ AGMCORE_black_plate not
+ {
+ ColorSpace 0 get/CIEBasedA eq
+ {
+ /ColorSpace[/Separation/_ciebaseda_/DeviceGray{}]def
+ }if
+ ColorSpace 0 get dup/CIEBasedABC eq exch dup/CIEBasedDEF eq exch/DeviceRGB eq or or
+ {
+ /ColorSpace[/DeviceN[/_red_/_green_/_blue_]/DeviceRGB{}]def
+ }if
+ ColorSpace 0 get/CIEBasedDEFG eq
+ {
+ /ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+ }if
+ currentdict 0 false AGMCORE_separateShading
+ }if
+ }if
+ currentdict
+ end exch
+ }if
+ pop
+ dup/AGMCORE_ignoreshade known
+ {
+ begin
+ /ColorSpace[/Separation(None)/DeviceGray{}]def
+ currentdict end
+ }if
+ }def
+ /shfill
+ {
+ AGMCORE_separateShadingDict
+ dup/AGMCORE_ignoreshade known
+ {pop}
+ {AGMCORE_&sysshfill}ifelse
+ }def
+ /makepattern
+ {
+ exch
+ dup/PatternType get 2 eq
+ {
+ clonedict
+ begin
+ /Shading Shading AGMCORE_separateShadingDict def
+ Shading/AGMCORE_ignoreshade known
+ currentdict end exch
+ {pop<>}if
+ exch AGMCORE_&sysmakepattern
+ }{
+ exch AGMCORE_&usrmakepattern
+ }ifelse
+ }def
+ }if
+ }if
+ AGMCORE_in_rip_sep{
+ /setcustomcolor
+ {
+ exch aload pop
+ dup 7 1 roll inRip_spot_has_ink not {
+ 4{4 index mul 4 1 roll}
+ repeat
+ /DeviceCMYK setcolorspace
+ 6 -2 roll pop pop
+ }{
+ //Adobe_AGM_Core begin
+ /AGMCORE_k xdf/AGMCORE_y xdf/AGMCORE_m xdf/AGMCORE_c xdf
+ end
+ [/Separation 4 -1 roll/DeviceCMYK
+ {dup AGMCORE_c mul exch dup AGMCORE_m mul exch dup AGMCORE_y mul exch AGMCORE_k mul}
+ ]
+ setcolorspace
+ }ifelse
+ setcolor
+ }ndf
+ /setseparationgray
+ {
+ [/Separation(All)/DeviceGray{}]setcolorspace_opt
+ 1 exch sub setcolor
+ }ndf
+ }{
+ /setseparationgray
+ {
+ AGMCORE_&setgray
+ }ndf
+ }ifelse
+ /findcmykcustomcolor
+ {
+ 5 makereadonlyarray
+ }ndf
+ /setcustomcolor
+ {
+ exch aload pop pop
+ 4{4 index mul 4 1 roll}repeat
+ setcmykcolor pop
+ }ndf
+ /has_color
+ /colorimage where{
+ AGMCORE_producing_seps{
+ pop true
+ }{
+ systemdict eq
+ }ifelse
+ }{
+ false
+ }ifelse
+ def
+ /map_index
+ {
+ 1 index mul exch getinterval{255 div}forall
+ }bdf
+ /map_indexed_devn
+ {
+ Lookup Names length 3 -1 roll cvi map_index
+ }bdf
+ /n_color_components
+ {
+ base_colorspace_type
+ dup/DeviceGray eq{
+ pop 1
+ }{
+ /DeviceCMYK eq{
+ 4
+ }{
+ 3
+ }ifelse
+ }ifelse
+ }bdf
+ level2{
+ /mo/moveto ldf
+ /li/lineto ldf
+ /cv/curveto ldf
+ /knockout_unitsq
+ {
+ 1 setgray
+ 0 0 1 1 rectfill
+ }def
+ level2/setcolorspace AGMCORE_key_known not and{
+ /AGMCORE_&&&setcolorspace/setcolorspace ldf
+ /AGMCORE_ReplaceMappedColor
+ {
+ dup type dup/arraytype eq exch/packedarraytype eq or
+ {
+ /AGMCORE_SpotAliasAry2 where{
+ begin
+ dup 0 get dup/Separation eq
+ {
+ pop
+ dup length array copy
+ dup dup 1 get
+ current_spot_alias
+ {
+ dup map_alias
+ {
+ false set_spot_alias
+ dup 1 exch setsepcolorspace
+ true set_spot_alias
+ begin
+ /sep_colorspace_dict currentdict AGMCORE_gput
+ pop pop pop
+ [
+ /Separation Name
+ CSA map_csa
+ MappedCSA
+ /sep_colorspace_proc load
+ ]
+ dup Name
+ end
+ }if
+ }if
+ map_reserved_ink_name 1 xpt
+ }{
+ /DeviceN eq
+ {
+ dup length array copy
+ dup dup 1 get[
+ exch{
+ current_spot_alias{
+ dup map_alias{
+ /Name get exch pop
+ }if
+ }if
+ map_reserved_ink_name
+ }forall
+ ]1 xpt
+ }if
+ }ifelse
+ end
+ }if
+ }if
+ }def
+ /setcolorspace
+ {
+ dup type dup/arraytype eq exch/packedarraytype eq or
+ {
+ dup 0 get/Indexed eq
+ {
+ AGMCORE_distilling
+ {
+ /PhotoshopDuotoneList where
+ {
+ pop false
+ }{
+ true
+ }ifelse
+ }{
+ true
+ }ifelse
+ {
+ aload pop 3 -1 roll
+ AGMCORE_ReplaceMappedColor
+ 3 1 roll 4 array astore
+ }if
+ }{
+ AGMCORE_ReplaceMappedColor
+ }ifelse
+ }if
+ DeviceN_PS2_inRip_seps{AGMCORE_&&&setcolorspace}if
+ }def
+ }if
+ }{
+ /adj
+ {
+ currentstrokeadjust{
+ transform
+ 0.25 sub round 0.25 add exch
+ 0.25 sub round 0.25 add exch
+ itransform
+ }if
+ }def
+ /mo{
+ adj moveto
+ }def
+ /li{
+ adj lineto
+ }def
+ /cv{
+ 6 2 roll adj
+ 6 2 roll adj
+ 6 2 roll adj curveto
+ }def
+ /knockout_unitsq
+ {
+ 1 setgray
+ 8 8 1[8 0 0 8 0 0]{}image
+ }def
+ /currentstrokeadjust{
+ /currentstrokeadjust AGMCORE_gget
+ }def
+ /setstrokeadjust{
+ /currentstrokeadjust exch AGMCORE_gput
+ }def
+ /setcolorspace
+ {
+ /currentcolorspace exch AGMCORE_gput
+ }def
+ /currentcolorspace
+ {
+ /currentcolorspace AGMCORE_gget
+ }def
+ /setcolor_devicecolor
+ {
+ base_colorspace_type
+ dup/DeviceGray eq{
+ pop setgray
+ }{
+ /DeviceCMYK eq{
+ setcmykcolor
+ }{
+ setrgbcolor
+ }ifelse
+ }ifelse
+ }def
+ /setcolor
+ {
+ currentcolorspace 0 get
+ dup/DeviceGray ne{
+ dup/DeviceCMYK ne{
+ dup/DeviceRGB ne{
+ dup/Separation eq{
+ pop
+ currentcolorspace 3 gx
+ currentcolorspace 2 get
+ }{
+ dup/Indexed eq{
+ pop
+ currentcolorspace 3 get dup type/stringtype eq{
+ currentcolorspace 1 get n_color_components
+ 3 -1 roll map_index
+ }{
+ exec
+ }ifelse
+ currentcolorspace 1 get
+ }{
+ /AGMCORE_cur_err/AGMCORE_invalid_color_space def
+ AGMCORE_invalid_color_space
+ }ifelse
+ }ifelse
+ }if
+ }if
+ }if
+ setcolor_devicecolor
+ }def
+ }ifelse
+ /sop/setoverprint ldf
+ /lw/setlinewidth ldf
+ /lc/setlinecap ldf
+ /lj/setlinejoin ldf
+ /ml/setmiterlimit ldf
+ /dsh/setdash ldf
+ /sadj/setstrokeadjust ldf
+ /gry/setgray ldf
+ /rgb/setrgbcolor ldf
+ /cmyk[
+ /currentcolorspace[/DeviceCMYK]/AGMCORE_gput cvx
+ /setcmykcolor load dup type/operatortype ne{/exec cvx}if
+ ]cvx bdf
+ level3 AGMCORE_host_sep not and{
+ /nzopmsc{
+ 6 dict begin
+ /kk exch def
+ /yy exch def
+ /mm exch def
+ /cc exch def
+ /sum 0 def
+ cc 0 ne{/sum sum 2#1000 or def cc}if
+ mm 0 ne{/sum sum 2#0100 or def mm}if
+ yy 0 ne{/sum sum 2#0010 or def yy}if
+ kk 0 ne{/sum sum 2#0001 or def kk}if
+ AGMCORE_CMYKDeviceNColorspaces sum get setcolorspace
+ sum 0 eq{0}if
+ end
+ setcolor
+ }bdf
+ }{
+ /nzopmsc/cmyk ldf
+ }ifelse
+ /sep/setsepcolor ldf
+ /devn/setdevicencolor ldf
+ /idx/setindexedcolor ldf
+ /colr/setcolor ldf
+ /csacrd/set_csa_crd ldf
+ /sepcs/setsepcolorspace ldf
+ /devncs/setdevicencolorspace ldf
+ /idxcs/setindexedcolorspace ldf
+ /cp/closepath ldf
+ /clp/clp_npth ldf
+ /eclp/eoclp_npth ldf
+ /f/fill ldf
+ /ef/eofill ldf
+ /@/stroke ldf
+ /nclp/npth_clp ldf
+ /gset/graphic_setup ldf
+ /gcln/graphic_cleanup ldf
+ /ct/concat ldf
+ /cf/currentfile ldf
+ /fl/filter ldf
+ /rs/readstring ldf
+ /AGMCORE_def_ht currenthalftone def
+ /clonedict Adobe_AGM_Utils begin/clonedict load end def
+ /clonearray Adobe_AGM_Utils begin/clonearray load end def
+ currentdict{
+ dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+ bind
+ }if
+ def
+ }forall
+ /getrampcolor
+ {
+ /indx exch def
+ 0 1 NumComp 1 sub
+ {
+ dup
+ Samples exch get
+ dup type/stringtype eq{indx get}if
+ exch
+ Scaling exch get aload pop
+ 3 1 roll
+ mul add
+ }for
+ ColorSpaceFamily/Separation eq
+ {sep}
+ {
+ ColorSpaceFamily/DeviceN eq
+ {devn}{setcolor}ifelse
+ }ifelse
+ }bdf
+ /sssetbackground{
+ aload pop
+ ColorSpaceFamily/Separation eq
+ {sep}
+ {
+ ColorSpaceFamily/DeviceN eq
+ {devn}{setcolor}ifelse
+ }ifelse
+ }bdf
+ /RadialShade
+ {
+ 40 dict begin
+ /ColorSpaceFamily xdf
+ /background xdf
+ /ext1 xdf
+ /ext0 xdf
+ /BBox xdf
+ /r2 xdf
+ /c2y xdf
+ /c2x xdf
+ /r1 xdf
+ /c1y xdf
+ /c1x xdf
+ /rampdict xdf
+ /setinkoverprint where{pop/setinkoverprint{pop}def}if
+ gsave
+ BBox length 0 gt
+ {
+ np
+ BBox 0 get BBox 1 get moveto
+ BBox 2 get BBox 0 get sub 0 rlineto
+ 0 BBox 3 get BBox 1 get sub rlineto
+ BBox 2 get BBox 0 get sub neg 0 rlineto
+ closepath
+ clip
+ np
+ }if
+ c1x c2x eq
+ {
+ c1y c2y lt{/theta 90 def}{/theta 270 def}ifelse
+ }{
+ /slope c2y c1y sub c2x c1x sub div def
+ /theta slope 1 atan def
+ c2x c1x lt c2y c1y ge and{/theta theta 180 sub def}if
+ c2x c1x lt c2y c1y lt and{/theta theta 180 add def}if
+ }ifelse
+ gsave
+ clippath
+ c1x c1y translate
+ theta rotate
+ -90 rotate
+ {pathbbox}stopped
+ {0 0 0 0}if
+ /yMax xdf
+ /xMax xdf
+ /yMin xdf
+ /xMin xdf
+ grestore
+ xMax xMin eq yMax yMin eq or
+ {
+ grestore
+ end
+ }{
+ /max{2 copy gt{pop}{exch pop}ifelse}bdf
+ /min{2 copy lt{pop}{exch pop}ifelse}bdf
+ rampdict begin
+ 40 dict begin
+ background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+ gsave
+ c1x c1y translate
+ theta rotate
+ -90 rotate
+ /c2y c1x c2x sub dup mul c1y c2y sub dup mul add sqrt def
+ /c1y 0 def
+ /c1x 0 def
+ /c2x 0 def
+ ext0
+ {
+ 0 getrampcolor
+ c2y r2 add r1 sub 0.0001 lt
+ {
+ c1x c1y r1 360 0 arcn
+ pathbbox
+ /aymax exch def
+ /axmax exch def
+ /aymin exch def
+ /axmin exch def
+ /bxMin xMin axmin min def
+ /byMin yMin aymin min def
+ /bxMax xMax axmax max def
+ /byMax yMax aymax max def
+ bxMin byMin moveto
+ bxMax byMin lineto
+ bxMax byMax lineto
+ bxMin byMax lineto
+ bxMin byMin lineto
+ eofill
+ }{
+ c2y r1 add r2 le
+ {
+ c1x c1y r1 0 360 arc
+ fill
+ }
+ {
+ c2x c2y r2 0 360 arc fill
+ r1 r2 eq
+ {
+ /p1x r1 neg def
+ /p1y c1y def
+ /p2x r1 def
+ /p2y c1y def
+ p1x p1y moveto p2x p2y lineto p2x yMin lineto p1x yMin lineto
+ fill
+ }{
+ /AA r2 r1 sub c2y div def
+ AA -1 eq
+ {/theta 89.99 def}
+ {/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+ ifelse
+ /SS1 90 theta add dup sin exch cos div def
+ /p1x r1 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+ /p1y p1x SS1 div neg def
+ /SS2 90 theta sub dup sin exch cos div def
+ /p2x r1 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+ /p2y p2x SS2 div neg def
+ r1 r2 gt
+ {
+ /L1maxX p1x yMin p1y sub SS1 div add def
+ /L2maxX p2x yMin p2y sub SS2 div add def
+ }{
+ /L1maxX 0 def
+ /L2maxX 0 def
+ }ifelse
+ p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+ L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+ fill
+ }ifelse
+ }ifelse
+ }ifelse
+ }if
+ c1x c2x sub dup mul
+ c1y c2y sub dup mul
+ add 0.5 exp
+ 0 dtransform
+ dup mul exch dup mul add 0.5 exp 72 div
+ 0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+ 72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+ 1 index 1 index lt{exch}if pop
+ /hires xdf
+ hires mul
+ /numpix xdf
+ /numsteps NumSamples def
+ /rampIndxInc 1 def
+ /subsampling false def
+ numpix 0 ne
+ {
+ NumSamples numpix div 0.5 gt
+ {
+ /numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+ /rampIndxInc NumSamples 1 sub numsteps div def
+ /subsampling true def
+ }if
+ }if
+ /xInc c2x c1x sub numsteps div def
+ /yInc c2y c1y sub numsteps div def
+ /rInc r2 r1 sub numsteps div def
+ /cx c1x def
+ /cy c1y def
+ /radius r1 def
+ np
+ xInc 0 eq yInc 0 eq rInc 0 eq and and
+ {
+ 0 getrampcolor
+ cx cy radius 0 360 arc
+ stroke
+ NumSamples 1 sub getrampcolor
+ cx cy radius 72 hires div add 0 360 arc
+ 0 setlinewidth
+ stroke
+ }{
+ 0
+ numsteps
+ {
+ dup
+ subsampling{round cvi}if
+ getrampcolor
+ cx cy radius 0 360 arc
+ /cx cx xInc add def
+ /cy cy yInc add def
+ /radius radius rInc add def
+ cx cy radius 360 0 arcn
+ eofill
+ rampIndxInc add
+ }repeat
+ pop
+ }ifelse
+ ext1
+ {
+ c2y r2 add r1 lt
+ {
+ c2x c2y r2 0 360 arc
+ fill
+ }{
+ c2y r1 add r2 sub 0.0001 le
+ {
+ c2x c2y r2 360 0 arcn
+ pathbbox
+ /aymax exch def
+ /axmax exch def
+ /aymin exch def
+ /axmin exch def
+ /bxMin xMin axmin min def
+ /byMin yMin aymin min def
+ /bxMax xMax axmax max def
+ /byMax yMax aymax max def
+ bxMin byMin moveto
+ bxMax byMin lineto
+ bxMax byMax lineto
+ bxMin byMax lineto
+ bxMin byMin lineto
+ eofill
+ }{
+ c2x c2y r2 0 360 arc fill
+ r1 r2 eq
+ {
+ /p1x r2 neg def
+ /p1y c2y def
+ /p2x r2 def
+ /p2y c2y def
+ p1x p1y moveto p2x p2y lineto p2x yMax lineto p1x yMax lineto
+ fill
+ }{
+ /AA r2 r1 sub c2y div def
+ AA -1 eq
+ {/theta 89.99 def}
+ {/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+ ifelse
+ /SS1 90 theta add dup sin exch cos div def
+ /p1x r2 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+ /p1y c2y p1x SS1 div sub def
+ /SS2 90 theta sub dup sin exch cos div def
+ /p2x r2 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+ /p2y c2y p2x SS2 div sub def
+ r1 r2 lt
+ {
+ /L1maxX p1x yMax p1y sub SS1 div add def
+ /L2maxX p2x yMax p2y sub SS2 div add def
+ }{
+ /L1maxX 0 def
+ /L2maxX 0 def
+ }ifelse
+ p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+ L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+ fill
+ }ifelse
+ }ifelse
+ }ifelse
+ }if
+ grestore
+ grestore
+ end
+ end
+ end
+ }ifelse
+ }bdf
+ /GenStrips
+ {
+ 40 dict begin
+ /ColorSpaceFamily xdf
+ /background xdf
+ /ext1 xdf
+ /ext0 xdf
+ /BBox xdf
+ /y2 xdf
+ /x2 xdf
+ /y1 xdf
+ /x1 xdf
+ /rampdict xdf
+ /setinkoverprint where{pop/setinkoverprint{pop}def}if
+ gsave
+ BBox length 0 gt
+ {
+ np
+ BBox 0 get BBox 1 get moveto
+ BBox 2 get BBox 0 get sub 0 rlineto
+ 0 BBox 3 get BBox 1 get sub rlineto
+ BBox 2 get BBox 0 get sub neg 0 rlineto
+ closepath
+ clip
+ np
+ }if
+ x1 x2 eq
+ {
+ y1 y2 lt{/theta 90 def}{/theta 270 def}ifelse
+ }{
+ /slope y2 y1 sub x2 x1 sub div def
+ /theta slope 1 atan def
+ x2 x1 lt y2 y1 ge and{/theta theta 180 sub def}if
+ x2 x1 lt y2 y1 lt and{/theta theta 180 add def}if
+ }
+ ifelse
+ gsave
+ clippath
+ x1 y1 translate
+ theta rotate
+ {pathbbox}stopped
+ {0 0 0 0}if
+ /yMax exch def
+ /xMax exch def
+ /yMin exch def
+ /xMin exch def
+ grestore
+ xMax xMin eq yMax yMin eq or
+ {
+ grestore
+ end
+ }{
+ rampdict begin
+ 20 dict begin
+ background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+ gsave
+ x1 y1 translate
+ theta rotate
+ /xStart 0 def
+ /xEnd x2 x1 sub dup mul y2 y1 sub dup mul add 0.5 exp def
+ /ySpan yMax yMin sub def
+ /numsteps NumSamples def
+ /rampIndxInc 1 def
+ /subsampling false def
+ xStart 0 transform
+ xEnd 0 transform
+ 3 -1 roll
+ sub dup mul
+ 3 1 roll
+ sub dup mul
+ add 0.5 exp 72 div
+ 0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+ 72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+ 1 index 1 index lt{exch}if pop
+ mul
+ /numpix xdf
+ numpix 0 ne
+ {
+ NumSamples numpix div 0.5 gt
+ {
+ /numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+ /rampIndxInc NumSamples 1 sub numsteps div def
+ /subsampling true def
+ }if
+ }if
+ ext0
+ {
+ 0 getrampcolor
+ xMin xStart lt
+ {
+ xMin yMin xMin neg ySpan rectfill
+ }if
+ }if
+ /xInc xEnd xStart sub numsteps div def
+ /x xStart def
+ 0
+ numsteps
+ {
+ dup
+ subsampling{round cvi}if
+ getrampcolor
+ x yMin xInc ySpan rectfill
+ /x x xInc add def
+ rampIndxInc add
+ }repeat
+ pop
+ ext1{
+ xMax xEnd gt
+ {
+ xEnd yMin xMax xEnd sub ySpan rectfill
+ }if
+ }if
+ grestore
+ grestore
+ end
+ end
+ end
+ }ifelse
+ }bdf
+}def
+/pt
+{
+ end
+}def
+/dt{
+}def
+/pgsv{
+ //Adobe_AGM_Core/AGMCORE_save save put
+}def
+/pgrs{
+ //Adobe_AGM_Core/AGMCORE_save get restore
+}def
+systemdict/findcolorrendering known{
+ /findcolorrendering systemdict/findcolorrendering get def
+}if
+systemdict/setcolorrendering known{
+ /setcolorrendering systemdict/setcolorrendering get def
+}if
+/test_cmyk_color_plate
+{
+ gsave
+ setcmykcolor currentgray 1 ne
+ grestore
+}def
+/inRip_spot_has_ink
+{
+ dup//Adobe_AGM_Core/AGMCORE_name xddf
+ convert_spot_to_process not
+}def
+/map255_to_range
+{
+ 1 index sub
+ 3 -1 roll 255 div mul add
+}def
+/set_csa_crd
+{
+ /sep_colorspace_dict null AGMCORE_gput
+ begin
+ CSA get_csa_by_name setcolorspace_opt
+ set_crd
+ end
+}
+def
+/map_csa
+{
+ currentdict/MappedCSA known{MappedCSA null ne}{false}ifelse
+ {pop}{get_csa_by_name/MappedCSA xdf}ifelse
+}def
+/setsepcolor
+{
+ /sep_colorspace_dict AGMCORE_gget begin
+ dup/sep_tint exch AGMCORE_gput
+ TintProc
+ end
+}def
+/setdevicencolor
+{
+ /devicen_colorspace_dict AGMCORE_gget begin
+ Names length copy
+ Names length 1 sub -1 0
+ {
+ /devicen_tints AGMCORE_gget 3 1 roll xpt
+ }for
+ TintProc
+ end
+}def
+/sep_colorspace_proc
+{
+ /AGMCORE_tmp exch store
+ /sep_colorspace_dict AGMCORE_gget begin
+ currentdict/Components known{
+ Components aload pop
+ TintMethod/Lab eq{
+ 2{AGMCORE_tmp mul NComponents 1 roll}repeat
+ LMax sub AGMCORE_tmp mul LMax add NComponents 1 roll
+ }{
+ TintMethod/Subtractive eq{
+ NComponents{
+ AGMCORE_tmp mul NComponents 1 roll
+ }repeat
+ }{
+ NComponents{
+ 1 sub AGMCORE_tmp mul 1 add NComponents 1 roll
+ }repeat
+ }ifelse
+ }ifelse
+ }{
+ ColorLookup AGMCORE_tmp ColorLookup length 1 sub mul round cvi get
+ aload pop
+ }ifelse
+ end
+}def
+/sep_colorspace_gray_proc
+{
+ /AGMCORE_tmp exch store
+ /sep_colorspace_dict AGMCORE_gget begin
+ GrayLookup AGMCORE_tmp GrayLookup length 1 sub mul round cvi get
+ end
+}def
+/sep_proc_name
+{
+ dup 0 get
+ dup/DeviceRGB eq exch/DeviceCMYK eq or level2 not and has_color not and{
+ pop[/DeviceGray]
+ /sep_colorspace_gray_proc
+ }{
+ /sep_colorspace_proc
+ }ifelse
+}def
+/setsepcolorspace
+{
+ current_spot_alias{
+ dup begin
+ Name map_alias{
+ exch pop
+ }if
+ end
+ }if
+ dup/sep_colorspace_dict exch AGMCORE_gput
+ begin
+ CSA map_csa
+ /AGMCORE_sep_special Name dup()eq exch(All)eq or store
+ AGMCORE_avoid_L2_sep_space{
+ [/Indexed MappedCSA sep_proc_name 255 exch
+ {255 div}/exec cvx 3 -1 roll[4 1 roll load/exec cvx]cvx
+ ]setcolorspace_opt
+ /TintProc{
+ 255 mul round cvi setcolor
+ }bdf
+ }{
+ MappedCSA 0 get/DeviceCMYK eq
+ currentdict/Components known and
+ AGMCORE_sep_special not and{
+ /TintProc[
+ Components aload pop Name findcmykcustomcolor
+ /exch cvx/setcustomcolor cvx
+ ]cvx bdf
+ }{
+ AGMCORE_host_sep Name(All)eq and{
+ /TintProc{
+ 1 exch sub setseparationgray
+ }bdf
+ }{
+ AGMCORE_in_rip_sep MappedCSA 0 get/DeviceCMYK eq and
+ AGMCORE_host_sep or
+ Name()eq and{
+ /TintProc[
+ MappedCSA sep_proc_name exch 0 get/DeviceCMYK eq{
+ cvx/setcmykcolor cvx
+ }{
+ cvx/setgray cvx
+ }ifelse
+ ]cvx bdf
+ }{
+ AGMCORE_producing_seps MappedCSA 0 get dup/DeviceCMYK eq exch/DeviceGray eq or and AGMCORE_sep_special not and{
+ /TintProc[
+ /dup cvx
+ MappedCSA sep_proc_name cvx exch
+ 0 get/DeviceGray eq{
+ 1/exch cvx/sub cvx 0 0 0 4 -1/roll cvx
+ }if
+ /Name cvx/findcmykcustomcolor cvx/exch cvx
+ AGMCORE_host_sep{
+ AGMCORE_is_cmyk_sep
+ /Name cvx
+ /AGMCORE_IsSeparationAProcessColor load/exec cvx
+ /not cvx/and cvx
+ }{
+ Name inRip_spot_has_ink not
+ }ifelse
+ [
+ /pop cvx 1
+ ]cvx/if cvx
+ /setcustomcolor cvx
+ ]cvx bdf
+ }{
+ /TintProc{setcolor}bdf
+ [/Separation Name MappedCSA sep_proc_name load]setcolorspace_opt
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ set_crd
+ setsepcolor
+ end
+}def
+/additive_blend
+{
+ 3 dict begin
+ /numarrays xdf
+ /numcolors xdf
+ 0 1 numcolors 1 sub
+ {
+ /c1 xdf
+ 1
+ 0 1 numarrays 1 sub
+ {
+ 1 exch add/index cvx
+ c1/get cvx/mul cvx
+ }for
+ numarrays 1 add 1/roll cvx
+ }for
+ numarrays[/pop cvx]cvx/repeat cvx
+ end
+}def
+/subtractive_blend
+{
+ 3 dict begin
+ /numarrays xdf
+ /numcolors xdf
+ 0 1 numcolors 1 sub
+ {
+ /c1 xdf
+ 1 1
+ 0 1 numarrays 1 sub
+ {
+ 1 3 3 -1 roll add/index cvx
+ c1/get cvx/sub cvx/mul cvx
+ }for
+ /sub cvx
+ numarrays 1 add 1/roll cvx
+ }for
+ numarrays[/pop cvx]cvx/repeat cvx
+ end
+}def
+/exec_tint_transform
+{
+ /TintProc[
+ /TintTransform cvx/setcolor cvx
+ ]cvx bdf
+ MappedCSA setcolorspace_opt
+}bdf
+/devn_makecustomcolor
+{
+ 2 dict begin
+ /names_index xdf
+ /Names xdf
+ 1 1 1 1 Names names_index get findcmykcustomcolor
+ /devicen_tints AGMCORE_gget names_index get setcustomcolor
+ Names length{pop}repeat
+ end
+}bdf
+/setdevicencolorspace
+{
+ dup/AliasedColorants known{false}{true}ifelse
+ current_spot_alias and{
+ 7 dict begin
+ /names_index 0 def
+ dup/names_len exch/Names get length def
+ /new_names names_len array def
+ /new_LookupTables names_len array def
+ /alias_cnt 0 def
+ dup/Names get
+ {
+ dup map_alias{
+ exch pop
+ dup/ColorLookup known{
+ dup begin
+ new_LookupTables names_index ColorLookup put
+ end
+ }{
+ dup/Components known{
+ dup begin
+ new_LookupTables names_index Components put
+ end
+ }{
+ dup begin
+ new_LookupTables names_index[null null null null]put
+ end
+ }ifelse
+ }ifelse
+ new_names names_index 3 -1 roll/Name get put
+ /alias_cnt alias_cnt 1 add def
+ }{
+ /name xdf
+ new_names names_index name put
+ dup/LookupTables known{
+ dup begin
+ new_LookupTables names_index LookupTables names_index get put
+ end
+ }{
+ dup begin
+ new_LookupTables names_index[null null null null]put
+ end
+ }ifelse
+ }ifelse
+ /names_index names_index 1 add def
+ }forall
+ alias_cnt 0 gt{
+ /AliasedColorants true def
+ /lut_entry_len new_LookupTables 0 get dup length 256 ge{0 get length}{length}ifelse def
+ 0 1 names_len 1 sub{
+ /names_index xdf
+ new_LookupTables names_index get dup length 256 ge{0 get length}{length}ifelse lut_entry_len ne{
+ /AliasedColorants false def
+ exit
+ }{
+ new_LookupTables names_index get 0 get null eq{
+ dup/Names get names_index get/name xdf
+ name(Cyan)eq name(Magenta)eq name(Yellow)eq name(Black)eq
+ or or or not{
+ /AliasedColorants false def
+ exit
+ }if
+ }if
+ }ifelse
+ }for
+ lut_entry_len 1 eq{
+ /AliasedColorants false def
+ }if
+ AliasedColorants{
+ dup begin
+ /Names new_names def
+ /LookupTables new_LookupTables def
+ /AliasedColorants true def
+ /NComponents lut_entry_len def
+ /TintMethod NComponents 4 eq{/Subtractive}{/Additive}ifelse def
+ /MappedCSA TintMethod/Additive eq{/DeviceRGB}{/DeviceCMYK}ifelse def
+ currentdict/TTTablesIdx known not{
+ /TTTablesIdx -1 def
+ }if
+ end
+ }if
+ }if
+ end
+ }if
+ dup/devicen_colorspace_dict exch AGMCORE_gput
+ begin
+ currentdict/AliasedColorants known{
+ AliasedColorants
+ }{
+ false
+ }ifelse
+ dup not{
+ CSA map_csa
+ }if
+ /TintTransform load type/nulltype eq or{
+ /TintTransform[
+ 0 1 Names length 1 sub
+ {
+ /TTTablesIdx TTTablesIdx 1 add def
+ dup LookupTables exch get dup 0 get null eq
+ {
+ 1 index
+ Names exch get
+ dup(Cyan)eq
+ {
+ pop exch
+ LookupTables length exch sub
+ /index cvx
+ 0 0 0
+ }
+ {
+ dup(Magenta)eq
+ {
+ pop exch
+ LookupTables length exch sub
+ /index cvx
+ 0/exch cvx 0 0
+ }{
+ (Yellow)eq
+ {
+ exch
+ LookupTables length exch sub
+ /index cvx
+ 0 0 3 -1/roll cvx 0
+ }{
+ exch
+ LookupTables length exch sub
+ /index cvx
+ 0 0 0 4 -1/roll cvx
+ }ifelse
+ }ifelse
+ }ifelse
+ 5 -1/roll cvx/astore cvx
+ }{
+ dup length 1 sub
+ LookupTables length 4 -1 roll sub 1 add
+ /index cvx/mul cvx/round cvx/cvi cvx/get cvx
+ }ifelse
+ Names length TTTablesIdx add 1 add 1/roll cvx
+ }for
+ Names length[/pop cvx]cvx/repeat cvx
+ NComponents Names length
+ TintMethod/Subtractive eq
+ {
+ subtractive_blend
+ }{
+ additive_blend
+ }ifelse
+ ]cvx bdf
+ }if
+ AGMCORE_host_sep{
+ Names convert_to_process{
+ exec_tint_transform
+ }
+ {
+ currentdict/AliasedColorants known{
+ AliasedColorants not
+ }{
+ false
+ }ifelse
+ 5 dict begin
+ /AvoidAliasedColorants xdf
+ /painted? false def
+ /names_index 0 def
+ /names_len Names length def
+ AvoidAliasedColorants{
+ /currentspotalias current_spot_alias def
+ false set_spot_alias
+ }if
+ Names{
+ AGMCORE_is_cmyk_sep{
+ dup(Cyan)eq AGMCORE_cyan_plate and exch
+ dup(Magenta)eq AGMCORE_magenta_plate and exch
+ dup(Yellow)eq AGMCORE_yellow_plate and exch
+ (Black)eq AGMCORE_black_plate and or or or{
+ /devicen_colorspace_dict AGMCORE_gget/TintProc[
+ Names names_index/devn_makecustomcolor cvx
+ ]cvx ddf
+ /painted? true def
+ }if
+ painted?{exit}if
+ }{
+ 0 0 0 0 5 -1 roll findcmykcustomcolor 1 setcustomcolor currentgray 0 eq{
+ /devicen_colorspace_dict AGMCORE_gget/TintProc[
+ Names names_index/devn_makecustomcolor cvx
+ ]cvx ddf
+ /painted? true def
+ exit
+ }if
+ }ifelse
+ /names_index names_index 1 add def
+ }forall
+ AvoidAliasedColorants{
+ currentspotalias set_spot_alias
+ }if
+ painted?{
+ /devicen_colorspace_dict AGMCORE_gget/names_index names_index put
+ }{
+ /devicen_colorspace_dict AGMCORE_gget/TintProc[
+ names_len[/pop cvx]cvx/repeat cvx 1/setseparationgray cvx
+ 0 0 0 0/setcmykcolor cvx
+ ]cvx ddf
+ }ifelse
+ end
+ }ifelse
+ }
+ {
+ AGMCORE_in_rip_sep{
+ Names convert_to_process not
+ }{
+ level3
+ }ifelse
+ {
+ [/DeviceN Names MappedCSA/TintTransform load]setcolorspace_opt
+ /TintProc level3 not AGMCORE_in_rip_sep and{
+ [
+ Names/length cvx[/pop cvx]cvx/repeat cvx
+ ]cvx bdf
+ }{
+ {setcolor}bdf
+ }ifelse
+ }{
+ exec_tint_transform
+ }ifelse
+ }ifelse
+ set_crd
+ /AliasedColorants false def
+ end
+}def
+/setindexedcolorspace
+{
+ dup/indexed_colorspace_dict exch AGMCORE_gput
+ begin
+ currentdict/CSDBase known{
+ CSDBase/CSD get_res begin
+ currentdict/Names known{
+ currentdict devncs
+ }{
+ 1 currentdict sepcs
+ }ifelse
+ AGMCORE_host_sep{
+ 4 dict begin
+ /compCnt/Names where{pop Names length}{1}ifelse def
+ /NewLookup HiVal 1 add string def
+ 0 1 HiVal{
+ /tableIndex xdf
+ Lookup dup type/stringtype eq{
+ compCnt tableIndex map_index
+ }{
+ exec
+ }ifelse
+ /Names where{
+ pop setdevicencolor
+ }{
+ setsepcolor
+ }ifelse
+ currentgray
+ tableIndex exch
+ 255 mul cvi
+ NewLookup 3 1 roll put
+ }for
+ [/Indexed currentcolorspace HiVal NewLookup]setcolorspace_opt
+ end
+ }{
+ level3
+ {
+ currentdict/Names known{
+ [/Indexed[/DeviceN Names MappedCSA/TintTransform load]HiVal Lookup]setcolorspace_opt
+ }{
+ [/Indexed[/Separation Name MappedCSA sep_proc_name load]HiVal Lookup]setcolorspace_opt
+ }ifelse
+ }{
+ [/Indexed MappedCSA HiVal
+ [
+ currentdict/Names known{
+ Lookup dup type/stringtype eq
+ {/exch cvx CSDBase/CSD get_res/Names get length dup/mul cvx exch/getinterval cvx{255 div}/forall cvx}
+ {/exec cvx}ifelse
+ /TintTransform load/exec cvx
+ }{
+ Lookup dup type/stringtype eq
+ {/exch cvx/get cvx 255/div cvx}
+ {/exec cvx}ifelse
+ CSDBase/CSD get_res/MappedCSA get sep_proc_name exch pop/load cvx/exec cvx
+ }ifelse
+ ]cvx
+ ]setcolorspace_opt
+ }ifelse
+ }ifelse
+ end
+ set_crd
+ }
+ {
+ CSA map_csa
+ AGMCORE_host_sep level2 not and{
+ 0 0 0 0 setcmykcolor
+ }{
+ [/Indexed MappedCSA
+ level2 not has_color not and{
+ dup 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or{
+ pop[/DeviceGray]
+ }if
+ HiVal GrayLookup
+ }{
+ HiVal
+ currentdict/RangeArray known{
+ {
+ /indexed_colorspace_dict AGMCORE_gget begin
+ Lookup exch
+ dup HiVal gt{
+ pop HiVal
+ }if
+ NComponents mul NComponents getinterval{}forall
+ NComponents 1 sub -1 0{
+ RangeArray exch 2 mul 2 getinterval aload pop map255_to_range
+ NComponents 1 roll
+ }for
+ end
+ }bind
+ }{
+ Lookup
+ }ifelse
+ }ifelse
+ ]setcolorspace_opt
+ set_crd
+ }ifelse
+ }ifelse
+ end
+}def
+/setindexedcolor
+{
+ AGMCORE_host_sep{
+ /indexed_colorspace_dict AGMCORE_gget
+ begin
+ currentdict/CSDBase known{
+ CSDBase/CSD get_res begin
+ currentdict/Names known{
+ map_indexed_devn
+ devn
+ }
+ {
+ Lookup 1 3 -1 roll map_index
+ sep
+ }ifelse
+ end
+ }{
+ Lookup MappedCSA/DeviceCMYK eq{4}{1}ifelse 3 -1 roll
+ map_index
+ MappedCSA/DeviceCMYK eq{setcmykcolor}{setgray}ifelse
+ }ifelse
+ end
+ }{
+ level3 not AGMCORE_in_rip_sep and/indexed_colorspace_dict AGMCORE_gget/CSDBase known and{
+ /indexed_colorspace_dict AGMCORE_gget/CSDBase get/CSD get_res begin
+ map_indexed_devn
+ devn
+ end
+ }
+ {
+ setcolor
+ }ifelse
+ }ifelse
+}def
+/ignoreimagedata
+{
+ currentoverprint not{
+ gsave
+ dup clonedict begin
+ 1 setgray
+ /Decode[0 1]def
+ /DataSourcedef
+ /MultipleDataSources false def
+ /BitsPerComponent 8 def
+ currentdict end
+ systemdict/image gx
+ grestore
+ }if
+ consumeimagedata
+}def
+/add_res
+{
+ dup/CSD eq{
+ pop
+ //Adobe_AGM_Core begin
+ /AGMCORE_CSD_cache load 3 1 roll put
+ end
+ }{
+ defineresource pop
+ }ifelse
+}def
+/del_res
+{
+ {
+ aload pop exch
+ dup/CSD eq{
+ pop
+ {//Adobe_AGM_Core/AGMCORE_CSD_cache get exch undef}forall
+ }{
+ exch
+ {1 index undefineresource}forall
+ pop
+ }ifelse
+ }forall
+}def
+/get_res
+{
+ dup/CSD eq{
+ pop
+ dup type dup/nametype eq exch/stringtype eq or{
+ AGMCORE_CSD_cache exch get
+ }if
+ }{
+ findresource
+ }ifelse
+}def
+/get_csa_by_name
+{
+ dup type dup/nametype eq exch/stringtype eq or{
+ /CSA get_res
+ }if
+}def
+/paintproc_buf_init
+{
+ /count get 0 0 put
+}def
+/paintproc_buf_next
+{
+ dup/count get dup 0 get
+ dup 3 1 roll
+ 1 add 0 xpt
+ get
+}def
+/cachepaintproc_compress
+{
+ 5 dict begin
+ currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+ /ppdict 20 dict def
+ /string_size 16000 def
+ /readbuffer string_size string def
+ currentglobal true setglobal
+ ppdict 1 array dup 0 1 put/count xpt
+ setglobal
+ /LZWFilter
+ {
+ exch
+ dup length 0 eq{
+ pop
+ }{
+ ppdict dup length 1 sub 3 -1 roll put
+ }ifelse
+ {string_size}{0}ifelse string
+ }/LZWEncode filter def
+ {
+ ReadFilter readbuffer readstring
+ exch LZWFilter exch writestring
+ not{exit}if
+ }loop
+ LZWFilter closefile
+ ppdict
+ end
+}def
+/cachepaintproc
+{
+ 2 dict begin
+ currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+ /ppdict 20 dict def
+ currentglobal true setglobal
+ ppdict 1 array dup 0 1 put/count xpt
+ setglobal
+ {
+ ReadFilter 16000 string readstring exch
+ ppdict dup length 1 sub 3 -1 roll put
+ not{exit}if
+ }loop
+ ppdict dup dup length 1 sub()put
+ end
+}def
+/make_pattern
+{
+ exch clonedict exch
+ dup matrix currentmatrix matrix concatmatrix 0 0 3 2 roll itransform
+ exch 3 index/XStep get 1 index exch 2 copy div cvi mul sub sub
+ exch 3 index/YStep get 1 index exch 2 copy div cvi mul sub sub
+ matrix translate exch matrix concatmatrix
+ 1 index begin
+ BBox 0 get XStep div cvi XStep mul/xshift exch neg def
+ BBox 1 get YStep div cvi YStep mul/yshift exch neg def
+ BBox 0 get xshift add
+ BBox 1 get yshift add
+ BBox 2 get xshift add
+ BBox 3 get yshift add
+ 4 array astore
+ /BBox exch def
+ [xshift yshift/translate load null/exec load]dup
+ 3/PaintProc load put cvx/PaintProc exch def
+ end
+ gsave 0 setgray
+ makepattern
+ grestore
+}def
+/set_pattern
+{
+ dup/PatternType get 1 eq{
+ dup/PaintType get 1 eq{
+ currentoverprint sop[/DeviceGray]setcolorspace 0 setgray
+ }if
+ }if
+ setpattern
+}def
+/setcolorspace_opt
+{
+ dup currentcolorspace eq{pop}{setcolorspace}ifelse
+}def
+/updatecolorrendering
+{
+ currentcolorrendering/RenderingIntent known{
+ currentcolorrendering/RenderingIntent get
+ }
+ {
+ Intent/AbsoluteColorimetric eq
+ {
+ /absolute_colorimetric_crd AGMCORE_gget dup null eq
+ }
+ {
+ Intent/RelativeColorimetric eq
+ {
+ /relative_colorimetric_crd AGMCORE_gget dup null eq
+ }
+ {
+ Intent/Saturation eq
+ {
+ /saturation_crd AGMCORE_gget dup null eq
+ }
+ {
+ /perceptual_crd AGMCORE_gget dup null eq
+ }ifelse
+ }ifelse
+ }ifelse
+ {
+ pop null
+ }
+ {
+ /RenderingIntent known{null}{Intent}ifelse
+ }ifelse
+ }ifelse
+ Intent ne{
+ Intent/ColorRendering{findresource}stopped
+ {
+ pop pop systemdict/findcolorrendering known
+ {
+ Intent findcolorrendering
+ {
+ /ColorRendering findresource true exch
+ }
+ {
+ /ColorRendering findresource
+ product(Xerox Phaser 5400)ne
+ exch
+ }ifelse
+ dup Intent/AbsoluteColorimetric eq
+ {
+ /absolute_colorimetric_crd exch AGMCORE_gput
+ }
+ {
+ Intent/RelativeColorimetric eq
+ {
+ /relative_colorimetric_crd exch AGMCORE_gput
+ }
+ {
+ Intent/Saturation eq
+ {
+ /saturation_crd exch AGMCORE_gput
+ }
+ {
+ Intent/Perceptual eq
+ {
+ /perceptual_crd exch AGMCORE_gput
+ }
+ {
+ pop
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ 1 index{exch}{pop}ifelse
+ }
+ {false}ifelse
+ }
+ {true}ifelse
+ {
+ dup begin
+ currentdict/TransformPQR known{
+ currentdict/TransformPQR get aload pop
+ 3{{}eq 3 1 roll}repeat or or
+ }
+ {true}ifelse
+ currentdict/MatrixPQR known{
+ currentdict/MatrixPQR get aload pop
+ 1.0 eq 9 1 roll 0.0 eq 9 1 roll 0.0 eq 9 1 roll
+ 0.0 eq 9 1 roll 1.0 eq 9 1 roll 0.0 eq 9 1 roll
+ 0.0 eq 9 1 roll 0.0 eq 9 1 roll 1.0 eq
+ and and and and and and and and
+ }
+ {true}ifelse
+ end
+ or
+ {
+ clonedict begin
+ /TransformPQR[
+ {4 -1 roll 3 get dup 3 1 roll sub 5 -1 roll 3 get 3 -1 roll sub div
+ 3 -1 roll 3 get 3 -1 roll 3 get dup 4 1 roll sub mul add}bind
+ {4 -1 roll 4 get dup 3 1 roll sub 5 -1 roll 4 get 3 -1 roll sub div
+ 3 -1 roll 4 get 3 -1 roll 4 get dup 4 1 roll sub mul add}bind
+ {4 -1 roll 5 get dup 3 1 roll sub 5 -1 roll 5 get 3 -1 roll sub div
+ 3 -1 roll 5 get 3 -1 roll 5 get dup 4 1 roll sub mul add}bind
+ ]def
+ /MatrixPQR[0.8951 -0.7502 0.0389 0.2664 1.7135 -0.0685 -0.1614 0.0367 1.0296]def
+ /RangePQR[-0.3227950745 2.3229645538 -1.5003771057 3.5003465881 -0.1369979095 2.136967392]def
+ currentdict end
+ }if
+ setcolorrendering_opt
+ }if
+ }if
+}def
+/set_crd
+{
+ AGMCORE_host_sep not level2 and{
+ currentdict/ColorRendering known{
+ ColorRendering/ColorRendering{findresource}stopped not{setcolorrendering_opt}if
+ }{
+ currentdict/Intent known{
+ updatecolorrendering
+ }if
+ }ifelse
+ currentcolorspace dup type/arraytype eq
+ {0 get}if
+ /DeviceRGB eq
+ {
+ currentdict/UCR known
+ {/UCR}{/AGMCORE_currentucr}ifelse
+ load setundercolorremoval
+ currentdict/BG known
+ {/BG}{/AGMCORE_currentbg}ifelse
+ load setblackgeneration
+ }if
+ }if
+}def
+/set_ucrbg
+{
+ dup null eq{pop/AGMCORE_currentbg load}{/Procedure get_res}ifelse setblackgeneration
+ dup null eq{pop/AGMCORE_currentucr load}{/Procedure get_res}ifelse setundercolorremoval
+}def
+/setcolorrendering_opt
+{
+ dup currentcolorrendering eq{
+ pop
+ }{
+ clonedict
+ begin
+ /Intent Intent def
+ currentdict
+ end
+ setcolorrendering
+ }ifelse
+}def
+/cpaint_gcomp
+{
+ convert_to_process//Adobe_AGM_Core/AGMCORE_ConvertToProcess xddf
+ //Adobe_AGM_Core/AGMCORE_ConvertToProcess get not
+ {
+ (%end_cpaint_gcomp)flushinput
+ }if
+}def
+/cpaint_gsep
+{
+ //Adobe_AGM_Core/AGMCORE_ConvertToProcess get
+ {
+ (%end_cpaint_gsep)flushinput
+ }if
+}def
+/cpaint_gend
+{np}def
+/T1_path
+{
+ currentfile token pop currentfile token pop mo
+ {
+ currentfile token pop dup type/stringtype eq
+ {pop exit}if
+ 0 exch rlineto
+ currentfile token pop dup type/stringtype eq
+ {pop exit}if
+ 0 rlineto
+ }loop
+}def
+/T1_gsave
+ level3
+ {/clipsave}
+ {/gsave}ifelse
+ load def
+/T1_grestore
+ level3
+ {/cliprestore}
+ {/grestore}ifelse
+ load def
+/set_spot_alias_ary
+{
+ dup inherit_aliases
+ //Adobe_AGM_Core/AGMCORE_SpotAliasAry xddf
+}def
+/set_spot_normalization_ary
+{
+ dup inherit_aliases
+ dup length
+ /AGMCORE_SpotAliasAry where{pop AGMCORE_SpotAliasAry length add}if
+ array
+ //Adobe_AGM_Core/AGMCORE_SpotAliasAry2 xddf
+ /AGMCORE_SpotAliasAry where{
+ pop
+ AGMCORE_SpotAliasAry2 0 AGMCORE_SpotAliasAry putinterval
+ AGMCORE_SpotAliasAry length
+ }{0}ifelse
+ AGMCORE_SpotAliasAry2 3 1 roll exch putinterval
+ true set_spot_alias
+}def
+/inherit_aliases
+{
+ {dup/Name get map_alias{/CSD put}{pop}ifelse}forall
+}def
+/set_spot_alias
+{
+ /AGMCORE_SpotAliasAry2 where{
+ /AGMCORE_current_spot_alias 3 -1 roll put
+ }{
+ pop
+ }ifelse
+}def
+/current_spot_alias
+{
+ /AGMCORE_SpotAliasAry2 where{
+ /AGMCORE_current_spot_alias get
+ }{
+ false
+ }ifelse
+}def
+/map_alias
+{
+ /AGMCORE_SpotAliasAry2 where{
+ begin
+ /AGMCORE_name xdf
+ false
+ AGMCORE_SpotAliasAry2{
+ dup/Name get AGMCORE_name eq{
+ /CSD get/CSD get_res
+ exch pop true
+ exit
+ }{
+ pop
+ }ifelse
+ }forall
+ end
+ }{
+ pop false
+ }ifelse
+}bdf
+/spot_alias
+{
+ true set_spot_alias
+ /AGMCORE_&setcustomcolor AGMCORE_key_known not{
+ //Adobe_AGM_Core/AGMCORE_&setcustomcolor/setcustomcolor load put
+ }if
+ /customcolor_tint 1 AGMCORE_gput
+ //Adobe_AGM_Core begin
+ /setcustomcolor
+ {
+ //Adobe_AGM_Core begin
+ dup/customcolor_tint exch AGMCORE_gput
+ 1 index aload pop pop 1 eq exch 1 eq and exch 1 eq and exch 1 eq and not
+ current_spot_alias and{1 index 4 get map_alias}{false}ifelse
+ {
+ false set_spot_alias
+ /sep_colorspace_dict AGMCORE_gget null ne
+ 3 1 roll 2 index{
+ exch pop/sep_tint AGMCORE_gget exch
+ }if
+ mark 3 1 roll
+ setsepcolorspace
+ counttomark 0 ne{
+ setsepcolor
+ }if
+ pop
+ not{/sep_tint 1.0 AGMCORE_gput}if
+ pop
+ true set_spot_alias
+ }{
+ AGMCORE_&setcustomcolor
+ }ifelse
+ end
+ }bdf
+ end
+}def
+/begin_feature
+{
+ Adobe_AGM_Core/AGMCORE_feature_dictCount countdictstack put
+ count Adobe_AGM_Core/AGMCORE_feature_opCount 3 -1 roll put
+ {Adobe_AGM_Core/AGMCORE_feature_ctm matrix currentmatrix put}if
+}def
+/end_feature
+{
+ 2 dict begin
+ /spd/setpagedevice load def
+ /setpagedevice{get_gstate spd set_gstate}def
+ stopped{$error/newerror false put}if
+ end
+ count Adobe_AGM_Core/AGMCORE_feature_opCount get sub dup 0 gt{{pop}repeat}{pop}ifelse
+ countdictstack Adobe_AGM_Core/AGMCORE_feature_dictCount get sub dup 0 gt{{end}repeat}{pop}ifelse
+ {Adobe_AGM_Core/AGMCORE_feature_ctm get setmatrix}if
+}def
+/set_negative
+{
+ //Adobe_AGM_Core begin
+ /AGMCORE_inverting exch def
+ level2{
+ currentpagedevice/NegativePrint known AGMCORE_distilling not and{
+ currentpagedevice/NegativePrint get//Adobe_AGM_Core/AGMCORE_inverting get ne{
+ true begin_feature true{
+ <>setpagedevice
+ }end_feature
+ }if
+ /AGMCORE_inverting false def
+ }if
+ }if
+ AGMCORE_inverting{
+ [{1 exch sub}/exec load dup currenttransfer exch]cvx bind settransfer
+ AGMCORE_distilling{
+ erasepage
+ }{
+ gsave np clippath 1/setseparationgray where{pop setseparationgray}{setgray}ifelse
+ /AGMIRS_&fill where{pop AGMIRS_&fill}{fill}ifelse grestore
+ }ifelse
+ }if
+ end
+}def
+/lw_save_restore_override{
+ /md where{
+ pop
+ md begin
+ initializepage
+ /initializepage{}def
+ /pmSVsetup{}def
+ /endp{}def
+ /pse{}def
+ /psb{}def
+ /orig_showpage where
+ {pop}
+ {/orig_showpage/showpage load def}
+ ifelse
+ /showpage{orig_showpage gR}def
+ end
+ }if
+}def
+/pscript_showpage_override{
+ /NTPSOct95 where
+ {
+ begin
+ showpage
+ save
+ /showpage/restore load def
+ /restore{exch pop}def
+ end
+ }if
+}def
+/driver_media_override
+{
+ /md where{
+ pop
+ md/initializepage known{
+ md/initializepage{}put
+ }if
+ md/rC known{
+ md/rC{4{pop}repeat}put
+ }if
+ }if
+ /mysetup where{
+ /mysetup[1 0 0 1 0 0]put
+ }if
+ Adobe_AGM_Core/AGMCORE_Default_CTM matrix currentmatrix put
+ level2
+ {Adobe_AGM_Core/AGMCORE_Default_PageSize currentpagedevice/PageSize get put}if
+}def
+/driver_check_media_override
+{
+ /PrepsDict where
+ {pop}
+ {
+ Adobe_AGM_Core/AGMCORE_Default_CTM get matrix currentmatrix ne
+ Adobe_AGM_Core/AGMCORE_Default_PageSize get type/arraytype eq
+ {
+ Adobe_AGM_Core/AGMCORE_Default_PageSize get 0 get currentpagedevice/PageSize get 0 get eq and
+ Adobe_AGM_Core/AGMCORE_Default_PageSize get 1 get currentpagedevice/PageSize get 1 get eq and
+ }if
+ {
+ Adobe_AGM_Core/AGMCORE_Default_CTM get setmatrix
+ }if
+ }ifelse
+}def
+AGMCORE_err_strings begin
+ /AGMCORE_bad_environ(Environment not satisfactory for this job. Ensure that the PPD is correct or that the PostScript level requested is supported by this printer. )def
+ /AGMCORE_color_space_onhost_seps(This job contains colors that will not separate with on-host methods. )def
+ /AGMCORE_invalid_color_space(This job contains an invalid color space. )def
+end
+/set_def_ht
+{AGMCORE_def_ht sethalftone}def
+/set_def_flat
+{AGMCORE_Default_flatness setflat}def
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_CoolType_Core 2.31 0
%%Copyright: Copyright 1997-2006 Adobe Systems Incorporated. All Rights Reserved.
%%Version: 2.31 0
10 dict begin
/Adobe_CoolType_Passthru currentdict def
/Adobe_CoolType_Core_Defined userdict/Adobe_CoolType_Core known def
Adobe_CoolType_Core_Defined
{/Adobe_CoolType_Core userdict/Adobe_CoolType_Core get def}
if
userdict/Adobe_CoolType_Core 70 dict dup begin put
/Adobe_CoolType_Version 2.31 def
/Level2?
systemdict/languagelevel known dup
{pop systemdict/languagelevel get 2 ge}
if def
Level2? not
{
/currentglobal false def
/setglobal/pop load def
/gcheck{pop false}bind def
/currentpacking false def
/setpacking/pop load def
/SharedFontDirectory 0 dict def
}
if
currentpacking
true setpacking
currentglobal false setglobal
userdict/Adobe_CoolType_Data 2 copy known not
{2 copy 10 dict put}
if
get
begin
/@opStackCountByLevel 32 dict def
/@opStackLevel 0 def
/@dictStackCountByLevel 32 dict def
/@dictStackLevel 0 def
end
setglobal
currentglobal true setglobal
userdict/Adobe_CoolType_GVMFonts known not
{userdict/Adobe_CoolType_GVMFonts 10 dict put}
if
setglobal
currentglobal false setglobal
userdict/Adobe_CoolType_LVMFonts known not
{userdict/Adobe_CoolType_LVMFonts 10 dict put}
if
setglobal
/ct_VMDictPut
{
dup gcheck{Adobe_CoolType_GVMFonts}{Adobe_CoolType_LVMFonts}ifelse
3 1 roll put
}bind def
/ct_VMDictUndef
{
dup Adobe_CoolType_GVMFonts exch known
{Adobe_CoolType_GVMFonts exch undef}
{
dup Adobe_CoolType_LVMFonts exch known
{Adobe_CoolType_LVMFonts exch undef}
{pop}
ifelse
}ifelse
}bind def
/ct_str1 1 string def
/ct_xshow
{
/_ct_na exch def
/_ct_i 0 def
currentpoint
/_ct_y exch def
/_ct_x exch def
{
pop pop
ct_str1 exch 0 exch put
ct_str1 show
{_ct_na _ct_i get}stopped
{pop pop}
{
_ct_x _ct_y moveto
0
rmoveto
}
ifelse
/_ct_i _ct_i 1 add def
currentpoint
/_ct_y exch def
/_ct_x exch def
}
exch
@cshow
}bind def
/ct_yshow
{
/_ct_na exch def
/_ct_i 0 def
currentpoint
/_ct_y exch def
/_ct_x exch def
{
pop pop
ct_str1 exch 0 exch put
ct_str1 show
{_ct_na _ct_i get}stopped
{pop pop}
{
_ct_x _ct_y moveto
0 exch
rmoveto
}
ifelse
/_ct_i _ct_i 1 add def
currentpoint
/_ct_y exch def
/_ct_x exch def
}
exch
@cshow
}bind def
/ct_xyshow
{
/_ct_na exch def
/_ct_i 0 def
currentpoint
/_ct_y exch def
/_ct_x exch def
{
pop pop
ct_str1 exch 0 exch put
ct_str1 show
{_ct_na _ct_i get}stopped
{pop pop}
{
{_ct_na _ct_i 1 add get}stopped
{pop pop pop}
{
_ct_x _ct_y moveto
rmoveto
}
ifelse
}
ifelse
/_ct_i _ct_i 2 add def
currentpoint
/_ct_y exch def
/_ct_x exch def
}
exch
@cshow
}bind def
/xsh{{@xshow}stopped{Adobe_CoolType_Data begin ct_xshow end}if}bind def
/ysh{{@yshow}stopped{Adobe_CoolType_Data begin ct_yshow end}if}bind def
/xysh{{@xyshow}stopped{Adobe_CoolType_Data begin ct_xyshow end}if}bind def
currentglobal true setglobal
/ct_T3Defs
{
/BuildChar
{
1 index/Encoding get exch get
1 index/BuildGlyph get exec
}bind def
/BuildGlyph
{
exch begin
GlyphProcs exch get exec
end
}bind def
}bind def
setglobal
/@_SaveStackLevels
{
Adobe_CoolType_Data
begin
/@vmState currentglobal def false setglobal
@opStackCountByLevel
@opStackLevel
2 copy known not
{
2 copy
3 dict dup/args
7 index
5 add array put
put get
}
{
get dup/args get dup length 3 index lt
{
dup length 5 add array exch
1 index exch 0 exch putinterval
1 index exch/args exch put
}
{pop}
ifelse
}
ifelse
begin
count 1 sub
1 index lt
{pop count}
if
dup/argCount exch def
dup 0 gt
{
args exch 0 exch getinterval
astore pop
}
{pop}
ifelse
count
/restCount exch def
end
/@opStackLevel @opStackLevel 1 add def
countdictstack 1 sub
@dictStackCountByLevel exch @dictStackLevel exch put
/@dictStackLevel @dictStackLevel 1 add def
@vmState setglobal
end
}bind def
/@_RestoreStackLevels
{
Adobe_CoolType_Data
begin
/@opStackLevel @opStackLevel 1 sub def
@opStackCountByLevel @opStackLevel get
begin
count restCount sub dup 0 gt
{{pop}repeat}
{pop}
ifelse
args 0 argCount getinterval{}forall
end
/@dictStackLevel @dictStackLevel 1 sub def
@dictStackCountByLevel @dictStackLevel get
end
countdictstack exch sub dup 0 gt
{{end}repeat}
{pop}
ifelse
}bind def
/@_PopStackLevels
{
Adobe_CoolType_Data
begin
/@opStackLevel @opStackLevel 1 sub def
/@dictStackLevel @dictStackLevel 1 sub def
end
}bind def
/@Raise
{
exch cvx exch errordict exch get exec
stop
}bind def
/@ReRaise
{
cvx $error/errorname get errordict exch get exec
stop
}bind def
/@Stopped
{
0 @#Stopped
}bind def
/@#Stopped
{
@_SaveStackLevels
stopped
{@_RestoreStackLevels true}
{@_PopStackLevels false}
ifelse
}bind def
/@Arg
{
Adobe_CoolType_Data
begin
@opStackCountByLevel @opStackLevel 1 sub get
begin
args exch
argCount 1 sub exch sub get
end
end
}bind def
currentglobal true setglobal
/CTHasResourceForAllBug
Level2?
{
1 dict dup
/@shouldNotDisappearDictValue true def
Adobe_CoolType_Data exch/@shouldNotDisappearDict exch put
begin
count @_SaveStackLevels
{(*){pop stop}128 string/Category resourceforall}
stopped pop
@_RestoreStackLevels
currentdict Adobe_CoolType_Data/@shouldNotDisappearDict get dup 3 1 roll ne dup 3 1 roll
{
/@shouldNotDisappearDictValue known
{
{
end
currentdict 1 index eq
{pop exit}
if
}
loop
}
if
}
{
pop
end
}
ifelse
}
{false}
ifelse
def
true setglobal
/CTHasResourceStatusBug
Level2?
{
mark
{/steveamerige/Category resourcestatus}
stopped
{cleartomark true}
{cleartomark currentglobal not}
ifelse
}
{false}
ifelse
def
setglobal
/CTResourceStatus
{
mark 3 1 roll
/Category findresource
begin
({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
{cleartomark false}
{{3 2 roll pop true}{cleartomark false}ifelse}
ifelse
end
}bind def
/CTWorkAroundBugs
{
Level2?
{
/cid_PreLoad/ProcSet resourcestatus
{
pop pop
currentglobal
mark
{
(*)
{
dup/CMap CTHasResourceStatusBug
{CTResourceStatus}
{resourcestatus}
ifelse
{
pop dup 0 eq exch 1 eq or
{
dup/CMap findresource gcheck setglobal
/CMap undefineresource
}
{
pop CTHasResourceForAllBug
{exit}
{stop}
ifelse
}
ifelse
}
{pop}
ifelse
}
128 string/CMap resourceforall
}
stopped
{cleartomark}
stopped pop
setglobal
}
if
}
if
}bind def
/ds
{
Adobe_CoolType_Core
begin
CTWorkAroundBugs
/mo/moveto load def
/nf/newencodedfont load def
/msf{makefont setfont}bind def
/uf{dup undefinefont ct_VMDictUndef}bind def
/ur/undefineresource load def
/chp/charpath load def
/awsh/awidthshow load def
/wsh/widthshow load def
/ash/ashow load def
/@xshow/xshow load def
/@yshow/yshow load def
/@xyshow/xyshow load def
/@cshow/cshow load def
/sh/show load def
/rp/repeat load def
/.n/.notdef def
end
currentglobal false setglobal
userdict/Adobe_CoolType_Data 2 copy known not
{2 copy 10 dict put}
if
get
begin
/AddWidths? false def
/CC 0 def
/charcode 2 string def
/@opStackCountByLevel 32 dict def
/@opStackLevel 0 def
/@dictStackCountByLevel 32 dict def
/@dictStackLevel 0 def
/InVMFontsByCMap 10 dict def
/InVMDeepCopiedFonts 10 dict def
end
setglobal
}bind def
/dt
{
currentdict Adobe_CoolType_Core eq
{end}
if
}bind def
/ps
{
Adobe_CoolType_Core begin
Adobe_CoolType_GVMFonts begin
Adobe_CoolType_LVMFonts begin
SharedFontDirectory begin
}bind def
/pt
{
end
end
end
end
}bind def
/unload
{
systemdict/languagelevel known
{
systemdict/languagelevel get 2 ge
{
userdict/Adobe_CoolType_Core 2 copy known
{undef}
{pop pop}
ifelse
}
if
}
if
}bind def
/ndf
{
1 index where
{pop pop pop}
{dup xcheck{bind}if def}
ifelse
}def
/findfont systemdict
begin
userdict
begin
/globaldict where{/globaldict get begin}if
dup where pop exch get
/globaldict where{pop end}if
end
end
Adobe_CoolType_Core_Defined
{/systemfindfont exch def}
{
/findfont 1 index def
/systemfindfont exch def
}
ifelse
/undefinefont
{pop}ndf
/copyfont
{
currentglobal 3 1 roll
1 index gcheck setglobal
dup null eq{0}{dup length}ifelse
2 index length add 1 add dict
begin
exch
{
1 index/FID eq
{pop pop}
{def}
ifelse
}
forall
dup null eq
{pop}
{{def}forall}
ifelse
currentdict
end
exch setglobal
}bind def
/copyarray
{
currentglobal exch
dup gcheck setglobal
dup length array copy
exch setglobal
}bind def
/newencodedfont
{
currentglobal
{
SharedFontDirectory 3 index known
{SharedFontDirectory 3 index get/FontReferenced known}
{false}
ifelse
}
{
FontDirectory 3 index known
{FontDirectory 3 index get/FontReferenced known}
{
SharedFontDirectory 3 index known
{SharedFontDirectory 3 index get/FontReferenced known}
{false}
ifelse
}
ifelse
}
ifelse
dup
{
3 index findfont/FontReferenced get
2 index dup type/nametype eq
{findfont}
if ne
{pop false}
if
}
if
dup
{
1 index dup type/nametype eq
{findfont}
if
dup/CharStrings known
{
/CharStrings get length
4 index findfont/CharStrings get length
ne
{
pop false
}
if
}
{pop}
ifelse
}
if
{
pop
1 index findfont
/Encoding get exch
0 1 255
{2 copy get 3 index 3 1 roll put}
for
pop pop pop
}
{
currentglobal
4 1 roll
dup type/nametype eq
{findfont}
if
dup gcheck setglobal
dup dup maxlength 2 add dict
begin
exch
{
1 index/FID ne
2 index/Encoding ne and
{def}
{pop pop}
ifelse
}
forall
/FontReferenced exch def
/Encoding exch dup length array copy def
/FontName 1 index dup type/stringtype eq{cvn}if def dup
currentdict
end
definefont ct_VMDictPut
setglobal
}
ifelse
}bind def
/SetSubstituteStrategy
{
$SubstituteFont
begin
dup type/dicttype ne
{0 dict}
if
currentdict/$Strategies known
{
exch $Strategies exch
2 copy known
{
get
2 copy maxlength exch maxlength add dict
begin
{def}forall
{def}forall
currentdict
dup/$Init known
{dup/$Init get exec}
if
end
/$Strategy exch def
}
{pop pop pop}
ifelse
}
{pop pop}
ifelse
end
}bind def
/scff
{
$SubstituteFont
begin
dup type/stringtype eq
{dup length exch}
{null}
ifelse
/$sname exch def
/$slen exch def
/$inVMIndex
$sname null eq
{
1 index $str cvs
dup length $slen sub $slen getinterval cvn
}
{$sname}
ifelse def
end
{findfont}
@Stopped
{
dup length 8 add string exch
1 index 0(BadFont:)putinterval
1 index exch 8 exch dup length string cvs putinterval cvn
{findfont}
@Stopped
{pop/Courier findfont}
if
}
if
$SubstituteFont
begin
/$sname null def
/$slen 0 def
/$inVMIndex null def
end
}bind def
/isWidthsOnlyFont
{
dup/WidthsOnly known
{pop pop true}
{
dup/FDepVector known
{/FDepVector get{isWidthsOnlyFont dup{exit}if}forall}
{
dup/FDArray known
{/FDArray get{isWidthsOnlyFont dup{exit}if}forall}
{pop}
ifelse
}
ifelse
}
ifelse
}bind def
/ct_StyleDicts 4 dict dup begin
/Adobe-Japan1 4 dict dup begin
Level2?
{
/Serif
/HeiseiMin-W3-83pv-RKSJ-H/Font resourcestatus
{pop pop/HeiseiMin-W3}
{
/CIDFont/Category resourcestatus
{
pop pop
/HeiseiMin-W3/CIDFont resourcestatus
{pop pop/HeiseiMin-W3}
{/Ryumin-Light}
ifelse
}
{/Ryumin-Light}
ifelse
}
ifelse
def
/SansSerif
/HeiseiKakuGo-W5-83pv-RKSJ-H/Font resourcestatus
{pop pop/HeiseiKakuGo-W5}
{
/CIDFont/Category resourcestatus
{
pop pop
/HeiseiKakuGo-W5/CIDFont resourcestatus
{pop pop/HeiseiKakuGo-W5}
{/GothicBBB-Medium}
ifelse
}
{/GothicBBB-Medium}
ifelse
}
ifelse
def
/HeiseiMaruGo-W4-83pv-RKSJ-H/Font resourcestatus
{pop pop/HeiseiMaruGo-W4}
{
/CIDFont/Category resourcestatus
{
pop pop
/HeiseiMaruGo-W4/CIDFont resourcestatus
{pop pop/HeiseiMaruGo-W4}
{
/Jun101-Light-RKSJ-H/Font resourcestatus
{pop pop/Jun101-Light}
{SansSerif}
ifelse
}
ifelse
}
{
/Jun101-Light-RKSJ-H/Font resourcestatus
{pop pop/Jun101-Light}
{SansSerif}
ifelse
}
ifelse
}
ifelse
/RoundSansSerif exch def
/Default Serif def
}
{
/Serif/Ryumin-Light def
/SansSerif/GothicBBB-Medium def
{
(fonts/Jun101-Light-83pv-RKSJ-H)status
}stopped
{pop}{
{pop pop pop pop/Jun101-Light}
{SansSerif}
ifelse
/RoundSansSerif exch def
}ifelse
/Default Serif def
}
ifelse
end
def
/Adobe-Korea1 4 dict dup begin
/Serif/HYSMyeongJo-Medium def
/SansSerif/HYGoThic-Medium def
/RoundSansSerif SansSerif def
/Default Serif def
end
def
/Adobe-GB1 4 dict dup begin
/Serif/STSong-Light def
/SansSerif/STHeiti-Regular def
/RoundSansSerif SansSerif def
/Default Serif def
end
def
/Adobe-CNS1 4 dict dup begin
/Serif/MKai-Medium def
/SansSerif/MHei-Medium def
/RoundSansSerif SansSerif def
/Default Serif def
end
def
end
def
Level2?{currentglobal true setglobal}if
/ct_BoldRomanWidthProc
{
stringwidth 1 index 0 ne{exch .03 add exch}if setcharwidth
0 0
}bind def
/ct_Type0WidthProc
{
dup stringwidth 0 0 moveto
2 index true charpath pathbbox
0 -1
7 index 2 div .88
setcachedevice2
pop
0 0
}bind def
/ct_Type0WMode1WidthProc
{
dup stringwidth
pop 2 div neg -0.88
2 copy
moveto
0 -1
5 -1 roll true charpath pathbbox
setcachedevice
}bind def
/cHexEncoding
[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
/ct_BoldBaseFont
11 dict begin
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding cHexEncoding def
/_setwidthProc/ct_BoldRomanWidthProc load def
/_bcstr1 1 string def
/BuildChar
{
exch begin
_basefont setfont
_bcstr1 dup 0 4 -1 roll put
dup
_setwidthProc
3 copy
moveto
show
_basefonto setfont
moveto
show
end
}bind def
currentdict
end
def
systemdict/composefont known
{
/ct_DefineIdentity-H
{
/Identity-H/CMap resourcestatus
{
pop pop
}
{
/CIDInit/ProcSet findresource begin
12 dict begin
begincmap
/CIDSystemInfo 3 dict dup begin
/Registry(Adobe)def
/Ordering(Identity)def
/Supplement 0 def
end def
/CMapName/Identity-H def
/CMapVersion 1.000 def
/CMapType 1 def
1 begincodespacerange
<0000>
endcodespacerange
1 begincidrange
<0000>0
endcidrange
endcmap
CMapName currentdict/CMap defineresource pop
end
end
}
ifelse
}
def
/ct_BoldBaseCIDFont
11 dict begin
/CIDFontType 1 def
/CIDFontName/ct_BoldBaseCIDFont def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/_setwidthProc/ct_Type0WidthProc load def
/_bcstr2 2 string def
/BuildGlyph
{
exch begin
_basefont setfont
_bcstr2 1 2 index 256 mod put
_bcstr2 0 3 -1 roll 256 idiv put
_bcstr2 dup _setwidthProc
3 copy
moveto
show
_basefonto setfont
moveto
show
end
}bind def
currentdict
end
def
}if
Level2?{setglobal}if
/ct_CopyFont{
{
1 index/FID ne 2 index/UniqueID ne and
{def}{pop pop}ifelse
}forall
}bind def
/ct_Type0CopyFont
{
exch
dup length dict
begin
ct_CopyFont
[
exch
FDepVector
{
dup/FontType get 0 eq
{
1 index ct_Type0CopyFont
/_ctType0 exch definefont
}
{
/_ctBaseFont exch
2 index exec
}
ifelse
exch
}
forall
pop
]
/FDepVector exch def
currentdict
end
}bind def
/ct_MakeBoldFont
{
dup/ct_SyntheticBold known
{
dup length 3 add dict begin
ct_CopyFont
/ct_StrokeWidth .03 0 FontMatrix idtransform pop def
/ct_SyntheticBold true def
currentdict
end
definefont
}
{
dup dup length 3 add dict
begin
ct_CopyFont
/PaintType 2 def
/StrokeWidth .03 0 FontMatrix idtransform pop def
/dummybold currentdict
end
definefont
dup/FontType get dup 9 ge exch 11 le and
{
ct_BoldBaseCIDFont
dup length 3 add dict copy begin
dup/CIDSystemInfo get/CIDSystemInfo exch def
ct_DefineIdentity-H
/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
/_basefont exch def
/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
/_basefonto exch def
currentdict
end
/CIDFont defineresource
}
{
ct_BoldBaseFont
dup length 3 add dict copy begin
/_basefont exch def
/_basefonto exch def
currentdict
end
definefont
}
ifelse
}
ifelse
}bind def
/ct_MakeBold{
1 index
1 index
findfont
currentglobal 5 1 roll
dup gcheck setglobal
dup
/FontType get 0 eq
{
dup/WMode known{dup/WMode get 1 eq}{false}ifelse
version length 4 ge
and
{version 0 4 getinterval cvi 2015 ge}
{true}
ifelse
{/ct_Type0WidthProc}
{/ct_Type0WMode1WidthProc}
ifelse
ct_BoldBaseFont/_setwidthProc 3 -1 roll load put
{ct_MakeBoldFont}ct_Type0CopyFont definefont
}
{
dup/_fauxfont known not 1 index/SubstMaster known not and
{
ct_BoldBaseFont/_setwidthProc /ct_BoldRomanWidthProc load put
ct_MakeBoldFont
}
{
2 index 2 index eq
{exch pop }
{
dup length dict begin
ct_CopyFont
currentdict
end
definefont
}
ifelse
}
ifelse
}
ifelse
pop pop pop
setglobal
}bind def
/?str1 256 string def
/?set
{
$SubstituteFont
begin
/$substituteFound false def
/$fontname 1 index def
/$doSmartSub false def
end
dup
findfont
$SubstituteFont
begin
$substituteFound
{false}
{
dup/FontName known
{
dup/FontName get $fontname eq
1 index/DistillerFauxFont known not and
/currentdistillerparams where
{pop false 2 index isWidthsOnlyFont not and}
if
}
{false}
ifelse
}
ifelse
exch pop
/$doSmartSub true def
end
{
5 1 roll pop pop pop pop
findfont
}
{
1 index
findfont
dup/FontType get 3 eq
{
6 1 roll pop pop pop pop pop false
}
{pop true}
ifelse
{
$SubstituteFont
begin
pop pop
/$styleArray 1 index def
/$regOrdering 2 index def
pop pop
0 1 $styleArray length 1 sub
{
$styleArray exch get
ct_StyleDicts $regOrdering
2 copy known
{
get
exch 2 copy known not
{pop/Default}
if
get
dup type/nametype eq
{
?str1 cvs length dup 1 add exch
?str1 exch(-)putinterval
exch dup length exch ?str1 exch 3 index exch putinterval
add ?str1 exch 0 exch getinterval cvn
}
{
pop pop/Unknown
}
ifelse
}
{
pop pop pop pop/Unknown
}
ifelse
}
for
end
findfont
}if
}
ifelse
currentglobal false setglobal 3 1 roll
null copyfont definefont pop
setglobal
}bind def
setpacking
userdict/$SubstituteFont 25 dict put
1 dict
begin
/SubstituteFont
dup $error exch 2 copy known
{get}
{pop pop{pop/Courier}bind}
ifelse def
/currentdistillerparams where dup
{
pop pop
currentdistillerparams/CannotEmbedFontPolicy 2 copy known
{get/Error eq}
{pop pop false}
ifelse
}
if not
{
countdictstack array dictstack 0 get
begin
userdict
begin
$SubstituteFont
begin
/$str 128 string def
/$fontpat 128 string def
/$slen 0 def
/$sname null def
/$match false def
/$fontname null def
/$substituteFound false def
/$inVMIndex null def
/$doSmartSub true def
/$depth 0 def
/$fontname null def
/$italicangle 26.5 def
/$dstack null def
/$Strategies 10 dict dup
begin
/$Type3Underprint
{
currentglobal exch false setglobal
11 dict
begin
/UseFont exch
$WMode 0 ne
{
dup length dict copy
dup/WMode $WMode put
/UseFont exch definefont
}
if def
/FontName $fontname dup type/stringtype eq{cvn}if def
/FontType 3 def
/FontMatrix[.001 0 0 .001 0 0]def
/Encoding 256 array dup 0 1 255{/.notdef put dup}for pop def
/FontBBox[0 0 0 0]def
/CCInfo 7 dict dup
begin
/cc null def
/x 0 def
/y 0 def
end def
/BuildChar
{
exch
begin
CCInfo
begin
1 string dup 0 3 index put exch pop
/cc exch def
UseFont 1000 scalefont setfont
cc stringwidth/y exch def/x exch def
x y setcharwidth
$SubstituteFont/$Strategy get/$Underprint get exec
0 0 moveto cc show
x y moveto
end
end
}bind def
currentdict
end
exch setglobal
}bind def
/$GetaTint
2 dict dup
begin
/$BuildFont
{
dup/WMode known
{dup/WMode get}
{0}
ifelse
/$WMode exch def
$fontname exch
dup/FontName known
{
dup/FontName get
dup type/stringtype eq{cvn}if
}
{/unnamedfont}
ifelse
exch
Adobe_CoolType_Data/InVMDeepCopiedFonts get
1 index/FontName get known
{
pop
Adobe_CoolType_Data/InVMDeepCopiedFonts get
1 index get
null copyfont
}
{$deepcopyfont}
ifelse
exch 1 index exch/FontBasedOn exch put
dup/FontName $fontname dup type/stringtype eq{cvn}if put
definefont
Adobe_CoolType_Data/InVMDeepCopiedFonts get
begin
dup/FontBasedOn get 1 index def
end
}bind def
/$Underprint
{
gsave
x abs y abs gt
{/y 1000 def}
{/x -1000 def 500 120 translate}
ifelse
Level2?
{
[/Separation(All)/DeviceCMYK{0 0 0 1 pop}]
setcolorspace
}
{0 setgray}
ifelse
10 setlinewidth
x .8 mul
[7 3]
{
y mul 8 div 120 sub x 10 div exch moveto
0 y 4 div neg rlineto
dup 0 rlineto
0 y 4 div rlineto
closepath
gsave
Level2?
{.2 setcolor}
{.8 setgray}
ifelse
fill grestore
stroke
}
forall
pop
grestore
}bind def
end def
/$Oblique
1 dict dup
begin
/$BuildFont
{
currentglobal exch dup gcheck setglobal
null copyfont
begin
/FontBasedOn
currentdict/FontName known
{
FontName
dup type/stringtype eq{cvn}if
}
{/unnamedfont}
ifelse
def
/FontName $fontname dup type/stringtype eq{cvn}if def
/currentdistillerparams where
{pop}
{
/FontInfo currentdict/FontInfo known
{FontInfo null copyfont}
{2 dict}
ifelse
dup
begin
/ItalicAngle $italicangle def
/FontMatrix FontMatrix
[1 0 ItalicAngle dup sin exch cos div 1 0 0]
matrix concatmatrix readonly
end
4 2 roll def
def
}
ifelse
FontName currentdict
end
definefont
exch setglobal
}bind def
end def
/$None
1 dict dup
begin
/$BuildFont{}bind def
end def
end def
/$Oblique SetSubstituteStrategy
/$findfontByEnum
{
dup type/stringtype eq{cvn}if
dup/$fontname exch def
$sname null eq
{$str cvs dup length $slen sub $slen getinterval}
{pop $sname}
ifelse
$fontpat dup 0(fonts/*)putinterval exch 7 exch putinterval
/$match false def
$SubstituteFont/$dstack countdictstack array dictstack put
mark
{
$fontpat 0 $slen 7 add getinterval
{/$match exch def exit}
$str filenameforall
}
stopped
{
cleardictstack
currentdict
true
$SubstituteFont/$dstack get
{
exch
{
1 index eq
{pop false}
{true}
ifelse
}
{begin false}
ifelse
}
forall
pop
}
if
cleartomark
/$slen 0 def
$match false ne
{$match(fonts/)anchorsearch pop pop cvn}
{/Courier}
ifelse
}bind def
/$ROS 1 dict dup
begin
/Adobe 4 dict dup
begin
/Japan1 [/Ryumin-Light/HeiseiMin-W3
/GothicBBB-Medium/HeiseiKakuGo-W5
/HeiseiMaruGo-W4/Jun101-Light]def
/Korea1 [/HYSMyeongJo-Medium/HYGoThic-Medium]def
/GB1 [/STSong-Light/STHeiti-Regular]def
/CNS1 [/MKai-Medium/MHei-Medium]def
end def
end def
/$cmapname null def
/$deepcopyfont
{
dup/FontType get 0 eq
{
1 dict dup/FontName/copied put copyfont
begin
/FDepVector FDepVector copyarray
0 1 2 index length 1 sub
{
2 copy get $deepcopyfont
dup/FontName/copied put
/copied exch definefont
3 copy put pop pop
}
for
def
currentdict
end
}
{$Strategies/$Type3Underprint get exec}
ifelse
}bind def
/$buildfontname
{
dup/CIDFont findresource/CIDSystemInfo get
begin
Registry length Ordering length Supplement 8 string cvs
3 copy length 2 add add add string
dup 5 1 roll dup 0 Registry putinterval
dup 4 index(-)putinterval
dup 4 index 1 add Ordering putinterval
4 2 roll add 1 add 2 copy(-)putinterval
end
1 add 2 copy 0 exch getinterval $cmapname $fontpat cvs exch
anchorsearch
{pop pop 3 2 roll putinterval cvn/$cmapname exch def}
{pop pop pop pop pop}
ifelse
length
$str 1 index(-)putinterval 1 add
$str 1 index $cmapname $fontpat cvs putinterval
$cmapname length add
$str exch 0 exch getinterval cvn
}bind def
/$findfontByROS
{
/$fontname exch def
$ROS Registry 2 copy known
{
get Ordering 2 copy known
{get}
{pop pop[]}
ifelse
}
{pop pop[]}
ifelse
false exch
{
dup/CIDFont resourcestatus
{
pop pop
save
1 index/CIDFont findresource
dup/WidthsOnly known
{dup/WidthsOnly get}
{false}
ifelse
exch pop
exch restore
{pop}
{exch pop true exit}
ifelse
}
{pop}
ifelse
}
forall
{$str cvs $buildfontname}
{
false(*)
{
save exch
dup/CIDFont findresource
dup/WidthsOnly known
{dup/WidthsOnly get not}
{true}
ifelse
exch/CIDSystemInfo get
dup/Registry get Registry eq
exch/Ordering get Ordering eq and and
{exch restore exch pop true exit}
{pop restore}
ifelse
}
$str/CIDFont resourceforall
{$buildfontname}
{$fontname $findfontByEnum}
ifelse
}
ifelse
}bind def
end
end
currentdict/$error known currentdict/languagelevel known and dup
{pop $error/SubstituteFont known}
if
dup
{$error}
{Adobe_CoolType_Core}
ifelse
begin
{
/SubstituteFont
/CMap/Category resourcestatus
{
pop pop
{
$SubstituteFont
begin
/$substituteFound true def
dup length $slen gt
$sname null ne or
$slen 0 gt and
{
$sname null eq
{dup $str cvs dup length $slen sub $slen getinterval cvn}
{$sname}
ifelse
Adobe_CoolType_Data/InVMFontsByCMap get
1 index 2 copy known
{
get
false exch
{
pop
currentglobal
{
GlobalFontDirectory 1 index known
{exch pop true exit}
{pop}
ifelse
}
{
FontDirectory 1 index known
{exch pop true exit}
{
GlobalFontDirectory 1 index known
{exch pop true exit}
{pop}
ifelse
}
ifelse
}
ifelse
}
forall
}
{pop pop false}
ifelse
{
exch pop exch pop
}
{
dup/CMap resourcestatus
{
pop pop
dup/$cmapname exch def
/CMap findresource/CIDSystemInfo get{def}forall
$findfontByROS
}
{
128 string cvs
dup(-)search
{
3 1 roll search
{
3 1 roll pop
{dup cvi}
stopped
{pop pop pop pop pop $findfontByEnum}
{
4 2 roll pop pop
exch length
exch
2 index length
2 index
sub
exch 1 sub -1 0
{
$str cvs dup length
4 index
0
4 index
4 3 roll add
getinterval
exch 1 index exch 3 index exch
putinterval
dup/CMap resourcestatus
{
pop pop
4 1 roll pop pop pop
dup/$cmapname exch def
/CMap findresource/CIDSystemInfo get{def}forall
$findfontByROS
true exit
}
{pop}
ifelse
}
for
dup type/booleantype eq
{pop}
{pop pop pop $findfontByEnum}
ifelse
}
ifelse
}
{pop pop pop $findfontByEnum}
ifelse
}
{pop pop $findfontByEnum}
ifelse
}
ifelse
}
ifelse
}
{//SubstituteFont exec}
ifelse
/$slen 0 def
end
}
}
{
{
$SubstituteFont
begin
/$substituteFound true def
dup length $slen gt
$sname null ne or
$slen 0 gt and
{$findfontByEnum}
{//SubstituteFont exec}
ifelse
end
}
}
ifelse
bind readonly def
Adobe_CoolType_Core/scfindfont/systemfindfont load put
}
{
/scfindfont
{
$SubstituteFont
begin
dup systemfindfont
dup/FontName known
{dup/FontName get dup 3 index ne}
{/noname true}
ifelse
dup
{
/$origfontnamefound 2 index def
/$origfontname 4 index def/$substituteFound true def
}
if
exch pop
{
$slen 0 gt
$sname null ne
3 index length $slen gt or and
{
pop dup $findfontByEnum findfont
dup maxlength 1 add dict
begin
{1 index/FID eq{pop pop}{def}ifelse}
forall
currentdict
end
definefont
dup/FontName known{dup/FontName get}{null}ifelse
$origfontnamefound ne
{
$origfontname $str cvs print
( substitution revised, using )print
dup/FontName known
{dup/FontName get}{(unspecified font)}
ifelse
$str cvs print(.\n)print
}
if
}
{exch pop}
ifelse
}
{exch pop}
ifelse
end
}bind def
}
ifelse
end
end
Adobe_CoolType_Core_Defined not
{
Adobe_CoolType_Core/findfont
{
$SubstituteFont
begin
$depth 0 eq
{
/$fontname 1 index dup type/stringtype ne{$str cvs}if def
/$substituteFound false def
}
if
/$depth $depth 1 add def
end
scfindfont
$SubstituteFont
begin
/$depth $depth 1 sub def
$substituteFound $depth 0 eq and
{
$inVMIndex null ne
{dup $inVMIndex $AddInVMFont}
if
$doSmartSub
{
currentdict/$Strategy known
{$Strategy/$BuildFont get exec}
if
}
if
}
if
end
}bind put
}
if
}
if
end
/$AddInVMFont
{
exch/FontName 2 copy known
{
get
1 dict dup begin exch 1 index gcheck def end exch
Adobe_CoolType_Data/InVMFontsByCMap get exch
$DictAdd
}
{pop pop pop}
ifelse
}bind def
/$DictAdd
{
2 copy known not
{2 copy 4 index length dict put}
if
Level2? not
{
2 copy get dup maxlength exch length 4 index length add lt
2 copy get dup length 4 index length add exch maxlength 1 index lt
{
2 mul dict
begin
2 copy get{forall}def
2 copy currentdict put
end
}
{pop}
ifelse
}
if
get
begin
{def}
forall
end
}bind def
end
end
%%EndResource
currentglobal true setglobal
%%BeginResource: procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
%%Copyright: Copyright 1987-2006 Adobe Systems Incorporated.
%%Version: 1.23 0
systemdict/languagelevel known dup
{currentglobal false setglobal}
{false}
ifelse
exch
userdict/Adobe_CoolType_Utility 2 copy known
{2 copy get dup maxlength 27 add dict copy}
{27 dict}
ifelse put
Adobe_CoolType_Utility
begin
/@eexecStartData
def
/@recognizeCIDFont null def
/ct_Level2? exch def
/ct_Clone? 1183615869 internaldict dup
/CCRun known not
exch/eCCRun known not
ct_Level2? and or def
ct_Level2?
{globaldict begin currentglobal true setglobal}
if
/ct_AddStdCIDMap
ct_Level2?
{{
mark
Adobe_CoolType_Utility/@recognizeCIDFont currentdict put
{
((Hex)57 StartData
0615 1e27 2c39 1c60 d8a8 cc31 fe2b f6e0
7aa3 e541 e21c 60d8 a8c9 c3d0 6d9e 1c60
d8a8 c9c2 02d7 9a1c 60d8 a849 1c60 d8a8
cc36 74f4 1144 b13b 77)0()/SubFileDecode filter cvx exec
}
stopped
{
cleartomark
Adobe_CoolType_Utility/@recognizeCIDFont get
countdictstack dup array dictstack
exch 1 sub -1 0
{
2 copy get 3 index eq
{1 index length exch sub 1 sub{end}repeat exit}
{pop}
ifelse
}
for
pop pop
Adobe_CoolType_Utility/@eexecStartData get eexec
}
{cleartomark}
ifelse
}}
{{
Adobe_CoolType_Utility/@eexecStartData get eexec
}}
ifelse bind def
userdict/cid_extensions known
dup{cid_extensions/cid_UpdateDB known and}if
{
cid_extensions
begin
/cid_GetCIDSystemInfo
{
1 index type/stringtype eq
{exch cvn exch}
if
cid_extensions
begin
dup load 2 index known
{
2 copy
cid_GetStatusInfo
dup null ne
{
1 index load
3 index get
dup null eq
{pop pop cid_UpdateDB}
{
exch
1 index/Created get eq
{exch pop exch pop}
{pop cid_UpdateDB}
ifelse
}
ifelse
}
{pop cid_UpdateDB}
ifelse
}
{cid_UpdateDB}
ifelse
end
}bind def
end
}
if
ct_Level2?
{end setglobal}
if
/ct_UseNativeCapability? systemdict/composefont known def
/ct_MakeOCF 35 dict def
/ct_Vars 25 dict def
/ct_GlyphDirProcs 6 dict def
/ct_BuildCharDict 15 dict dup
begin
/charcode 2 string def
/dst_string 1500 string def
/nullstring()def
/usewidths? true def
end def
ct_Level2?{setglobal}{pop}ifelse
ct_GlyphDirProcs
begin
/GetGlyphDirectory
{
systemdict/languagelevel known
{pop/CIDFont findresource/GlyphDirectory get}
{
1 index/CIDFont findresource/GlyphDirectory
get dup type/dicttype eq
{
dup dup maxlength exch length sub 2 index lt
{
dup length 2 index add dict copy 2 index
/CIDFont findresource/GlyphDirectory 2 index put
}
if
}
if
exch pop exch pop
}
ifelse
+
}def
/+
{
systemdict/languagelevel known
{
currentglobal false setglobal
3 dict begin
/vm exch def
}
{1 dict begin}
ifelse
/$ exch def
systemdict/languagelevel known
{
vm setglobal
/gvm currentglobal def
$ gcheck setglobal
}
if
?{$ begin}if
}def
/?{$ type/dicttype eq}def
/|{
userdict/Adobe_CoolType_Data known
{
Adobe_CoolType_Data/AddWidths? known
{
currentdict Adobe_CoolType_Data
begin
begin
AddWidths?
{
Adobe_CoolType_Data/CC 3 index put
?{def}{$ 3 1 roll put}ifelse
CC charcode exch 1 index 0 2 index 256 idiv put
1 index exch 1 exch 256 mod put
stringwidth 2 array astore
currentfont/Widths get exch CC exch put
}
{?{def}{$ 3 1 roll put}ifelse}
ifelse
end
end
}
{?{def}{$ 3 1 roll put}ifelse} ifelse
}
{?{def}{$ 3 1 roll put}ifelse}
ifelse
}def
/!
{
?{end}if
systemdict/languagelevel known
{gvm setglobal}
if
end
}def
/:{string currentfile exch readstring pop}executeonly def
end
ct_MakeOCF
begin
/ct_cHexEncoding
[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
/ct_CID_STR_SIZE 8000 def
/ct_mkocfStr100 100 string def
/ct_defaultFontMtx[.001 0 0 .001 0 0]def
/ct_1000Mtx[1000 0 0 1000 0 0]def
/ct_raise{exch cvx exch errordict exch get exec stop}bind def
/ct_reraise
{cvx $error/errorname get(Error: )print dup( )cvs print
errordict exch get exec stop
}bind def
/ct_cvnsi
{
1 index add 1 sub 1 exch 0 4 1 roll
{
2 index exch get
exch 8 bitshift
add
}
for
exch pop
}bind def
/ct_GetInterval
{
Adobe_CoolType_Utility/ct_BuildCharDict get
begin
/dst_index 0 def
dup dst_string length gt
{dup string/dst_string exch def}
if
1 index ct_CID_STR_SIZE idiv
/arrayIndex exch def
2 index arrayIndex get
2 index
arrayIndex ct_CID_STR_SIZE mul
sub
{
dup 3 index add 2 index length le
{
2 index getinterval
dst_string dst_index 2 index putinterval
length dst_index add/dst_index exch def
exit
}
{
1 index length 1 index sub
dup 4 1 roll
getinterval
dst_string dst_index 2 index putinterval
pop dup dst_index add/dst_index exch def
sub
/arrayIndex arrayIndex 1 add def
2 index dup length arrayIndex gt
{arrayIndex get}
{
pop
exit
}
ifelse
0
}
ifelse
}
loop
pop pop pop
dst_string 0 dst_index getinterval
end
}bind def
ct_Level2?
{
/ct_resourcestatus
currentglobal mark true setglobal
{/unknowninstancename/Category resourcestatus}
stopped
{cleartomark setglobal true}
{cleartomark currentglobal not exch setglobal}
ifelse
{
{
mark 3 1 roll/Category findresource
begin
ct_Vars/vm currentglobal put
({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
{cleartomark false}
{{3 2 roll pop true}{cleartomark false}ifelse}
ifelse
ct_Vars/vm get setglobal
end
}
}
{{resourcestatus}}
ifelse bind def
/CIDFont/Category ct_resourcestatus
{pop pop}
{
currentglobal true setglobal
/Generic/Category findresource
dup length dict copy
dup/InstanceType/dicttype put
/CIDFont exch/Category defineresource pop
setglobal
}
ifelse
ct_UseNativeCapability?
{
/CIDInit/ProcSet findresource begin
12 dict begin
begincmap
/CIDSystemInfo 3 dict dup begin
/Registry(Adobe)def
/Ordering(Identity)def
/Supplement 0 def
end def
/CMapName/Identity-H def
/CMapVersion 1.000 def
/CMapType 1 def
1 begincodespacerange
<0000>
endcodespacerange
1 begincidrange
<0000>0
endcidrange
endcmap
CMapName currentdict/CMap defineresource pop
end
end
}
if
}
{
/ct_Category 2 dict begin
/CIDFont 10 dict def
/ProcSet 2 dict def
currentdict
end
def
/defineresource
{
ct_Category 1 index 2 copy known
{
get
dup dup maxlength exch length eq
{
dup length 10 add dict copy
ct_Category 2 index 2 index put
}
if
3 index 3 index put
pop exch pop
}
{pop pop/defineresource/undefined ct_raise}
ifelse
}bind def
/findresource
{
ct_Category 1 index 2 copy known
{
get
2 index 2 copy known
{get 3 1 roll pop pop}
{pop pop/findresource/undefinedresource ct_raise}
ifelse
}
{pop pop/findresource/undefined ct_raise}
ifelse
}bind def
/resourcestatus
{
ct_Category 1 index 2 copy known
{
get
2 index known
exch pop exch pop
{
0 -1 true
}
{
false
}
ifelse
}
{pop pop/findresource/undefined ct_raise}
ifelse
}bind def
/ct_resourcestatus/resourcestatus load def
}
ifelse
/ct_CIDInit 2 dict
begin
/ct_cidfont_stream_init
{
{
dup(Binary)eq
{
pop
null
currentfile
ct_Level2?
{
{cid_BYTE_COUNT()/SubFileDecode filter}
stopped
{pop pop pop}
if
}
if
/readstring load
exit
}
if
dup(Hex)eq
{
pop
currentfile
ct_Level2?
{
{null exch/ASCIIHexDecode filter/readstring}
stopped
{pop exch pop(>)exch/readhexstring}
if
}
{(>)exch/readhexstring}
ifelse
load
exit
}
if
/StartData/typecheck ct_raise
}
loop
cid_BYTE_COUNT ct_CID_STR_SIZE le
{
2 copy cid_BYTE_COUNT string exch exec
pop
1 array dup
3 -1 roll
0 exch put
}
{
cid_BYTE_COUNT ct_CID_STR_SIZE div ceiling cvi
dup array exch 2 sub 0 exch 1 exch
{
2 copy
5 index
ct_CID_STR_SIZE
string
6 index exec
pop
put
pop
}
for
2 index
cid_BYTE_COUNT ct_CID_STR_SIZE mod string
3 index exec
pop
1 index exch
1 index length 1 sub
exch put
}
ifelse
cid_CIDFONT exch/GlyphData exch put
2 index null eq
{
pop pop pop
}
{
pop/readstring load
1 string exch
{
3 copy exec
pop
dup length 0 eq
{
pop pop pop pop pop
true exit
}
if
4 index
eq
{
pop pop pop pop
false exit
}
if
}
loop
pop
}
ifelse
}bind def
/StartData
{
mark
{
currentdict
dup/FDArray get 0 get/FontMatrix get
0 get 0.001 eq
{
dup/CDevProc known not
{
/CDevProc 1183615869 internaldict/stdCDevProc 2 copy known
{get}
{
pop pop
{pop pop pop pop pop 0 -1000 7 index 2 div 880}
}
ifelse
def
}
if
}
{
/CDevProc
{
pop pop pop pop pop
0
1 cid_temp/cid_CIDFONT get
/FDArray get 0 get
/FontMatrix get 0 get div
7 index 2 div
1 index 0.88 mul
}def
}
ifelse
/cid_temp 15 dict def
cid_temp
begin
/cid_CIDFONT exch def
3 copy pop
dup/cid_BYTE_COUNT exch def 0 gt
{
ct_cidfont_stream_init
FDArray
{
/Private get
dup/SubrMapOffset known
{
begin
/Subrs SubrCount array def
Subrs
SubrMapOffset
SubrCount
SDBytes
ct_Level2?
{
currentdict dup/SubrMapOffset undef
dup/SubrCount undef
/SDBytes undef
}
if
end
/cid_SD_BYTES exch def
/cid_SUBR_COUNT exch def
/cid_SUBR_MAP_OFFSET exch def
/cid_SUBRS exch def
cid_SUBR_COUNT 0 gt
{
GlyphData cid_SUBR_MAP_OFFSET cid_SD_BYTES ct_GetInterval
0 cid_SD_BYTES ct_cvnsi
0 1 cid_SUBR_COUNT 1 sub
{
exch 1 index
1 add
cid_SD_BYTES mul cid_SUBR_MAP_OFFSET add
GlyphData exch cid_SD_BYTES ct_GetInterval
0 cid_SD_BYTES ct_cvnsi
cid_SUBRS 4 2 roll
GlyphData exch
4 index
1 index
sub
ct_GetInterval
dup length string copy put
}
for
pop
}
if
}
{pop}
ifelse
}
forall
}
if
cleartomark pop pop
end
CIDFontName currentdict/CIDFont defineresource pop
end end
}
stopped
{cleartomark/StartData ct_reraise}
if
}bind def
currentdict
end def
/ct_saveCIDInit
{
/CIDInit/ProcSet ct_resourcestatus
{true}
{/CIDInitC/ProcSet ct_resourcestatus}
ifelse
{
pop pop
/CIDInit/ProcSet findresource
ct_UseNativeCapability?
{pop null}
{/CIDInit ct_CIDInit/ProcSet defineresource pop}
ifelse
}
{/CIDInit ct_CIDInit/ProcSet defineresource pop null}
ifelse
ct_Vars exch/ct_oldCIDInit exch put
}bind def
/ct_restoreCIDInit
{
ct_Vars/ct_oldCIDInit get dup null ne
{/CIDInit exch/ProcSet defineresource pop}
{pop}
ifelse
}bind def
/ct_BuildCharSetUp
{
1 index
begin
CIDFont
begin
Adobe_CoolType_Utility/ct_BuildCharDict get
begin
/ct_dfCharCode exch def
/ct_dfDict exch def
CIDFirstByte ct_dfCharCode add
dup CIDCount ge
{pop 0}
if
/cid exch def
{
GlyphDirectory cid 2 copy known
{get}
{pop pop nullstring}
ifelse
dup length FDBytes sub 0 gt
{
dup
FDBytes 0 ne
{0 FDBytes ct_cvnsi}
{pop 0}
ifelse
/fdIndex exch def
dup length FDBytes sub FDBytes exch getinterval
/charstring exch def
exit
}
{
pop
cid 0 eq
{/charstring nullstring def exit}
if
/cid 0 def
}
ifelse
}
loop
}def
/ct_SetCacheDevice
{
0 0 moveto
dup stringwidth
3 -1 roll
true charpath
pathbbox
0 -1000
7 index 2 div 880
setcachedevice2
0 0 moveto
}def
/ct_CloneSetCacheProc
{
1 eq
{
stringwidth
pop -2 div -880
0 -1000 setcharwidth
moveto
}
{
usewidths?
{
currentfont/Widths get cid
2 copy known
{get exch pop aload pop}
{pop pop stringwidth}
ifelse
}
{stringwidth}
ifelse
setcharwidth
0 0 moveto
}
ifelse
}def
/ct_Type3ShowCharString
{
ct_FDDict fdIndex 2 copy known
{get}
{
currentglobal 3 1 roll
1 index gcheck setglobal
ct_Type1FontTemplate dup maxlength dict copy
begin
FDArray fdIndex get
dup/FontMatrix 2 copy known
{get}
{pop pop ct_defaultFontMtx}
ifelse
/FontMatrix exch dup length array copy def
/Private get
/Private exch def
/Widths rootfont/Widths get def
/CharStrings 1 dict dup/.notdef
dup length string copy put def
currentdict
end
/ct_Type1Font exch definefont
dup 5 1 roll put
setglobal
}
ifelse
dup/CharStrings get 1 index/Encoding get
ct_dfCharCode get charstring put
rootfont/WMode 2 copy known
{get}
{pop pop 0}
ifelse
exch
1000 scalefont setfont
ct_str1 0 ct_dfCharCode put
ct_str1 exch ct_dfSetCacheProc
ct_SyntheticBold
{
currentpoint
ct_str1 show
newpath
moveto
ct_str1 true charpath
ct_StrokeWidth setlinewidth
stroke
}
{ct_str1 show}
ifelse
}def
/ct_Type4ShowCharString
{
ct_dfDict ct_dfCharCode charstring
FDArray fdIndex get
dup/FontMatrix get dup ct_defaultFontMtx ct_matrixeq not
{ct_1000Mtx matrix concatmatrix concat}
{pop}
ifelse
/Private get
Adobe_CoolType_Utility/ct_Level2? get not
{
ct_dfDict/Private
3 -1 roll
{put}
1183615869 internaldict/superexec get exec
}
if
1183615869 internaldict
Adobe_CoolType_Utility/ct_Level2? get
{1 index}
{3 index/Private get mark 6 1 roll}
ifelse
dup/RunInt known
{/RunInt get}
{pop/CCRun}
ifelse
get exec
Adobe_CoolType_Utility/ct_Level2? get not
{cleartomark}
if
}bind def
/ct_BuildCharIncremental
{
{
Adobe_CoolType_Utility/ct_MakeOCF get begin
ct_BuildCharSetUp
ct_ShowCharString
}
stopped
{stop}
if
end
end
end
end
}bind def
/BaseFontNameStr(BF00)def
/ct_Type1FontTemplate 14 dict
begin
/FontType 1 def
/FontMatrix [0.001 0 0 0.001 0 0]def
/FontBBox [-250 -250 1250 1250]def
/Encoding ct_cHexEncoding def
/PaintType 0 def
currentdict
end def
/BaseFontTemplate 11 dict
begin
/FontMatrix [0.001 0 0 0.001 0 0]def
/FontBBox [-250 -250 1250 1250]def
/Encoding ct_cHexEncoding def
/BuildChar/ct_BuildCharIncremental load def
ct_Clone?
{
/FontType 3 def
/ct_ShowCharString/ct_Type3ShowCharString load def
/ct_dfSetCacheProc/ct_CloneSetCacheProc load def
/ct_SyntheticBold false def
/ct_StrokeWidth 1 def
}
{
/FontType 4 def
/Private 1 dict dup/lenIV 4 put def
/CharStrings 1 dict dup/.notdefput def
/PaintType 0 def
/ct_ShowCharString/ct_Type4ShowCharString load def
}
ifelse
/ct_str1 1 string def
currentdict
end def
/BaseFontDictSize BaseFontTemplate length 5 add def
/ct_matrixeq
{
true 0 1 5
{
dup 4 index exch get exch 3 index exch get eq and
dup not
{exit}
if
}
for
exch pop exch pop
}bind def
/ct_makeocf
{
15 dict
begin
exch/WMode exch def
exch/FontName exch def
/FontType 0 def
/FMapType 2 def
dup/FontMatrix known
{dup/FontMatrix get/FontMatrix exch def}
{/FontMatrix matrix def}
ifelse
/bfCount 1 index/CIDCount get 256 idiv 1 add
dup 256 gt{pop 256}if def
/Encoding
256 array 0 1 bfCount 1 sub{2 copy dup put pop}for
bfCount 1 255{2 copy bfCount put pop}for
def
/FDepVector bfCount dup 256 lt{1 add}if array def
BaseFontTemplate BaseFontDictSize dict copy
begin
/CIDFont exch def
CIDFont/FontBBox known
{CIDFont/FontBBox get/FontBBox exch def}
if
CIDFont/CDevProc known
{CIDFont/CDevProc get/CDevProc exch def}
if
currentdict
end
BaseFontNameStr 3(0)putinterval
0 1 bfCount dup 256 eq{1 sub}if
{
FDepVector exch
2 index BaseFontDictSize dict copy
begin
dup/CIDFirstByte exch 256 mul def
FontType 3 eq
{/ct_FDDict 2 dict def}
if
currentdict
end
1 index 16
BaseFontNameStr 2 2 getinterval cvrs pop
BaseFontNameStr exch definefont
put
}
for
ct_Clone?
{/Widths 1 index/CIDFont get/GlyphDirectory get length dict def}
if
FontName
currentdict
end
definefont
ct_Clone?
{
gsave
dup 1000 scalefont setfont
ct_BuildCharDict
begin
/usewidths? false def
currentfont/Widths get
begin
exch/CIDFont get/GlyphDirectory get
{
pop
dup charcode exch 1 index 0 2 index 256 idiv put
1 index exch 1 exch 256 mod put
stringwidth 2 array astore def
}
forall
end
/usewidths? true def
end
grestore
}
{exch pop}
ifelse
}bind def
currentglobal true setglobal
/ct_ComposeFont
{
ct_UseNativeCapability?
{
2 index/CMap ct_resourcestatus
{pop pop exch pop}
{
/CIDInit/ProcSet findresource
begin
12 dict
begin
begincmap
/CMapName 3 index def
/CMapVersion 1.000 def
/CMapType 1 def
exch/WMode exch def
/CIDSystemInfo 3 dict dup
begin
/Registry(Adobe)def
/Ordering
CMapName ct_mkocfStr100 cvs
(Adobe-)search
{
pop pop
(-)search
{
dup length string copy
exch pop exch pop
}
{pop(Identity)}
ifelse
}
{pop (Identity)}
ifelse
def
/Supplement 0 def
end def
1 begincodespacerange
<0000>
endcodespacerange
1 begincidrange
<0000>0
endcidrange
endcmap
CMapName currentdict/CMap defineresource pop
end
end
}
ifelse
composefont
}
{
3 2 roll pop
0 get/CIDFont findresource
ct_makeocf
}
ifelse
}bind def
setglobal
/ct_MakeIdentity
{
ct_UseNativeCapability?
{
1 index/CMap ct_resourcestatus
{pop pop}
{
/CIDInit/ProcSet findresource begin
12 dict begin
begincmap
/CMapName 2 index def
/CMapVersion 1.000 def
/CMapType 1 def
/CIDSystemInfo 3 dict dup
begin
/Registry(Adobe)def
/Ordering
CMapName ct_mkocfStr100 cvs
(Adobe-)search
{
pop pop
(-)search
{dup length string copy exch pop exch pop}
{pop(Identity)}
ifelse
}
{pop(Identity)}
ifelse
def
/Supplement 0 def
end def
1 begincodespacerange
<0000>
endcodespacerange
1 begincidrange
<0000>0
endcidrange
endcmap
CMapName currentdict/CMap defineresource pop
end
end
}
ifelse
composefont
}
{
exch pop
0 get/CIDFont findresource
ct_makeocf
}
ifelse
}bind def
currentdict readonly pop
end
end
%%EndResource
setglobal
%%BeginResource: procset Adobe_CoolType_Utility_T42 1.0 0
%%Copyright: Copyright 1987-2004 Adobe Systems Incorporated.
%%Version: 1.0 0
userdict/ct_T42Dict 15 dict put
ct_T42Dict begin
/Is2015?
{
version
cvi
2015
ge
}bind def
/AllocGlyphStorage
{
Is2015?
{
pop
}
{
{string}forall
}ifelse
}bind def
/Type42DictBegin
{
25 dict begin
/FontName exch def
/CharStrings 256 dict
begin
/.notdef 0 def
currentdict
end def
/Encoding exch def
/PaintType 0 def
/FontType 42 def
/FontMatrix[1 0 0 1 0 0]def
4 array astore cvx/FontBBox exch def
/sfnts
}bind def
/Type42DictEnd
{
currentdict dup/FontName get exch definefont end
ct_T42Dict exch
dup/FontName get exch put
}bind def
/RD{string currentfile exch readstring pop}executeonly def
/PrepFor2015
{
Is2015?
{
/GlyphDirectory
16
dict def
sfnts 0 get
dup
2 index
(glyx)
putinterval
2 index
(locx)
putinterval
pop
pop
}
{
pop
pop
}ifelse
}bind def
/AddT42Char
{
Is2015?
{
/GlyphDirectory get
begin
def
end
pop
pop
}
{
/sfnts get
4 index
get
3 index
2 index
putinterval
pop
pop
pop
pop
}ifelse
}bind def
/T0AddT42Mtx2
{
/CIDFont findresource/Metrics2 get begin def end
}bind def
end
%%EndResource
currentglobal true setglobal
%%BeginFile: MMFauxFont.prc
%%Copyright: Copyright 1987-2001 Adobe Systems Incorporated.
%%All Rights Reserved.
userdict /ct_EuroDict 10 dict put
ct_EuroDict begin
/ct_CopyFont
{
{ 1 index /FID ne {def} {pop pop} ifelse} forall
} def
/ct_GetGlyphOutline
{
gsave
initmatrix newpath
exch findfont dup
length 1 add dict
begin
ct_CopyFont
/Encoding Encoding dup length array copy
dup
4 -1 roll
0 exch put
def
currentdict
end
/ct_EuroFont exch definefont
1000 scalefont setfont
0 0 moveto
[
<00> stringwidth
<00> false charpath
pathbbox
[
{/m cvx} {/l cvx} {/c cvx} {/cp cvx} pathforall
grestore
counttomark 8 add
}
def
/ct_MakeGlyphProc
{
] cvx
/ct_PSBuildGlyph cvx
] cvx
} def
/ct_PSBuildGlyph
{
gsave
8 -1 roll pop
7 1 roll
6 -2 roll ct_FontMatrix transform 6 2 roll
4 -2 roll ct_FontMatrix transform 4 2 roll
ct_FontMatrix transform
currentdict /PaintType 2 copy known {get 2 eq}{pop pop false} ifelse
dup 9 1 roll
{
currentdict /StrokeWidth 2 copy known
{
get 2 div
0 ct_FontMatrix dtransform pop
5 1 roll
4 -1 roll 4 index sub
4 1 roll
3 -1 roll 4 index sub
3 1 roll
exch 4 index add exch
4 index add
5 -1 roll pop
}
{
pop pop
}
ifelse
}
if
setcachedevice
ct_FontMatrix concat
ct_PSPathOps begin
exec
end
{
currentdict /StrokeWidth 2 copy known
{ get }
{ pop pop 0 }
ifelse
setlinewidth stroke
}
{
fill
}
ifelse
grestore
} def
/ct_PSPathOps 4 dict dup begin
/m {moveto} def
/l {lineto} def
/c {curveto} def
/cp {closepath} def
end
def
/ct_matrix1000 [1000 0 0 1000 0 0] def
/ct_AddGlyphProc
{
2 index findfont dup length 4 add dict
begin
ct_CopyFont
/CharStrings CharStrings dup length 1 add dict copy
begin
3 1 roll def
currentdict
end
def
/ct_FontMatrix ct_matrix1000 FontMatrix matrix concatmatrix def
/ct_PSBuildGlyph /ct_PSBuildGlyph load def
/ct_PSPathOps /ct_PSPathOps load def
currentdict
end
definefont pop
}
def
systemdict /languagelevel known
{
/ct_AddGlyphToPrinterFont {
2 copy
ct_GetGlyphOutline 3 add -1 roll restore
ct_MakeGlyphProc
ct_AddGlyphProc
} def
}
{
/ct_AddGlyphToPrinterFont {
pop pop restore
Adobe_CTFauxDict /$$$FONTNAME get
/Euro
Adobe_CTFauxDict /$$$SUBSTITUTEBASE get
ct_EuroDict exch get
ct_AddGlyphProc
} def
} ifelse
/AdobeSansMM
{
556 0 24 -19 541 703
{
541 628 m
510 669 442 703 354 703 c
201 703 117 607 101 444 c
50 444 l
25 372 l
97 372 l
97 301 l
49 301 l
24 229 l
103 229 l
124 67 209 -19 350 -19 c
435 -19 501 25 509 32 c
509 131 l
492 105 417 60 343 60 c
267 60 204 127 197 229 c
406 229 l
430 301 l
191 301 l
191 372 l
455 372 l
479 444 l
194 444 l
201 531 245 624 348 624 c
433 624 484 583 509 534 c
cp
556 0 m
}
ct_PSBuildGlyph
} def
/AdobeSerifMM
{
500 0 10 -12 484 692
{
347 298 m
171 298 l
170 310 170 322 170 335 c
170 362 l
362 362 l
374 403 l
172 403 l
184 580 244 642 308 642 c
380 642 434 574 457 457 c
481 462 l
474 691 l
449 691 l
433 670 429 657 410 657 c
394 657 360 692 299 692 c
204 692 94 604 73 403 c
22 403 l
10 362 l
70 362 l
69 352 69 341 69 330 c
69 319 69 308 70 298 c
22 298 l
10 257 l
73 257 l
97 57 216 -12 295 -12 c
364 -12 427 25 484 123 c
458 142 l
425 101 384 37 316 37 c
256 37 189 84 173 257 c
335 257 l
cp
500 0 m
}
ct_PSBuildGlyph
} def
end
%%EndFile
setglobal
Adobe_CoolType_Core begin /$Oblique SetSubstituteStrategy end
%%BeginResource: procset Adobe_AGM_Image 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+ currentpacking
+ true setpacking
+}if
+userdict/Adobe_AGM_Image 71 dict dup begin put
+/Adobe_AGM_Image_Id/Adobe_AGM_Image_1.0_0 def
+/nd{
+ null def
+}bind def
+/AGMIMG_&image nd
+/AGMIMG_&colorimage nd
+/AGMIMG_&imagemask nd
+/AGMIMG_mbuf()def
+/AGMIMG_ybuf()def
+/AGMIMG_kbuf()def
+/AGMIMG_c 0 def
+/AGMIMG_m 0 def
+/AGMIMG_y 0 def
+/AGMIMG_k 0 def
+/AGMIMG_tmp nd
+/AGMIMG_imagestring0 nd
+/AGMIMG_imagestring1 nd
+/AGMIMG_imagestring2 nd
+/AGMIMG_imagestring3 nd
+/AGMIMG_imagestring4 nd
+/AGMIMG_imagestring5 nd
+/AGMIMG_cnt nd
+/AGMIMG_fsave nd
+/AGMIMG_colorAry nd
+/AGMIMG_override nd
+/AGMIMG_name nd
+/AGMIMG_maskSource nd
+/AGMIMG_flushfilters nd
+/invert_image_samples nd
+/knockout_image_samples nd
+/img nd
+/sepimg nd
+/devnimg nd
+/idximg nd
+/ds
+{
+ Adobe_AGM_Core begin
+ Adobe_AGM_Image begin
+ /AGMIMG_&image systemdict/image get def
+ /AGMIMG_&imagemask systemdict/imagemask get def
+ /colorimage where{
+ pop
+ /AGMIMG_&colorimage/colorimage ldf
+ }if
+ end
+ end
+}def
+/ps
+{
+ Adobe_AGM_Image begin
+ /AGMIMG_ccimage_exists{/customcolorimage where
+ {
+ pop
+ /Adobe_AGM_OnHost_Seps where
+ {
+ pop false
+ }{
+ /Adobe_AGM_InRip_Seps where
+ {
+ pop false
+ }{
+ true
+ }ifelse
+ }ifelse
+ }{
+ false
+ }ifelse
+ }bdf
+ level2{
+ /invert_image_samples
+ {
+ Adobe_AGM_Image/AGMIMG_tmp Decode length ddf
+ /Decode[Decode 1 get Decode 0 get]def
+ }def
+ /knockout_image_samples
+ {
+ Operator/imagemask ne{
+ /Decode[1 1]def
+ }if
+ }def
+ }{
+ /invert_image_samples
+ {
+ {1 exch sub}currenttransfer addprocs settransfer
+ }def
+ /knockout_image_samples
+ {
+ {pop 1}currenttransfer addprocs settransfer
+ }def
+ }ifelse
+ /img/imageormask ldf
+ /sepimg/sep_imageormask ldf
+ /devnimg/devn_imageormask ldf
+ /idximg/indexed_imageormask ldf
+ /_ctype 7 def
+ currentdict{
+ dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+ bind
+ }if
+ def
+ }forall
+}def
+/pt
+{
+ end
+}def
+/dt
+{
+}def
+/AGMIMG_flushfilters
+{
+ dup type/arraytype ne
+ {1 array astore}if
+ dup 0 get currentfile ne
+ {dup 0 get flushfile}if
+ {
+ dup type/filetype eq
+ {
+ dup status 1 index currentfile ne and
+ {closefile}
+ {pop}
+ ifelse
+ }{pop}ifelse
+ }forall
+}def
+/AGMIMG_init_common
+{
+ currentdict/T known{/ImageType/T ldf currentdict/T undef}if
+ currentdict/W known{/Width/W ldf currentdict/W undef}if
+ currentdict/H known{/Height/H ldf currentdict/H undef}if
+ currentdict/M known{/ImageMatrix/M ldf currentdict/M undef}if
+ currentdict/BC known{/BitsPerComponent/BC ldf currentdict/BC undef}if
+ currentdict/D known{/Decode/D ldf currentdict/D undef}if
+ currentdict/DS known{/DataSource/DS ldf currentdict/DS undef}if
+ currentdict/O known{
+ /Operator/O load 1 eq{
+ /imagemask
+ }{
+ /O load 2 eq{
+ /image
+ }{
+ /colorimage
+ }ifelse
+ }ifelse
+ def
+ currentdict/O undef
+ }if
+ currentdict/HSCI known{/HostSepColorImage/HSCI ldf currentdict/HSCI undef}if
+ currentdict/MD known{/MultipleDataSources/MD ldf currentdict/MD undef}if
+ currentdict/I known{/Interpolate/I ldf currentdict/I undef}if
+ currentdict/SI known{/SkipImageProc/SI ldf currentdict/SI undef}if
+ /DataSource load xcheck not{
+ DataSource type/arraytype eq{
+ DataSource 0 get type/filetype eq{
+ /_Filters DataSource def
+ currentdict/MultipleDataSources known not{
+ /DataSource DataSource dup length 1 sub get def
+ }if
+ }if
+ }if
+ currentdict/MultipleDataSources known not{
+ /MultipleDataSources DataSource type/arraytype eq{
+ DataSource length 1 gt
+ }
+ {false}ifelse def
+ }if
+ }if
+ /NComponents Decode length 2 div def
+ currentdict/SkipImageProc known not{/SkipImageProc{false}def}if
+}bdf
+/imageormask_sys
+{
+ begin
+ AGMIMG_init_common
+ save mark
+ level2{
+ currentdict
+ Operator/imagemask eq{
+ AGMIMG_&imagemask
+ }{
+ use_mask{
+ process_mask AGMIMG_&image
+ }{
+ AGMIMG_&image
+ }ifelse
+ }ifelse
+ }{
+ Width Height
+ Operator/imagemask eq{
+ Decode 0 get 1 eq Decode 1 get 0 eq and
+ ImageMatrix/DataSource load
+ AGMIMG_&imagemask
+ }{
+ BitsPerComponent ImageMatrix/DataSource load
+ AGMIMG_&image
+ }ifelse
+ }ifelse
+ currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+ cleartomark restore
+ end
+}def
+/overprint_plate
+{
+ currentoverprint{
+ 0 get dup type/nametype eq{
+ dup/DeviceGray eq{
+ pop AGMCORE_black_plate not
+ }{
+ /DeviceCMYK eq{
+ AGMCORE_is_cmyk_sep not
+ }if
+ }ifelse
+ }{
+ false exch
+ {
+ AGMOHS_sepink eq or
+ }forall
+ not
+ }ifelse
+ }{
+ pop false
+ }ifelse
+}def
+/process_mask
+{
+ level3{
+ dup begin
+ /ImageType 1 def
+ end
+ 4 dict begin
+ /DataDict exch def
+ /ImageType 3 def
+ /InterleaveType 3 def
+ /MaskDict 9 dict begin
+ /ImageType 1 def
+ /Width DataDict dup/MaskWidth known{/MaskWidth}{/Width}ifelse get def
+ /Height DataDict dup/MaskHeight known{/MaskHeight}{/Height}ifelse get def
+ /ImageMatrix[Width 0 0 Height neg 0 Height]def
+ /NComponents 1 def
+ /BitsPerComponent 1 def
+ /Decode DataDict dup/MaskD known{/MaskD}{[1 0]}ifelse get def
+ /DataSource Adobe_AGM_Core/AGMIMG_maskSource get def
+ currentdict end def
+ currentdict end
+ }if
+}def
+/use_mask
+{
+ dup/Mask known {dup/Mask get}{false}ifelse
+}def
+/imageormask
+{
+ begin
+ AGMIMG_init_common
+ SkipImageProc{
+ currentdict consumeimagedata
+ }
+ {
+ save mark
+ level2 AGMCORE_host_sep not and{
+ currentdict
+ Operator/imagemask eq DeviceN_PS2 not and{
+ imagemask
+ }{
+ AGMCORE_in_rip_sep currentoverprint and currentcolorspace 0 get/DeviceGray eq and{
+ [/Separation/Black/DeviceGray{}]setcolorspace
+ /Decode[Decode 1 get Decode 0 get]def
+ }if
+ use_mask{
+ process_mask image
+ }{
+ DeviceN_NoneName DeviceN_PS2 Indexed_DeviceN level3 not and or or AGMCORE_in_rip_sep and
+ {
+ Names convert_to_process not{
+ 2 dict begin
+ /imageDict xdf
+ /names_index 0 def
+ gsave
+ imageDict write_image_file{
+ Names{
+ dup(None)ne{
+ [/Separation 3 -1 roll/DeviceGray{1 exch sub}]setcolorspace
+ Operator imageDict read_image_file
+ names_index 0 eq{true setoverprint}if
+ /names_index names_index 1 add def
+ }{
+ pop
+ }ifelse
+ }forall
+ close_image_file
+ }if
+ grestore
+ end
+ }{
+ Operator/imagemask eq{
+ imagemask
+ }{
+ image
+ }ifelse
+ }ifelse
+ }{
+ Operator/imagemask eq{
+ imagemask
+ }{
+ image
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ }{
+ Width Height
+ Operator/imagemask eq{
+ Decode 0 get 1 eq Decode 1 get 0 eq and
+ ImageMatrix/DataSource load
+ /Adobe_AGM_OnHost_Seps where{
+ pop imagemask
+ }{
+ currentgray 1 ne{
+ currentdict imageormask_sys
+ }{
+ currentoverprint not{
+ 1 AGMCORE_&setgray
+ currentdict imageormask_sys
+ }{
+ currentdict ignoreimagedata
+ }ifelse
+ }ifelse
+ }ifelse
+ }{
+ BitsPerComponent ImageMatrix
+ MultipleDataSources{
+ 0 1 NComponents 1 sub{
+ DataSource exch get
+ }for
+ }{
+ /DataSource load
+ }ifelse
+ Operator/colorimage eq{
+ AGMCORE_host_sep{
+ MultipleDataSources level2 or NComponents 4 eq and{
+ AGMCORE_is_cmyk_sep{
+ MultipleDataSources{
+ /DataSource DataSource 0 get xcheck
+ {
+ [
+ DataSource 0 get/exec cvx
+ DataSource 1 get/exec cvx
+ DataSource 2 get/exec cvx
+ DataSource 3 get/exec cvx
+ /AGMCORE_get_ink_data cvx
+ ]cvx
+ }{
+ DataSource aload pop AGMCORE_get_ink_data
+ }ifelse def
+ }{
+ /DataSource
+ Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul
+ /DataSource load
+ filter_cmyk 0()/SubFileDecode filter def
+ }ifelse
+ /Decode[Decode 0 get Decode 1 get]def
+ /MultipleDataSources false def
+ /NComponents 1 def
+ /Operator/image def
+ invert_image_samples
+ 1 AGMCORE_&setgray
+ currentdict imageormask_sys
+ }{
+ currentoverprint not Operator/imagemask eq and{
+ 1 AGMCORE_&setgray
+ currentdict imageormask_sys
+ }{
+ currentdict ignoreimagedata
+ }ifelse
+ }ifelse
+ }{
+ MultipleDataSources NComponents AGMIMG_&colorimage
+ }ifelse
+ }{
+ true NComponents colorimage
+ }ifelse
+ }{
+ Operator/image eq{
+ AGMCORE_host_sep{
+ /DoImage true def
+ currentdict/HostSepColorImage known{HostSepColorImage not}{false}ifelse
+ {
+ AGMCORE_black_plate not Operator/imagemask ne and{
+ /DoImage false def
+ currentdict ignoreimagedata
+ }if
+ }if
+ 1 AGMCORE_&setgray
+ DoImage
+ {currentdict imageormask_sys}if
+ }{
+ use_mask{
+ process_mask image
+ }{
+ image
+ }ifelse
+ }ifelse
+ }{
+ Operator/knockout eq{
+ pop pop pop pop pop
+ currentcolorspace overprint_plate not{
+ knockout_unitsq
+ }if
+ }if
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ cleartomark restore
+ }ifelse
+ currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+ end
+}def
+/sep_imageormask
+{
+ /sep_colorspace_dict AGMCORE_gget begin
+ CSA map_csa
+ begin
+ AGMIMG_init_common
+ SkipImageProc{
+ currentdict consumeimagedata
+ }{
+ save mark
+ AGMCORE_avoid_L2_sep_space{
+ /Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+ }if
+ AGMIMG_ccimage_exists
+ MappedCSA 0 get/DeviceCMYK eq and
+ currentdict/Components known and
+ Name()ne and
+ Name(All)ne and
+ Operator/image eq and
+ AGMCORE_producing_seps not and
+ level2 not and
+ {
+ Width Height BitsPerComponent ImageMatrix
+ [
+ /DataSource load/exec cvx
+ {
+ 0 1 2 index length 1 sub{
+ 1 index exch
+ 2 copy get 255 xor put
+ }for
+ }/exec cvx
+ ]cvx bind
+ MappedCSA 0 get/DeviceCMYK eq{
+ Components aload pop
+ }{
+ 0 0 0 Components aload pop 1 exch sub
+ }ifelse
+ Name findcmykcustomcolor
+ customcolorimage
+ }{
+ AGMCORE_producing_seps not{
+ level2{
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne AGMCORE_avoid_L2_sep_space not and currentcolorspace 0 get/Separation ne and{
+ [/Separation Name MappedCSA sep_proc_name exch dup 0 get 15 string cvs(/Device)anchorsearch{pop pop 0 get}{pop}ifelse exch load]setcolorspace_opt
+ /sep_tint AGMCORE_gget setcolor
+ }if
+ currentdict imageormask
+ }{
+ currentdict
+ Operator/imagemask eq{
+ imageormask
+ }{
+ sep_imageormask_lev1
+ }ifelse
+ }ifelse
+ }{
+ AGMCORE_host_sep{
+ Operator/knockout eq{
+ currentdict/ImageMatrix get concat
+ knockout_unitsq
+ }{
+ currentgray 1 ne{
+ AGMCORE_is_cmyk_sep Name(All)ne and{
+ level2{
+ Name AGMCORE_IsSeparationAProcessColor
+ {
+ Operator/imagemask eq{
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ /sep_tint AGMCORE_gget 1 exch sub AGMCORE_&setcolor
+ }if
+ }{
+ invert_image_samples
+ }ifelse
+ }{
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ [/Separation Name[/DeviceGray]
+ {
+ sep_colorspace_proc AGMCORE_get_ink_data
+ 1 exch sub
+ }bind
+ ]AGMCORE_&setcolorspace
+ /sep_tint AGMCORE_gget AGMCORE_&setcolor
+ }if
+ }ifelse
+ currentdict imageormask_sys
+ }{
+ currentdict
+ Operator/imagemask eq{
+ imageormask_sys
+ }{
+ sep_image_lev1_sep
+ }ifelse
+ }ifelse
+ }{
+ Operator/imagemask ne{
+ invert_image_samples
+ }if
+ currentdict imageormask_sys
+ }ifelse
+ }{
+ currentoverprint not Name(All)eq or Operator/imagemask eq and{
+ currentdict imageormask_sys
+ }{
+ currentoverprint not
+ {
+ gsave
+ knockout_unitsq
+ grestore
+ }if
+ currentdict consumeimagedata
+ }ifelse
+ }ifelse
+ }ifelse
+ }{
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ currentcolorspace 0 get/Separation ne{
+ [/Separation Name MappedCSA sep_proc_name exch 0 get exch load]setcolorspace_opt
+ /sep_tint AGMCORE_gget setcolor
+ }if
+ }if
+ currentoverprint
+ MappedCSA 0 get/DeviceCMYK eq and
+ Name AGMCORE_IsSeparationAProcessColor not and
+ //Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{Name inRip_spot_has_ink not and}{false}ifelse
+ Name(All)ne and{
+ imageormask_l2_overprint
+ }{
+ currentdict imageormask
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ cleartomark restore
+ }ifelse
+ currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+ end
+ end
+}def
+/colorSpaceElemCnt
+{
+ mark currentcolor counttomark dup 2 add 1 roll cleartomark
+}bdf
+/devn_sep_datasource
+{
+ 1 dict begin
+ /dataSource xdf
+ [
+ 0 1 dataSource length 1 sub{
+ dup currentdict/dataSource get/exch cvx/get cvx/exec cvx
+ /exch cvx names_index/ne cvx[/pop cvx]cvx/if cvx
+ }for
+ ]cvx bind
+ end
+}bdf
+/devn_alt_datasource
+{
+ 11 dict begin
+ /convProc xdf
+ /origcolorSpaceElemCnt xdf
+ /origMultipleDataSources xdf
+ /origBitsPerComponent xdf
+ /origDecode xdf
+ /origDataSource xdf
+ /dsCnt origMultipleDataSources{origDataSource length}{1}ifelse def
+ /DataSource origMultipleDataSources
+ {
+ [
+ BitsPerComponent 8 idiv origDecode length 2 idiv mul string
+ 0 1 origDecode length 2 idiv 1 sub
+ {
+ dup 7 mul 1 add index exch dup BitsPerComponent 8 idiv mul exch
+ origDataSource exch get 0()/SubFileDecode filter
+ BitsPerComponent 8 idiv string/readstring cvx/pop cvx/putinterval cvx
+ }for
+ ]bind cvx
+ }{origDataSource}ifelse 0()/SubFileDecode filter def
+ [
+ origcolorSpaceElemCnt string
+ 0 2 origDecode length 2 sub
+ {
+ dup origDecode exch get dup 3 -1 roll 1 add origDecode exch get exch sub 2 BitsPerComponent exp 1 sub div
+ 1 BitsPerComponent 8 idiv{DataSource/read cvx/not cvx{0}/if cvx/mul cvx}repeat/mul cvx/add cvx
+ }for
+ /convProc load/exec cvx
+ origcolorSpaceElemCnt 1 sub -1 0
+ {
+ /dup cvx 2/add cvx/index cvx
+ 3 1/roll cvx/exch cvx 255/mul cvx/cvi cvx/put cvx
+ }for
+ ]bind cvx 0()/SubFileDecode filter
+ end
+}bdf
+/devn_imageormask
+{
+ /devicen_colorspace_dict AGMCORE_gget begin
+ CSA map_csa
+ 2 dict begin
+ dup
+ /srcDataStrs[3 -1 roll begin
+ AGMIMG_init_common
+ currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+ {
+ Width Decode length 2 div mul cvi
+ {
+ dup 65535 gt{1 add 2 div cvi}{exit}ifelse
+ }loop
+ string
+ }repeat
+ end]def
+ /dstDataStr srcDataStrs 0 get length string def
+ begin
+ AGMIMG_init_common
+ SkipImageProc{
+ currentdict consumeimagedata
+ }{
+ save mark
+ AGMCORE_producing_seps not{
+ level3 not{
+ Operator/imagemask ne{
+ /DataSource[[
+ DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+ colorSpaceElemCnt/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+ devn_alt_datasource 1/string cvx/readstring cvx/pop cvx]cvx colorSpaceElemCnt 1 sub{dup}repeat]def
+ /MultipleDataSources true def
+ /Decode colorSpaceElemCnt[exch{0 1}repeat]def
+ }if
+ }if
+ currentdict imageormask
+ }{
+ AGMCORE_host_sep{
+ Names convert_to_process{
+ CSA get_csa_by_name 0 get/DeviceCMYK eq{
+ /DataSource
+ Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul
+ DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+ 4/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+ devn_alt_datasource
+ filter_cmyk 0()/SubFileDecode filter def
+ /MultipleDataSources false def
+ /Decode[1 0]def
+ /DeviceGray setcolorspace
+ currentdict imageormask_sys
+ }{
+ AGMCORE_report_unsupported_color_space
+ AGMCORE_black_plate{
+ /DataSource
+ DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+ CSA get_csa_by_name 0 get/DeviceRGB eq{3}{1}ifelse/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+ devn_alt_datasource
+ /MultipleDataSources false def
+ /Decode colorSpaceElemCnt[exch{0 1}repeat]def
+ currentdict imageormask_sys
+ }{
+ gsave
+ knockout_unitsq
+ grestore
+ currentdict consumeimagedata
+ }ifelse
+ }ifelse
+ }
+ {
+ /devicen_colorspace_dict AGMCORE_gget/names_index known{
+ Operator/imagemask ne{
+ MultipleDataSources{
+ /DataSource[DataSource devn_sep_datasource/exec cvx]cvx def
+ /MultipleDataSources false def
+ }{
+ /DataSource/DataSource load dstDataStr srcDataStrs 0 get filter_devn def
+ }ifelse
+ invert_image_samples
+ }if
+ currentdict imageormask_sys
+ }{
+ currentoverprint not Operator/imagemask eq and{
+ currentdict imageormask_sys
+ }{
+ currentoverprint not
+ {
+ gsave
+ knockout_unitsq
+ grestore
+ }if
+ currentdict consumeimagedata
+ }ifelse
+ }ifelse
+ }ifelse
+ }{
+ currentdict imageormask
+ }ifelse
+ }ifelse
+ cleartomark restore
+ }ifelse
+ currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+ end
+ end
+ end
+}def
+/imageormask_l2_overprint
+{
+ currentdict
+ currentcmykcolor add add add 0 eq{
+ currentdict consumeimagedata
+ }{
+ level3{
+ currentcmykcolor
+ /AGMIMG_k xdf
+ /AGMIMG_y xdf
+ /AGMIMG_m xdf
+ /AGMIMG_c xdf
+ Operator/imagemask eq{
+ [/DeviceN[
+ AGMIMG_c 0 ne{/Cyan}if
+ AGMIMG_m 0 ne{/Magenta}if
+ AGMIMG_y 0 ne{/Yellow}if
+ AGMIMG_k 0 ne{/Black}if
+ ]/DeviceCMYK{}]setcolorspace
+ AGMIMG_c 0 ne{AGMIMG_c}if
+ AGMIMG_m 0 ne{AGMIMG_m}if
+ AGMIMG_y 0 ne{AGMIMG_y}if
+ AGMIMG_k 0 ne{AGMIMG_k}if
+ setcolor
+ }{
+ /Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+ [/Indexed
+ [
+ /DeviceN[
+ AGMIMG_c 0 ne{/Cyan}if
+ AGMIMG_m 0 ne{/Magenta}if
+ AGMIMG_y 0 ne{/Yellow}if
+ AGMIMG_k 0 ne{/Black}if
+ ]
+ /DeviceCMYK{
+ AGMIMG_k 0 eq{0}if
+ AGMIMG_y 0 eq{0 exch}if
+ AGMIMG_m 0 eq{0 3 1 roll}if
+ AGMIMG_c 0 eq{0 4 1 roll}if
+ }
+ ]
+ 255
+ {
+ 255 div
+ mark exch
+ dup dup dup
+ AGMIMG_k 0 ne{
+ /sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 1 roll pop pop pop
+ counttomark 1 roll
+ }{
+ pop
+ }ifelse
+ AGMIMG_y 0 ne{
+ /sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 2 roll pop pop pop
+ counttomark 1 roll
+ }{
+ pop
+ }ifelse
+ AGMIMG_m 0 ne{
+ /sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 3 roll pop pop pop
+ counttomark 1 roll
+ }{
+ pop
+ }ifelse
+ AGMIMG_c 0 ne{
+ /sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec pop pop pop
+ counttomark 1 roll
+ }{
+ pop
+ }ifelse
+ counttomark 1 add -1 roll pop
+ }
+ ]setcolorspace
+ }ifelse
+ imageormask_sys
+ }{
+ write_image_file{
+ currentcmykcolor
+ 0 ne{
+ [/Separation/Black/DeviceGray{}]setcolorspace
+ gsave
+ /Black
+ [{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 1 roll pop pop pop 1 exch sub}/exec cvx]
+ cvx modify_halftone_xfer
+ Operator currentdict read_image_file
+ grestore
+ }if
+ 0 ne{
+ [/Separation/Yellow/DeviceGray{}]setcolorspace
+ gsave
+ /Yellow
+ [{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 2 roll pop pop pop 1 exch sub}/exec cvx]
+ cvx modify_halftone_xfer
+ Operator currentdict read_image_file
+ grestore
+ }if
+ 0 ne{
+ [/Separation/Magenta/DeviceGray{}]setcolorspace
+ gsave
+ /Magenta
+ [{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 3 roll pop pop pop 1 exch sub}/exec cvx]
+ cvx modify_halftone_xfer
+ Operator currentdict read_image_file
+ grestore
+ }if
+ 0 ne{
+ [/Separation/Cyan/DeviceGray{}]setcolorspace
+ gsave
+ /Cyan
+ [{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{pop pop pop 1 exch sub}/exec cvx]
+ cvx modify_halftone_xfer
+ Operator currentdict read_image_file
+ grestore
+ }if
+ close_image_file
+ }{
+ imageormask
+ }ifelse
+ }ifelse
+ }ifelse
+}def
+/indexed_imageormask
+{
+ begin
+ AGMIMG_init_common
+ save mark
+ currentdict
+ AGMCORE_host_sep{
+ Operator/knockout eq{
+ /indexed_colorspace_dict AGMCORE_gget dup/CSA known{
+ /CSA get get_csa_by_name
+ }{
+ /Names get
+ }ifelse
+ overprint_plate not{
+ knockout_unitsq
+ }if
+ }{
+ Indexed_DeviceN{
+ /devicen_colorspace_dict AGMCORE_gget dup/names_index known exch/Names get convert_to_process or{
+ indexed_image_lev2_sep
+ }{
+ currentoverprint not{
+ knockout_unitsq
+ }if
+ currentdict consumeimagedata
+ }ifelse
+ }{
+ AGMCORE_is_cmyk_sep{
+ Operator/imagemask eq{
+ imageormask_sys
+ }{
+ level2{
+ indexed_image_lev2_sep
+ }{
+ indexed_image_lev1_sep
+ }ifelse
+ }ifelse
+ }{
+ currentoverprint not{
+ knockout_unitsq
+ }if
+ currentdict consumeimagedata
+ }ifelse
+ }ifelse
+ }ifelse
+ }{
+ level2{
+ Indexed_DeviceN{
+ /indexed_colorspace_dict AGMCORE_gget begin
+ }{
+ /indexed_colorspace_dict AGMCORE_gget dup null ne
+ {
+ begin
+ currentdict/CSDBase known{CSDBase/CSD get_res/MappedCSA get}{CSA}ifelse
+ get_csa_by_name 0 get/DeviceCMYK eq ps_level 3 ge and ps_version 3015.007 lt and
+ AGMCORE_in_rip_sep and{
+ [/Indexed[/DeviceN[/Cyan/Magenta/Yellow/Black]/DeviceCMYK{}]HiVal Lookup]
+ setcolorspace
+ }if
+ end
+ }
+ {pop}ifelse
+ }ifelse
+ imageormask
+ Indexed_DeviceN{
+ end
+ }if
+ }{
+ Operator/imagemask eq{
+ imageormask
+ }{
+ indexed_imageormask_lev1
+ }ifelse
+ }ifelse
+ }ifelse
+ cleartomark restore
+ currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+ end
+}def
+/indexed_image_lev2_sep
+{
+ /indexed_colorspace_dict AGMCORE_gget begin
+ begin
+ Indexed_DeviceN not{
+ currentcolorspace
+ dup 1/DeviceGray put
+ dup 3
+ currentcolorspace 2 get 1 add string
+ 0 1 2 3 AGMCORE_get_ink_data 4 currentcolorspace 3 get length 1 sub
+ {
+ dup 4 idiv exch currentcolorspace 3 get exch get 255 exch sub 2 index 3 1 roll put
+ }for
+ put setcolorspace
+ }if
+ currentdict
+ Operator/imagemask eq{
+ AGMIMG_&imagemask
+ }{
+ use_mask{
+ process_mask AGMIMG_&image
+ }{
+ AGMIMG_&image
+ }ifelse
+ }ifelse
+ end end
+}def
+ /OPIimage
+ {
+ dup type/dicttype ne{
+ 10 dict begin
+ /DataSource xdf
+ /ImageMatrix xdf
+ /BitsPerComponent xdf
+ /Height xdf
+ /Width xdf
+ /ImageType 1 def
+ /Decode[0 1 def]
+ currentdict
+ end
+ }if
+ dup begin
+ /NComponents 1 cdndf
+ /MultipleDataSources false cdndf
+ /SkipImageProc{false}cdndf
+ /Decode[
+ 0
+ currentcolorspace 0 get/Indexed eq{
+ 2 BitsPerComponent exp 1 sub
+ }{
+ 1
+ }ifelse
+ ]cdndf
+ /Operator/image cdndf
+ end
+ /sep_colorspace_dict AGMCORE_gget null eq{
+ imageormask
+ }{
+ gsave
+ dup begin invert_image_samples end
+ sep_imageormask
+ grestore
+ }ifelse
+ }def
+/cachemask_level2
+{
+ 3 dict begin
+ /LZWEncode filter/WriteFilter xdf
+ /readBuffer 256 string def
+ /ReadFilter
+ currentfile
+ 0(%EndMask)/SubFileDecode filter
+ /ASCII85Decode filter
+ /RunLengthDecode filter
+ def
+ {
+ ReadFilter readBuffer readstring exch
+ WriteFilter exch writestring
+ not{exit}if
+ }loop
+ WriteFilter closefile
+ end
+}def
+/spot_alias
+{
+ /mapto_sep_imageormask
+ {
+ dup type/dicttype ne{
+ 12 dict begin
+ /ImageType 1 def
+ /DataSource xdf
+ /ImageMatrix xdf
+ /BitsPerComponent xdf
+ /Height xdf
+ /Width xdf
+ /MultipleDataSources false def
+ }{
+ begin
+ }ifelse
+ /Decode[/customcolor_tint AGMCORE_gget 0]def
+ /Operator/image def
+ /SkipImageProc{false}def
+ currentdict
+ end
+ sep_imageormask
+ }bdf
+ /customcolorimage
+ {
+ Adobe_AGM_Image/AGMIMG_colorAry xddf
+ /customcolor_tint AGMCORE_gget
+ <<
+ /Name AGMIMG_colorAry 4 get
+ /CSA[/DeviceCMYK]
+ /TintMethod/Subtractive
+ /TintProc null
+ /MappedCSA null
+ /NComponents 4
+ /Components[AGMIMG_colorAry aload pop pop]
+ >>
+ setsepcolorspace
+ mapto_sep_imageormask
+ }ndf
+ Adobe_AGM_Image/AGMIMG_&customcolorimage/customcolorimage load put
+ /customcolorimage
+ {
+ Adobe_AGM_Image/AGMIMG_override false put
+ current_spot_alias{dup 4 get map_alias}{false}ifelse
+ {
+ false set_spot_alias
+ /customcolor_tint AGMCORE_gget exch setsepcolorspace
+ pop
+ mapto_sep_imageormask
+ true set_spot_alias
+ }{
+ //Adobe_AGM_Image/AGMIMG_&customcolorimage get exec
+ }ifelse
+ }bdf
+}def
+/snap_to_device
+{
+ 6 dict begin
+ matrix currentmatrix
+ dup 0 get 0 eq 1 index 3 get 0 eq and
+ 1 index 1 get 0 eq 2 index 2 get 0 eq and or exch pop
+ {
+ 1 1 dtransform 0 gt exch 0 gt/AGMIMG_xSign? exch def/AGMIMG_ySign? exch def
+ 0 0 transform
+ AGMIMG_ySign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+ AGMIMG_xSign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+ itransform/AGMIMG_llY exch def/AGMIMG_llX exch def
+ 1 1 transform
+ AGMIMG_ySign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+ AGMIMG_xSign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+ itransform/AGMIMG_urY exch def/AGMIMG_urX exch def
+ [AGMIMG_urX AGMIMG_llX sub 0 0 AGMIMG_urY AGMIMG_llY sub AGMIMG_llX AGMIMG_llY]concat
+ }{
+ }ifelse
+ end
+}def
+level2 not{
+ /colorbuf
+ {
+ 0 1 2 index length 1 sub{
+ dup 2 index exch get
+ 255 exch sub
+ 2 index
+ 3 1 roll
+ put
+ }for
+ }def
+ /tint_image_to_color
+ {
+ begin
+ Width Height BitsPerComponent ImageMatrix
+ /DataSource load
+ end
+ Adobe_AGM_Image begin
+ /AGMIMG_mbuf 0 string def
+ /AGMIMG_ybuf 0 string def
+ /AGMIMG_kbuf 0 string def
+ {
+ colorbuf dup length AGMIMG_mbuf length ne
+ {
+ dup length dup dup
+ /AGMIMG_mbuf exch string def
+ /AGMIMG_ybuf exch string def
+ /AGMIMG_kbuf exch string def
+ }if
+ dup AGMIMG_mbuf copy AGMIMG_ybuf copy AGMIMG_kbuf copy pop
+ }
+ addprocs
+ {AGMIMG_mbuf}{AGMIMG_ybuf}{AGMIMG_kbuf}true 4 colorimage
+ end
+ }def
+ /sep_imageormask_lev1
+ {
+ begin
+ MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+ {
+ 255 mul round cvi GrayLookup exch get
+ }currenttransfer addprocs settransfer
+ currentdict imageormask
+ }{
+ /sep_colorspace_dict AGMCORE_gget/Components known{
+ MappedCSA 0 get/DeviceCMYK eq{
+ Components aload pop
+ }{
+ 0 0 0 Components aload pop 1 exch sub
+ }ifelse
+ Adobe_AGM_Image/AGMIMG_k xddf
+ Adobe_AGM_Image/AGMIMG_y xddf
+ Adobe_AGM_Image/AGMIMG_m xddf
+ Adobe_AGM_Image/AGMIMG_c xddf
+ AGMIMG_y 0.0 eq AGMIMG_m 0.0 eq and AGMIMG_c 0.0 eq and{
+ {AGMIMG_k mul 1 exch sub}currenttransfer addprocs settransfer
+ currentdict imageormask
+ }{
+ currentcolortransfer
+ {AGMIMG_k mul 1 exch sub}exch addprocs 4 1 roll
+ {AGMIMG_y mul 1 exch sub}exch addprocs 4 1 roll
+ {AGMIMG_m mul 1 exch sub}exch addprocs 4 1 roll
+ {AGMIMG_c mul 1 exch sub}exch addprocs 4 1 roll
+ setcolortransfer
+ currentdict tint_image_to_color
+ }ifelse
+ }{
+ MappedCSA 0 get/DeviceGray eq{
+ {255 mul round cvi ColorLookup exch get 0 get}currenttransfer addprocs settransfer
+ currentdict imageormask
+ }{
+ MappedCSA 0 get/DeviceCMYK eq{
+ currentcolortransfer
+ {255 mul round cvi ColorLookup exch get 3 get 1 exch sub}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 2 get 1 exch sub}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 1 get 1 exch sub}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 0 get 1 exch sub}exch addprocs 4 1 roll
+ setcolortransfer
+ currentdict tint_image_to_color
+ }{
+ currentcolortransfer
+ {pop 1}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 2 get}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 1 get}exch addprocs 4 1 roll
+ {255 mul round cvi ColorLookup exch get 0 get}exch addprocs 4 1 roll
+ setcolortransfer
+ currentdict tint_image_to_color
+ }ifelse
+ }ifelse
+ }ifelse
+ }ifelse
+ end
+ }def
+ /sep_image_lev1_sep
+ {
+ begin
+ /sep_colorspace_dict AGMCORE_gget/Components known{
+ Components aload pop
+ Adobe_AGM_Image/AGMIMG_k xddf
+ Adobe_AGM_Image/AGMIMG_y xddf
+ Adobe_AGM_Image/AGMIMG_m xddf
+ Adobe_AGM_Image/AGMIMG_c xddf
+ {AGMIMG_c mul 1 exch sub}
+ {AGMIMG_m mul 1 exch sub}
+ {AGMIMG_y mul 1 exch sub}
+ {AGMIMG_k mul 1 exch sub}
+ }{
+ {255 mul round cvi ColorLookup exch get 0 get 1 exch sub}
+ {255 mul round cvi ColorLookup exch get 1 get 1 exch sub}
+ {255 mul round cvi ColorLookup exch get 2 get 1 exch sub}
+ {255 mul round cvi ColorLookup exch get 3 get 1 exch sub}
+ }ifelse
+ AGMCORE_get_ink_data currenttransfer addprocs settransfer
+ currentdict imageormask_sys
+ end
+ }def
+ /indexed_imageormask_lev1
+ {
+ /indexed_colorspace_dict AGMCORE_gget begin
+ begin
+ currentdict
+ MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+ {HiVal mul round cvi GrayLookup exch get HiVal div}currenttransfer addprocs settransfer
+ imageormask
+ }{
+ MappedCSA 0 get/DeviceGray eq{
+ {HiVal mul round cvi Lookup exch get HiVal div}currenttransfer addprocs settransfer
+ imageormask
+ }{
+ MappedCSA 0 get/DeviceCMYK eq{
+ currentcolortransfer
+ {4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+ {4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+ {4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+ {4 mul HiVal mul round cvi Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+ setcolortransfer
+ tint_image_to_color
+ }{
+ currentcolortransfer
+ {pop 1}exch addprocs 4 1 roll
+ {3 mul HiVal mul round cvi 2 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+ {3 mul HiVal mul round cvi 1 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+ {3 mul HiVal mul round cvi Lookup exch get HiVal div}exch addprocs 4 1 roll
+ setcolortransfer
+ tint_image_to_color
+ }ifelse
+ }ifelse
+ }ifelse
+ end end
+ }def
+ /indexed_image_lev1_sep
+ {
+ /indexed_colorspace_dict AGMCORE_gget begin
+ begin
+ {4 mul HiVal mul round cvi Lookup exch get HiVal div 1 exch sub}
+ {4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}
+ {4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}
+ {4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}
+ AGMCORE_get_ink_data currenttransfer addprocs settransfer
+ currentdict imageormask_sys
+ end end
+ }def
+}if
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndProlog
+%%BeginSetup
+Adobe_AGM_Utils begin
+2 2010 Adobe_AGM_Core/ds gx
+Adobe_CoolType_Core/ds get exec
Adobe_AGM_Image/ds gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndSetup
+%%Page: (Page 1) 1
+%%EndPageComments
+%%BeginPageSetup
+%ADOBeginClientInjection: PageSetup Start "AI11EPS"
+%AI12_RMC_Transparency: Balance=75 RasterRes=300 GradRes=150 Text=0 Stroke=1 Clip=1 OP=0
+%ADOEndClientInjection: PageSetup Start "AI11EPS"
+Adobe_AGM_Utils begin
+Adobe_AGM_Core/ps gx
+Adobe_AGM_Utils/capture_cpd gx
+Adobe_CoolType_Core/ps get exec
Adobe_AGM_Image/ps gx
+%ADOBeginClientInjection: PageSetup End "AI11EPS"
+/currentdistillerparams where
{pop currentdistillerparams /CoreDistVersion get 5000 lt} {true} ifelse
{ userdict /AI11_PDFMark5 /cleartomark load put
userdict /AI11_ReadMetadata_PDFMark5 {flushfile cleartomark } bind put}
{ userdict /AI11_PDFMark5 /pdfmark load put
userdict /AI11_ReadMetadata_PDFMark5 {/PUT pdfmark} bind put } ifelse
[/NamespacePush AI11_PDFMark5
[/_objdef {ai_metadata_stream_123} /type /stream /OBJ AI11_PDFMark5
[{ai_metadata_stream_123}
currentfile 0 (% &&end XMP packet marker&&)
/SubFileDecode filter AI11_ReadMetadata_PDFMark5
+
+
+
+ application/postscript
+
+
+ Print
+
+
+
+
+ Adobe Illustrator CS3
+ 2010-05-25T11:06:31-06:00
+ 2010-05-25T11:06:31-06:00
+ 2010-05-25T11:06:31-06:00
+
+
+
+ 256
+ 116
+ JPEG
+ /9j/4AAQSkZJRgABAgEAlgCWAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAAlgAAAAEA
AQCWAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK
DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f
Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgAdAEAAwER
AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA
AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB
UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE
1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ
qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy
obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp
0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo
+DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A735w/NHyr5VuVs715bnU
WAb6jaIJJVVtwX5FFWvgWr7Zm6bs/JmFjaPeXB1XaOLAalvLuDGv+V92H/Uu6p/wC/1zM/kaX8+L
hfy3D+bJ3/K+7D/qXdU/4Bf64/yNL+fFf5bh/Nk7/lfdh/1Luqf8Av8AXH+Rpfz4r/LcP5snf8r7
sP8AqXdU/wCAX+uP8jS/nxX+W4fzZO/5X3Yf9S7qn/AL/XH+Rpfz4r/LcP5snf8AK+7D/qXdU/4B
f64/yNL+fFf5bh/Nk7/lfdh/1Luqf8Av9cf5Gl/Piv8ALcP5snf8r7sP+pd1T/gF/rj/ACNL+fFf
5bh/Nkl+s/8AOSuiaVbGSfQr5JWB+rxSlIg7DtU1oPE0NMry9lmAsyDOHa8ZmhGT5289fmr5z853
cj6pevHYk/utMgZktkHb4AfjP+U9TghjEeTTlyymd0k8ueaNf8t6lHqWiXstldxkHlG3wuAa8ZE+
y6nurCmSIBFFhGRibGxfa/5VfmFbee/KUOsLGIL2Nzb6jbLUqk6AE8K78GVgy/OlajNdlx8Jp3Wn
zeJG+rMMqb3Yq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXzf5Hk+uxXuu3f73VL+4k
ae4brQkNxHgKnt/DOvzjhqA+kB4rCeMmcvqJZT62YzfTvWxWnetitO9bFad62K071sVp3rYrSC1n
XbLSNMuNRvH429uvJqbsx6KqjxY7DEmlp82+ZvMmoeYdXl1G9b4n+GGIGqxRg/Ci+w/E75hTlxFy
YQ4QgLS0nupxDCvJyCxPZVUFmdj2VVBJPYZFkSpOEDkIaqOhO1ffFQ+gv+cRNSkTVvMWmdY5oILk
exhdkPfv63h2zF1Q2Bc/QS9RHl+PvfTGYTtHYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FX
Yq7FXzJ5Cl46DT/i5/4Z2OpHqeJ0v0/Fknr++Y9OS71/fGld6/vjSu9f3xpXev740rvX98aV3r++
NK8Z/NjzY2paoNItnrZWDH1aE0efo1f9T7I965iZ5702449WAgEmg3J6DKG1l/mbTx5T0uPy8wpr
98kdxr53BgjakkFj8x8Ms3+VwX9g1jE3uzlHh2PP8fj+xiGSYPov/nEPS2MnmTVWUhQLa1iegoST
JJIK+1EzF1R2AdhoBuS+kMwnZuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KvljyZL
x0an/Fr/AMM7PUD1PEaX6finvr++UU5Nu9f3xpbd6/vjS271/fGlt3r++NLbvX98aW0n82+Yv0No
Vxdqf9II9K2H/Fr7A/7HdvoyGSXCLSBZp4OzMzFmNWJqSepJzWuUBT0T8rtGsdOsdR/MPW4RLpfl
6i6ZayfZu9UcfuI+oqsRIkeh8Oorlcz/AAjr9zdiAFyPIff+NywLUL+81G/uL+9lM15dyvPcTN1e
SRizMfmTljUh8UPtP/nHzyu+gflhpwmUrc6qzalMp7fWAoi/5Ion05r9RK5e53Ojhw4/fu9Iyhyn
Yq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXyb5Wl46XT/ixv4Z2ucep4fTH0pv9Yyq
m+3fWMaW3fWMaW3fWMaW3fWMaW3fWMaW3mX5m6ybnU4dPQ/u7ReTjxkkAP4LT7zmBqpb034R1Yvp
GlXur6raaXYx+reXsyQW8fSryMFFT2G+5zEJpyALNB6P+deoWGkLpP5caNJz03yzGDfyrsLjUZhy
lkYVb7Ibb+Usy9srxb+o9fub85AqA5R+/q8ty1x2Vflh5Ml84edtN0QKxtZJPVv3X9i2i+KU17VH
wg/zEZCc+EW2YsfHIB93xxxxRrFEoSNAFRFACqoFAAB0AzVu/ApdirsVdirsVdirsVdirsVdirsV
dirsVdirsVdirsVdirsVdir5A0CXjYU/y2/hnc5Ru8HhOyZfWMqpt4nfWMaXid9YxpeJ31jGl4nf
WMaXiWyXaxo0jtREBZj4AbnGl4njV/dveXs90/2p5Gc+3I1p9GaScuIkuyhGgA9X/I2xt9B0zzD+
Zuoxh7fy9A1vpSOPhkvp1CgVHgJFU/69e2Y+Xeo9/wBzl6f0gz/m8ve8ovr27v724vryVpru6kea
4mbdnkkYszH3JOXOPahih9a/84z/AJenQfKzeYr6LhqmuhWhDD4o7IbxjcbeqfjPiOOYOpyWa7na
6LFUeI9fu/a9mzGc52KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KvjTS5StrT/KO
d5MbvnsJUjPXOQpnxO9c40vE71zjS8TvXONLxO9c40vEgNcum/RdxGrhZJkaKOv7TMp+Ee5FaZVm
HoPmzxy9QeZqrMwVQSzGgA6knNE7l7H+cjL5T8keVPy3gIW4hhGq65TiSbqbkFUsP5WMnX9njlOL
1Ey+DlZ/TGMPife8by5xXqf5R/kn5i8za9Y3msabPaeWI2E9zcTqYhOi/EscQbi7CToWXYCu9aZV
lyiI83IwaczI29L7HjjSNFjjUJGgCoiigAGwAA7ZrXdgN4q7FXYq7FXYq7FXYq7FXYq7FXYq7FXY
q7FXYq7FXYq7FXYq7FXxVaScYqe5zvyHzi6VvWOCl4nescaXid6xxpeJ3rHGl4nescaXiY75undo
oRG9DCwd1H2hyqEb8Dmu152FdPwHYaGiTfX8FMPyV8tr5j/M7RrWVOVrBMb66FKrwtgZQrD+VnVU
+nNNmnUSXc6fHxTASz8y/NH+KPPes60GL29xcMtoaAf6PFSKHYd/TQV98MI8IARlnxSMu9vyjr2t
Wuo2ll5X062/TdzIsVtdvAl1dGRyOPA3AeKIg9GRFIHVsMgOvJjAkHbm+wBez/l9+W02peYr+bWL
3ToDPfXMjsxmuJGAWOMsPhQyMqJtsNyOua6uOdDZ3IPhY7kbLz3yF5LvvzV0/wDxl+YF9Pd6feSy
fony/byvBZxRRu0bFlQhq8loNw1BVmau12TJ4fpi4uHCc3rmb7gyHWf+cdvJRgM/lZ7ryzrMQ5Wt
9Z3M5AcfZ5q7saf6hU5COpl13DbPQw/h2ISL/nHUeaIvM3nu08z3M11rNnJp9vcSzu0rH0xcqpDN
uVKgFT3GS1NUK5MNECDK+e36XuOYrsHjX5tfn6nl/UD5Z8pwLqnmRmEMslDJFBK54iJUXeWap+z0
U7GpquZOLBe8uTgajV8J4Y81PQfyP1/zFEmp/mhr17qN5N+9/QsE5jtoeW5RuHw18REFA7E5KWcR
2gER0hnvkJ934/QyC4/5x2/KWSAxwaO9pLSkdzBdXXqofFecjrX/AFlOVjUz7206HEen2sH812X5
qflCo1fQ9Xm8x+TkZVnsdSJme2UnigZtmVd6B4yor1XxtiYZNiKLjzjkw7g3H8fjZ6n+W/5maB58
0Y32mkw3cHFdQ0+Q1kgdq0FaAOrUPFh19jUDHyYjAuZgzjIPNGecfy+8pecYIYfMNiLz6sJBayc5
I3iMvHmUZGXrwXr4YIZDHkyy4Yz+p8XweUIG/M9fJ8kzfVxrX6Ke4FOfpi69AuNqcuO+bLi9N+Vu
lMKnw+dfbT7a8q+UPL3lTSv0XoNoLOy5mVo+TuWkYBWdmcsxJCjNbOZkbLu8eKMBUU4yDY7FXYq7
FXYq7FXYq7FXYq+Io2ouegPm0gv54op3PFadzxWnc8Vp3PFaYr5haZNScn7EsYA91/sYVzSa6xkP
mPx9rutEAcY7wWd/lVMNB8hefPNleFwLKPRrA1oTJfPSQr/lRhUfNZk3IHx+TtMW0ZHyr5vLctaH
1P8A843flKdGsV84a1DTVb6OmlwOPigtnG8u/R5QfoT/AFiBhajLfpDs9Hgr1n4Ml/5yRt7ib8pd
UaIErDLbSTAfyeui/TuwyGm+pt1o/dsP/wCcaPzT0c6JB5I1OZbbULaR/wBFM/wrPHM5kMQY/wC7
FdmoO4Ip0yzUYjfEGnRZwBwH4Pf8xHYpVp/lnS9P1/V9dtw4v9bFst8S1UP1NGjiKrTb4X3yRmSA
O5rjjAkZd7Gfzq89v5M8h3d/bNx1S7IstNPXjNKCTJ/zzRWYe4AyzBDik1arLwQ25l4n/wA4teUI
9X806h5ov19YaQqrbGSpJurnlWTfqURW692B65k6mdRrvcLRY+KVno+p8wHbOxVRvbK0vrOeyvIl
ntLmNop4XFVdHHFlI8CDhBo2iUQRRfFtnqOqflJ+bdykDSNDpt01vcxH4Tc2EhDgMOlXiKuvg1D2
zZECcfe6ME4p+77f7X2pa3NvdW0V1buJbedFlhlXcMjjkrD2IOa0ineAgiw+Mof/AFo4/wDgWv8A
91E5sR/d/wCb+h0k/wC9/wA8f7p9o5rXeOxV2KvJPy0/KDzR5Q8/6vrc+treaNfrKFgLO08rySB0
acMqxhkA+0p39gcycuYSjVbuDp9LLHMm9vxzet5jOcxrV/POn6d520HyiYzNf64lzLyVgBBHbxNI
Gdab+oY2VfkcsGO4mXc0yzATEO9kuVtzsVdirsVfD6mgz0F84Ib5YrTuWK01zxWnc8C01zOKaSbz
LFyt4pu6NxPyYf2Zre0oekSc/QSqRDL7qy1Fvyi8n+WNLiN1qHmjVLzU/qsYq5MFLOIHtRuLN4bV
OaT+InuH7Xd0eCIG9kn9H63tf5Xf847eXPLcEOoeYo49Y14gMUkHK1tzT7McZ2dh/O30Ad8XJqCd
hyc7BowN5bn7HsGYznIXVdLsNW0260zUIRPZXkTQ3ELdGRxQio3HsR0wxJBsMZREhR5PjL81PyY8
x+Rb2S5RHvfLrv8A6LqaCvAE7JOF+w46V+y3bwGxx5RL3ulz6c4zvyZJ+WX/ADkn5h0BodN8z+pr
Ojr8K3JNbyEV6h2P75R4Oa/5VBTI5NOJcti24dXKOx3H2vqPQdf0fX9Kg1XR7pLywuRWKZK9tiCD
QqwOxB3GYMomJou0hMSFjk8F/wCcv5pBB5VgDfuna+dl7FkFuFP0czmVpergdofw/H9DIf8AnE+G
Nfy7v5QoEkmqzB3oKkLbwUFfAVOR1X1D3M+z/pPv/QHtOYrnuxV2Kvjn/nJm1WH817yQdbm2tpW+
YjEf6o82OA+gOl1YrIfx0fSn5N30t7+V3lqeU1dbKOGpp0grCvSn7KDMPOKmXZaWV4w+YIf/AFo4
/wDgWv8A91E5mj+7/wA39Dq5/wB7/nj/AHT7RzWu8diqR+cdS812GlLL5Y0ePWtSeVYzby3CWyRx
lWJlZnpyoVC8Qa717ZOAiTuaDVllID0iy8Gb86/zdX80dI8ra1Fa6OW1Oytb+ytolk5xXEqV/eyN
P9uOT7SEZlnDDhJG7rxqcpmInbcX+N3vPnC484waUreUrWzu9UaUKyX7ukKxcWLP8FCx5BRSo65i
QEb9TsMpmB6Ob5V0TUPzOl/PdHL2l/52t5rmBVuy31MGO2lV0HpmMhFj5caU3zOkI8H9F1WOU/F7
5bvpPyTdfm5NqUy+c7PSLfTRCTA+mmYymfmtA3qSSDjw5dutMw5iFek7uyxHKT6gKTrzf5t0byno
Fzrmry+naW4ACLQySyNskUakjkzH+poATkYQMjQbMuUQjZeNX3mv/nI7zFobeavL1lbaTozIZrLT
VWGa8lt6FhJSdX57DagUttxU1zJEMUTR3LgHJnnHiGw+H6fx5Ib8kvz981eYPNdt5Z8yLFd/Xlk+
rX0cYikSSKNpaOqUQqyoRsoNfbHNgiBYTptVIyEZb28kztHjHYq7FXYq7FXYqgtYj9TTZh/KAw/2
JrmLrY3iLfpZVkD6k/KfyNb2tnofmC5iX1rfQrGy0wNQlBMhurqWn7LPLPw8eK/5W/I5snOPm9jp
cI2n/RFfefvel5jua7FXYqtmhimieGZFkhkUpJG4DKysKFWB2IIxBQRb5+/N3/nG2zngn1zyRF6F
0gaS40RamOUAEk23Xi//ABX9k/s06HMxajpJ12o0dbw+X6v1MI/5xn853+kefIvL7yt+i9aEkckD
V4pcRoXjkA7MeHA/PfoMs1ELj5ho0eQiYHQvRf8AnLLy9cXnlbSNbhUumk3MkVwACeMd2FHMnsA8
Kr82GU6WW5Dk6+BoHuU/+cSNYjl8sa3o+3q2l6t37lLmIJ+Btz9+HVDcFdBLYj4/j5PecxHYOxV2
Kviz/nIDUk1b83NXS0JmFu0NlGFqSZIolV0A8RKWXNlhFQDo9TK8hL658laI2heUNG0dwBLYWUEE
9DyHqpGBIQd+r1OYGSVyJdxhhwwA8nyRD/60cf8AwLX/AO6iczx/d/5v6HTz/vf88f7p9o5rXeOx
V2Kvln8ywB/zk/o1B11DRif+DhzPh/dfAupy/wCMfEfofU2YDtny15V/9axn/wC2hqX/AFCT5n5P
7r4B1OH+/wDjL9L6lzAds+cP+cktXi1H8wvKvlK9u1ttEBguL+UuFRDdXBhZ5STRfTiTkCegY5m6
cVEnq6vWnimI3t+tn2q/nZo8sDaT+Xmnz+ZtYCelbR2cDrZW5+yjTSsEURr/AJO3bkvXKxgPOWwb
5asfTjFlB/kv+Ry+UJT5h190uvM8wbgqHlHarIDzCn9qRgaM3TsNty5s/FsOSNLpODeXP7nz/dWs
9pdTWtwhjuIHaKaNtiroeLA/IjO2jIEWOReJlExJB5hSwodirsVdirsVQup/8c+4/wBQ5j6v+7k2
6f6x73235ciSHy9pcUYokdpAijrssSgZxM/qL3mIege5Mci2OxV5h/zkLq8+jeTNO1aDeWw1mxuk
X+Yws0gB+ZXMjTCyR5OFriREEfzv1vQdD1vTdc0i01fTJhPY3sYlgkHgeoPgynZh2O2UyiYmi5UJ
iQBHJHEgAkmgG5JyLN8qflVosXmH/nIPUtY0pOehadfX1968Y4x8JGkS347ADmzhlX+UHwzYZTWP
fm6fTx4stjlZP6n07rui6drmj3mkalF61jfRNDOnfiw6qezKd1PY5gxkQbDtpwEhRfKenQa/+Rf5
oRy6jG9zoN3ygN1Gvw3Nk7A8l7CWIhWKV6ileLAnPNZI7OnHFgyb/wBofWGl6pp2q6fBqOnXCXVj
coJILiI8kZT4H8D4ZryCDRdxGQkLHJFYGTCvzU/M3SvInl6S7mdJdWuFZNLsK/FJJSgdlrX00O7H
6Opy3FiMj5OPqM4xjzeM/kB+VGp61ro8++aI5Pq6Sm609JxR7q5cl/rLV34Kx5Kf2m36DfJz5aHC
HC0unMjxS5fe+m8wXaviu3ngb/nIcTiRfRbzWzrJUcSp1AkGvhmzA9H+b+h0UjeX/P8A98+1M1jv
UHrWrWWj6TearfOI7SyheeZyQPhQVoK9z0Hvkoxs0xnMRBJ6KHljXYvMHl7T9bhhe3i1GBLiOGWn
NVkFRWm3TGceE0jHPiiC+ZfzEYn/AJyk08E1A1XRQPYUtjmbD+6+B/S6qf8AjH+cPuD6tzAdw+Wf
KzKv/OWExYgD9I6iKnbc2k4H45nz/uvgHU4v7/4n9L6H84eefL3lKyiuNWmb1blxFZ2MC+rdXEjE
DjDEDVqV37fhmHDGZcnY5c0YDd8+/wDOUNjPpvn3QPMyW4e1ntox+8FVea0mLsrq3SqSIKEfxzM0
x9NOu10amD3j8fofRHlDX/L+veXrPUtBeM6dKg4RRBV9JqAtEyLsjrXdcw5xIO7ssU4yjceSQfmJ
+aemeVTb6ZZIuq+ar+WKDT9Ejf4y0rABpSK+mtG+GvU9NqkTxYTLc7Bq1GpENhvLuQ3nv8l/L3mm
8bUYpn0zU5KetNEqvHJT9p4yVq3+UGHvXM3SdqTwjhI4ouJrOyseY8V8MmH/APQsv/fyf9OX/Zxm
b/Lv9D/ZfscD+QP6f+x/a7/oWX/v5P8Apy/7OMf5d/of7L9i/wAgf0/9j+13/Qsv/fyf9OX/AGcY
/wAu/wBD/ZfsX+QP6f8Asf2u/wChZf8Av5P+nL/s4x/l3+h/sv2L/IH9P/Y/td/0LL/38n/Tl/2c
Y/y7/Q/2X7F/kD+n/sf2pR5v/wCcdn0zyrrGppr31hrCyuLr0DacOfoxtJx5es9K8fDIy7Y8QcPB
z8/2L/Inh+vj+nfl+17x5dkSXy/pkkZ5I9pAyN4gxqQc0c/qLv8AEfSPcmGRbHYqxT8yvIEHnry/
FolxeNZQLdw3M0iIJGZIuQaNasoUsG2bengcsxZOA20Z8PiCrrdi9h+S+reVjKfIHmu70eGYlptO
voo761d9hyVWCGM0Aqwq3vTLDnEvqDSNLKH0SpD6t+Xf5x+ZoZNO8w+cbWz0acUuYNKtiryKRRoy
zem3FhWvxEeIOSGTHHcDdjLBmntKQr8e5nnkryN5d8maMulaHAY4a85ppCGmmkpTnK4AqfkAB2GU
TyGRsuVixRgKCf5BtS7X/Luh+YdMk0zWrOO+sZd2hlFaMNgysKMrCuzKQRkoyMTYYTxxmKLy2T8l
PNHlNLq7/LPzPc2BblIuh3oSe2lc/shnHFTQUDMhPi3fMgZ4y2kHDOklCzjl8Pxt9jyDWv8AnIX8
5LaafS7u6i069tmaC5C2sQlV1NGrzDgEewy8YIdziS1WXlf3PV/yt/K/yB5lsrbztqOoXHm/UbkD
1X1JqpDMlOUUlvVviTpxdmWlCBShynLllE0NnJ0+nhMcRPEXtYAAAAoBsAMxHYpB5z0LzFrWmLZ6
Hr7+XpS5M91FbpcO8ZUjgvNkMe+/JTXLMcgDuLac0JSFRPC8VP8AziFEWLnzY5cmpY2IrXxr9YzJ
/Nju+39jg/ycf5w+X7WZ235T/mZbQJBF+Z98Y4xRedmkjU93eZmP0nK/Gh/N/HybRpco/j+z/jyV
+YP+cf8Azd5ih9DWvzEvb22qG+rSWtIeSigb0luAlR40yQ1ERyj+Pkxlopy5zv4f8eenW3l29sfJ
tp5e0vUmsrqxs7ezt9UWGORl+roqc/Rk5IeQToa9cx+IGVkOYIEQEQdwObyrV/8AnGSTWNYl1rUv
OF5c6pMyu921vGH5IAEI4uOPEKAKdMyBqQBQDhy0JJsy3937XqnlLQ/MGkWssGseYJdfZivoSzQQ
wNGqggisQq/LxYk5jzkDyFOZihKP1HieX+f/APnHS417znN5o0LXf0Rc3MiTyRmNy0c6AD1YpUdW
BYry9j38L8eooUQ4uXRmUuIFlXkb8mdI8u6n+ndW1C58y+ZqUGrX7MxjFKfukZpCp47VZmPhTITz
kihsG3FpRE8RPFJk3nTyVoHnHQ5NG1qEyWzsHjljIWWKRQQskTENRhUjoR2O2VwmYmw3ZcQmKLw9
v+cUtbs7qQ6R5t9C3kBBJhkjk4mo4t6clG2PtXwzKGqHc686A3sWefll+QegeSdTOsz3smsa0Ayw
3UqCKOIOKMyR8pDzIJBYudsqy5zIU5ODSCBu7L1HMdy3Yq7FXYq7FXYqhdW06LUtKvNOmNIr2CW3
kNK0WVCh2+TYYmjbGceKJHex/wDKu+ku/wAvdC9ZDHc2lsthdRt9pZrEm1lB9+cJyzMKmWrSyvGP
l8tmVZU3uxV2KuxV2KuxV2KuxV2KvI/zw/JGHzpbnWtFVIfM9ugUqSES7jXokhOwkUbI5/1W2oVy
MObh2PJwtVpeP1R+r73gX5e+fvNP5V+aJoby0mS2chNV0e4DRMwHSRQw+F1/ZboR7ZlzgJhwMWWW
OX3h9WeXPzX/AC+8wael7Za3axBh8dtdSpbzxnuHjkYH6RUHsTmDLDIHk7WGpxyF3XvZFpmr6Tqt
u1zpl7Bf26uY2mtpUmQOtKqWQsKivTKzEjm2xmJCwbReBk7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq
7FXYq7FXYq7FVsXpcT6XHjyavGlOXI8+nflWvviUBdil2KuxV2KuxV2KuxV2KuxV2KsV8+/8q1+p
L/jb9G+jQ+h9f9P1KVHL0eX7yvjwy3Hx/wALRm8P+OnlUP8A0KN9cHD6t6vI/b/SXpV/2f7un4Zk
fvvxThD8tfX/AGT2fyp/hL9Dx/4U+pfojkeH6O9L0OdBy/uvh5dK138cxZ8V+p2GLgr01XknGQbH
Yq7FXYq7FXYq7FXYq7FXYq7FX//Z
+
+
+
+
+
+ uuid:EB89571B7F69DF11AF0CB9E432C5A167
+ uuid:EC89571B7F69DF11AF0CB9E432C5A167
+
+ uuid:EA89571B7F69DF11AF0CB9E432C5A167
+ uuid:E989571B7F69DF11AF0CB9E432C5A167
+
+
+
+ Print
+
+
+
+ 300.000000
+ 200.000000
+ Points
+
+ 1
+ False
+ False
+
+
+ PANTONE 3005 U
+ PANTONE 309 U
+ PANTONE 2905 U
+
+
+
+
+
+ Default Swatch Group
+ 0
+
+
+
+ PANTONE 3005 U
+ SPOT
+ 100.000000
+ CMYK
+ 100.000000
+ 34.000000
+ 0.000000
+ 2.000000
+
+
+ PANTONE 2905 U
+ SPOT
+ 100.000000
+ CMYK
+ 41.000000
+ 2.000000
+ 0.000000
+ 0.000000
+
+
+ PANTONE 309 U
+ SPOT
+ 100.000000
+ CMYK
+ 100.000000
+ 0.000000
+ 9.000000
+ 72.000000
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
% &&end XMP packet marker&&
[{ai_metadata_stream_123}
<>
/PUT AI11_PDFMark5
[/Document
1 dict begin /Metadata {ai_metadata_stream_123} def
currentdict end /BDC AI11_PDFMark5
+%ADOEndClientInjection: PageSetup End "AI11EPS"
+%%EndPageSetup
+1 -1 scale 0 -211.304 translate
+pgsv
+[1 0 0 1 0 0 ]ct
+gsave
+np
+gsave
+0 0 mo
+0 211.304 li
+372 211.304 li
+372 0 li
+cp
+clp
+[1 0 0 1 0 0 ]ct
+103.096 139.708 mo
+103.039 138.986 103.005 138.517 103.005 138.517 cv
+103.005 138.517 100.816 123.759 98.21 119.355 cv
+97.7979 118.657 98.2549 115.79 99.4053 111.543 cv
+100.078 112.706 102.917 117.68 103.487 119.283 cv
+104.129 121.095 104.264 121.614 104.264 121.614 cv
+104.264 121.614 102.707 113.603 100.152 108.928 cv
+100.712 107.038 101.381 104.949 102.138 102.716 cv
+103.106 104.414 105.423 108.525 105.933 109.951 cv
+106.036 110.244 106.123 110.493 106.201 110.721 cv
+106.226 110.584 106.251 110.447 106.276 110.31 cv
+105.691 107.828 104.542 103.509 102.969 100.318 cv
+106.459 82.1528 118.362 57.8726 130.565 47.0405 cv
+50.96 47.0405 li
+45.625 47.0405 41.2598 51.4063 41.2598 56.7407 cv
+41.2598 144.53 li
+41.2598 149.865 45.625 154.23 50.96 154.23 cv
+103.39 154.23 li
+103.012 149.654 102.886 144.591 103.096 139.708 cv
+cp
+false sop
+/0
+<<
+/Name (PANTONE 3005 U)
+/0
+[/DeviceCMYK] /CSA add_res
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4
+/Components [ 1 0.34 0 0.02 ]
+>>
+/CSD add_res
+1 /0 /CSD get_res sepcs
+1 sep
+f
+161.875 105.281 mo
+163.221 105.281 li
+163.221 116.492 li
+161.795 116.492 li
+161.469 113.393 160.348 110.974 158.432 109.234 cv
+156.514 107.497 154.104 106.626 151.196 106.626 cv
+148.612 106.626 146.534 107.279 144.958 108.583 cv
+143.382 109.886 142.594 111.626 142.594 113.801 cv
+142.594 115.812 143.246 117.646 144.551 119.303 cv
+145.854 120.962 148.682 123.093 153.031 125.706 cv
+158.085 128.722 161.482 131.355 163.221 133.613 cv
+164.96 135.868 165.829 138.45 165.829 141.358 cv
+165.829 145.108 164.502 148.315 161.844 150.978 cv
+159.182 153.642 155.979 154.973 152.236 154.973 cv
+150.064 154.973 147.297 154.375 143.932 153.178 cv
+142.549 152.689 141.666 152.447 141.287 152.447 cv
+140.499 152.447 139.89 153.057 139.455 154.281 cv
+138.028 154.281 li
+138.028 141.358 li
+139.455 141.358 li
+139.481 144.919 140.642 147.772 142.934 149.92 cv
+145.226 152.065 147.904 153.138 150.968 153.138 cv
+153.873 153.138 156.196 152.366 157.948 150.814 cv
+159.697 149.263 160.572 147.197 160.572 144.612 cv
+160.572 142.681 159.852 140.832 158.415 139.065 cv
+156.979 137.296 153.972 135.018 149.4 132.227 cv
+145.674 129.923 143.186 128.11 141.937 126.776 cv
+140.688 125.443 139.765 123.982 139.168 122.39 cv
+138.571 120.801 138.272 119.052 138.272 117.15 cv
+138.272 113.48 139.476 110.502 141.881 108.217 cv
+144.286 105.934 147.418 104.792 151.276 104.792 cv
+153.396 104.792 155.707 105.213 158.207 106.055 cv
+159.374 106.436 160.19 106.626 160.651 106.626 cv
+161.034 106.626 161.291 106.545 161.425 106.382 cv
+161.563 106.218 161.713 105.852 161.875 105.281 cv
+cp
+/1
+<<
+/Name (PANTONE 309 U)
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4
+/Components [ 1 0 0.09 0.72 ]
+>>
+/CSD add_res
+1 /1 /CSD get_res sepcs
+1 sep
+f
+216.34 130.229 mo
+216.34 122.917 214.667 117.157 211.324 112.944 cv
+207.98 108.732 203.66 106.626 198.358 106.626 cv
+192.733 106.626 188.244 108.741 184.888 112.964 cv
+181.53 117.192 179.855 122.837 179.855 129.903 cv
+179.855 136.779 181.519 142.372 184.847 146.679 cv
+188.178 150.986 192.518 153.138 197.871 153.138 cv
+203.443 153.138 207.913 151.053 211.282 146.881 cv
+214.651 142.708 216.34 137.158 216.34 130.229 cv
+cp
+224.653 166.999 mo
+225.182 168.304 li
+222.846 169.364 220.197 169.894 217.235 169.894 cv
+214.789 169.894 212.518 169.573 210.426 168.936 cv
+208.336 168.296 206.369 167.319 204.537 166 cv
+202.703 164.682 200.466 162.487 197.83 159.418 cv
+195.491 156.699 193.698 155.012 192.449 154.361 cv
+186.986 153.249 182.294 150.365 178.365 145.721 cv
+174.438 141.074 172.474 135.772 172.474 129.821 cv
+172.474 125.391 173.602 121.221 175.857 117.307 cv
+178.113 113.393 181.171 110.329 185.033 108.116 cv
+188.889 105.899 193.17 104.792 197.871 104.792 cv
+205.128 104.792 211.242 107.205 216.217 112.028 cv
+221.188 116.85 223.674 122.783 223.674 129.821 cv
+223.674 135.748 221.76 141.023 217.928 145.657 cv
+214.095 150.293 209.189 153.194 203.212 154.361 cv
+203.784 154.819 204.806 155.976 206.272 157.826 cv
+209.391 161.687 212 164.306 214.091 165.678 cv
+216.185 167.046 218.264 167.734 220.331 167.734 cv
+221.854 167.734 223.297 167.488 224.653 166.999 cv
+cp
+f
+259.776 146.473 mo
+258.979 154.111 li
+227.787 154.111 li
+227.853 152.591 li
+229.544 152.418 230.729 151.984 231.42 151.291 cv
+232.11 150.593 232.459 148.992 232.485 146.473 cv
+232.521 112.728 li
+232.509 110.965 232.423 109.75 232.277 109.087 cv
+231.885 107.234 230.135 106.309 227.005 106.309 cv
+226.938 104.792 li
+243.22 104.792 li
+243.282 106.309 li
+241.546 106.427 240.358 106.895 239.693 107.717 cv
+239.035 108.542 238.709 110.213 238.71 112.728 cv
+238.585 146.826 li
+238.59 147.406 238.639 147.937 238.747 148.429 cv
+239.27 150.917 241.243 152.158 244.665 152.158 cv
+247.604 152.158 li
+251.407 152.158 254.046 151.73 255.535 150.876 cv
+257.02 150.028 257.964 148.557 258.371 146.473 cv
+259.776 146.473 li
+cp
+f
+270.119 113.882 mo
+270.855 113.882 271.533 114.14 272.159 114.658 cv
+272.778 115.174 273.181 115.812 273.336 116.571 cv
+273.487 117.307 273.362 117.946 272.959 118.489 cv
+272.556 119.032 271.992 119.303 271.256 119.303 cv
+270.495 119.303 269.786 119.032 269.13 118.489 cv
+268.47 117.946 268.066 117.307 267.916 116.571 cv
+267.761 115.812 267.886 115.174 268.33 114.658 cv
+268.764 114.14 269.358 113.882 270.119 113.882 cv
+cp
+275.101 125.583 mo
+275.051 146.943 li
+275.06 148.436 275.136 149.497 275.269 150.124 cv
+275.441 150.994 275.827 151.583 276.406 151.895 cv
+276.995 152.209 278.309 152.366 280.348 152.366 cv
+280.395 153.792 li
+265.72 153.792 li
+265.755 152.366 li
+267.767 152.366 269.003 152.18 269.474 151.814 cv
+269.941 151.449 270.181 150.218 270.207 148.125 cv
+270.299 133.002 li
+270.282 132.177 270.23 131.532 270.133 131.064 cv
+269.845 129.666 268.923 128.965 267.373 128.965 cv
+264.189 129.372 li
+264.174 127.701 li
+273.424 125.583 li
+275.101 125.583 li
+cp
+f
+299.717 126.315 mo
+299.787 128.597 li
+292.118 128.597 li
+292.063 147.671 li
+292.063 148.191 292.094 148.638 292.183 149.021 cv
+292.508 150.597 293.372 151.387 294.761 151.387 cv
+297.021 151.387 299.6 149.906 302.527 146.943 cv
+303.758 147.981 li
+299.704 152.48 295.83 154.728 292.135 154.728 cv
+289.581 154.728 288.058 153.559 287.563 151.222 cv
+287.359 150.246 287.257 148.315 287.249 145.434 cv
+287.354 128.597 li
+282.338 128.597 li
+282.28 127.172 li
+286.584 126.738 289.38 124.034 290.673 119.06 cv
+292.14 119.06 li
+292.127 126.315 li
+299.717 126.315 li
+cp
+f
+311.427 140.01 mo
+319.449 138.461 323.033 135.679 322.196 131.658 cv
+322.025 130.839 321.502 130.092 320.645 129.414 cv
+319.776 128.734 318.904 128.394 318.011 128.394 cv
+315.972 128.394 314.303 129.597 313.017 132.001 cv
+311.721 134.407 311.198 137.078 311.427 140.01 cv
+cp
+311.381 141.522 mo
+311.449 142.227 311.544 142.894 311.677 143.518 cv
+312.7 148.409 315.455 150.855 319.937 150.855 cv
+323.197 150.855 327.513 148.887 329.655 144.946 cv
+330.741 146.066 li
+329.398 149.104 327.935 151.306 326.357 152.675 cv
+324.783 154.045 321.037 154.728 318.351 154.728 cv
+315.466 154.728 313.074 153.92 311.172 152.303 cv
+309.267 150.685 308.014 148.396 307.389 145.434 cv
+306.327 140.378 307.15 135.782 309.851 131.634 cv
+312.566 127.493 316.063 125.417 320.356 125.417 cv
+322.045 125.417 325.076 125.848 326.24 126.703 cv
+327.389 127.558 328.122 128.707 328.426 130.146 cv
+328.937 132.567 326.205 134.854 323.48 137.016 cv
+320.753 139.177 316.713 140.678 311.381 141.522 cv
+cp
+f
+99.4053 111.543 mo
+100.078 112.706 102.917 117.68 103.487 119.283 cv
+104.129 121.095 104.264 121.614 104.264 121.614 cv
+104.264 121.614 102.707 113.603 100.152 108.928 cv
+100.712 107.038 101.381 104.949 102.138 102.716 cv
+103.023 104.269 105.034 107.836 105.761 109.526 cv
+105.788 109.207 105.815 108.888 105.843 108.572 cv
+105.199 106.097 104.221 102.857 102.969 100.318 cv
+106.183 83.5933 116.528 61.6929 127.673 49.8696 cv
+50.96 49.8696 li
+47.1719 49.8696 44.0889 52.9521 44.0889 56.7407 cv
+44.0889 138.125 li
+61.4629 131.457 82.4121 125.362 100.442 125.623 cv
+99.7725 123.042 99.0107 120.708 98.21 119.355 cv
+97.7979 118.657 98.2549 115.79 99.4053 111.543 cv
+cp
+/2
+<<
+/Names [
+(PANTONE 2905 U)
+(PANTONE 3005 U)
+]
+/CSA /0 get_csa_by_name
+/TintMethod /Subtractive
+/TintProc null
+/MappedCSA null
+/TintTransform
+/0 {
+1 3 1 roll 1 3 1 roll 1 3 1 roll 1 3 1
+roll 6 -1 roll 2 index 0.41 mul 1 cvr exch sub mul 1 index
+1 mul 1 cvr exch sub mul 1 cvr exch sub 6 1 roll 5
+-1 roll 2 index 0.02 mul 1 cvr exch sub mul 1 index 0.34 mul
+1 cvr exch sub mul 1 cvr exch sub 5 1 roll 4 -1 roll
+2 index 0 mul 1 cvr exch sub mul 1 index 0 mul 1 cvr
+exch sub mul 1 cvr exch sub 4 1 roll 3 -1 roll 2 index
+0 mul 1 cvr exch sub mul 1 index 0.02 mul 1 cvr exch sub
+mul 1 cvr exch sub 3 1 roll pop pop
+} bind /Procedure add_res
+/0 /Procedure get_res
+>>
+/CSD add_res
+/2 /CSD get_res devncs
+level3{
+gsave
+clp
+[-4.01667e-06 91.8907 -91.8907 -4.01667e-06 85.8809 49.8696 ]ct
+/0
+<<
+/ShadingType 2
+/ColorSpace [/DeviceN /devicen_colorspace_dict AGMCORE_gget begin /Names load /MappedCSA load /TintTransform load end]
+/Coords [0 0 1 0 ]
+/Domain [0 1 ]
+/Extend[ true true]
+/Function
+<<
+/Domain[0 1 ]
+/FunctionType 3
+/Functions [
+<<
+/Domain[0 1 ]
+/FunctionType 2
+/C0 [0 1 ]
+/C1 [1 0 ]
+/N 1.3901
+>>
+<<
+/Domain[0 1 ]
+/FunctionType 2
+/C0 [0 1 ]
+/C1 [0 1 ]
+/N 1
+>>
+]
+/Bounds [0.920245 ]
+/Encode [1 0 0 1 ]
+>>
+>>/Gradient add_res /0 /Gradient get_res clonedict shfill grestore
+}if
+level3 not{
+gsave
+[-4.01667e-06 91.8907 -91.8907 -4.01667e-06 85.8809 49.8696 ]ct
+clp
+/0 {
+<<
+/NumSamples 256
+/NumComp 2
+/Scaling[[0.00392157 0 ][0.00392157 0 ]]
+/Samples[
+<~s8Dikq=jUToC;>$4p-
+%1/1`;$.4?Pj,pX][+sA'N*Zc=A)]BS3
+(`*r''bh;p&eP]e%h9'Y$k*LO$4."E#R:P<"U,#3!s8T+!W`<&zzzzz!!!!!~>
+<~!!*0)"U>;?$OmRW&JGio(E",2*?QCI,:"T`.4Qi"/hf"91c73O3]fGf5=%V'77Kd=8k_rS:f1+i$P?>?t!MTAS,UiC27[(DfKf=FEVkQH$apeIXm!$K8#&8Ll%%KNK0'^P*;,qQC+)/S"-%ATV8*TUnsuf
+WN*##Xfek3Z*UgF[^WcW]">Vg^;%J"_Sa=2`lH0Bb0/#RcHjkbdaHUoe^i@)g"P38gtgiEi8ESRj5f=`
+k3(sll0@U#m-X3.n*oi:o()DDo_%nNp@n@Wq>'m`qtp
+]
+>>
+0 0 1 0 []true true []
+/DeviceN
+GenStrips
+} /Gradient add_res /0 /Gradient get_res exec grestore
+}if
+np
+149.133 44.167 mo
+143.681 39.3052 137.08 41.2583 130.565 47.0405 cv
+129.598 47.8994 128.633 48.8516 127.673 49.8696 cv
+116.528 61.6929 106.183 83.5933 102.969 100.318 cv
+104.221 102.857 105.199 106.097 105.843 108.572 cv
+106.008 109.207 106.157 109.803 106.276 110.31 cv
+106.559 111.51 106.711 112.288 106.711 112.288 cv
+106.711 112.288 106.611 111.91 106.201 110.721 cv
+106.123 110.493 106.036 110.244 105.933 109.951 cv
+105.889 109.83 105.828 109.683 105.761 109.526 cv
+105.034 107.836 103.023 104.269 102.138 102.716 cv
+101.381 104.949 100.712 107.038 100.152 108.928 cv
+102.707 113.603 104.264 121.614 104.264 121.614 cv
+104.264 121.614 104.129 121.095 103.487 119.283 cv
+102.917 117.68 100.078 112.706 99.4053 111.543 cv
+98.2549 115.79 97.7979 118.657 98.21 119.355 cv
+99.0107 120.708 99.7725 123.042 100.442 125.623 cv
+101.954 131.438 103.005 138.517 103.005 138.517 cv
+103.005 138.517 103.039 138.986 103.096 139.708 cv
+102.886 144.591 103.012 149.654 103.39 154.23 cv
+103.891 160.288 104.834 165.492 106.036 168.277 cv
+106.852 167.832 li
+105.087 162.345 104.37 155.154 104.684 146.861 cv
+105.159 134.185 108.076 118.898 113.466 102.965 cv
+122.572 78.9131 135.206 59.6152 146.769 50.3994 cv
+136.23 59.917 121.966 90.7261 117.696 102.135 cv
+112.915 114.911 109.527 126.9 107.485 138.387 cv
+111.008 127.618 122.399 122.989 122.399 122.989 cv
+122.399 122.989 127.986 116.099 134.515 106.255 cv
+130.604 107.147 124.182 108.674 122.031 109.578 cv
+118.858 110.909 118.003 111.363 118.003 111.363 cv
+118.003 111.363 128.281 105.104 137.099 102.27 cv
+149.226 83.1704 162.438 56.0361 149.133 44.167 cv
+cp
+1 /1 /CSD get_res sepcs
+1 sep
+f
+0.5 lw
+0 lc
+0 lj
+4 ml
+[] 0 dsh
+true sadj
+27 175.304 mo
+0 175.304 li
+/3
+<<
+/Name (All)
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4
+/Components [ 1 1 1 1 ]
+>>
+/CSD add_res
+1 /3 /CSD get_res sepcs
+1 sep
+@
+36 184.304 mo
+36 211.304 li
+@
+27 36 mo
+0 36 li
+@
+36 27 mo
+36 0 li
+@
+345 36 mo
+372 36 li
+@
+336 27 mo
+336 0 li
+@
+345 175.304 mo
+372 175.304 li
+@
+336 184.304 mo
+336 211.304 li
+@
+%ADOBeginClientInjection: EndPageContent "AI11EPS"
+userdict /annotatepage 2 copy known {get exec}{pop pop} ifelse
+%ADOEndClientInjection: EndPageContent "AI11EPS"
+grestore
+grestore
+pgrs
+%%PageTrailer
+%ADOBeginClientInjection: PageTrailer Start "AI11EPS"
+[/EMC AI11_PDFMark5
[/NamespacePop AI11_PDFMark5
+%ADOEndClientInjection: PageTrailer Start "AI11EPS"
+[
+[/CSA [/0 ]]
+[/CSD [/0 /1 /2 /3 ]]
+[/Gradient [/0 ]]
+[/Procedure [/0 ]]
+] del_res
+Adobe_AGM_Image/pt gx
+Adobe_CoolType_Core/pt get exec
Adobe_AGM_Core/pt gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%Trailer
+Adobe_AGM_Image/dt get exec
+Adobe_CoolType_Core/dt get exec
Adobe_AGM_Core/dt get exec
+%%EOF
+%AI9_PrintingDataEnd
userdict /AI9_read_buffer 256 string put
userdict begin
/ai9_skip_data
{
mark
{
currentfile AI9_read_buffer { readline } stopped
{
}
{
not
{
exit
} if
(%AI9_PrivateDataEnd) eq
{
exit
} if
} ifelse
} loop
cleartomark
} def
end
userdict /ai9_skip_data get exec
%AI9_PrivateDataBegin
%!PS-Adobe-3.0 EPSF-3.0
%%Creator: Adobe Illustrator(R) 11.0
%%AI8_CreatorVersion: 13.0.2
%%For: (Mary Diarte) ()
%%Title: (logo_color_eps.eps)
%%CreationDate: 5/25/10 11:06 AM
%AI9_DataStream
%Gb"-6Bp2f2Pp#o2n/pqR,4,%FCG'cqcS6#7\b,MND(@139M]b(.+f4cF,#U)U>4f'lQ=CZga0;4=HqR+.hQ;lS]&LdEc>u@I.>23
%q]F^Dqtf,mGOPRXQT*6<^3Q#^]MCJ(9'2$brlaWb^%u/7-/D=Fni+nSD+m-X<]f0h<-*C3hW?NRp>"X*b9,WUHM?s1&+qVd5.
%q`!t.r:$QY@eYrRS($[^Y>4ON]RB>?-JqPuoARsO7-Dhq=33@?bLaTo$t5LppPVL"+66G_;E"Vp$lPXedl-E
%f\r+m++O(\r[knTjqhClIe3HoroSIcl4e19q3S5Cs4r6Tg&tH.m=5(!rb6'5bF@`Sf_44`DPgGmRtgB[JFB2ck)1d=>MFY\SU'T^
%Df'`(IdsVVHhH4\+$P-unaZ;LGQ79$4Ehn(J,f5]L]?1"qeUklES7hPDM(`iorZ[%fMm-9i0iK;HMdZ1(E[#CP1hnO`g^!`%_m%t
%[T9d%ee_Y:(Ya7?CN"gNe98q#]2C+Ae(=TLT3f;/pFgH6rSlr)G9>`Bn4-[EY?nsh7+cq_q"ALpeZ*uU"Q+,&*S(`=2$/\^"6K$I!"B]C\>(a9E-*SQ,CA@Y[&$N`prU*>H@fGL35.pm'j)Dni6N7XLPi5u7J(3:4nrFl"^#SNb
%pW`]h]q&lKrUne7K&?Uua1)*Bm`"KMD:Z,7dnOO^\SF1smdT5rM_B"3\*iCn]_XiOGCTSt>AIsqn)=;2i;[!>s8?8shS[Z/*YSA]
%hg/)USkec34d?Z08dYJh$#6:>[8K&VUs3)::9@?j[LYK+/(CR6C;FiX5LeEUU"4Z':[[uHc$g1G:+,i1F@lk25+UX/dtsZi*`IQf
%5P.GA(7pcZ]f0<6s%j*C"TLh@(mBoC^4R$6s%iqi"TQBLAL?VD(_cfXJ&-s!IqX]$M->WG_>c3,p^'b6'>0N@PMQhX?sN1ce2a'"
%-L1asV`CVWUfioCQ=pG9eFXc`)#T%j0KrWJRMC,a`WuO]?SFNEJ,#X^%M8CWC;K]'Ip,:X
%&t2mJ5J0j4!.$9m,ATdIUKN@U1iG-&1ihkEoUPbHn"l=]]r6[5TFQoU0G?n\:Xc9,>8SI[&%)\X@=0mZoITnNF^T=R$c-Z44#D$N
%#/Y8o@DN\p2X!/0$K6U'$X$DsGNp3n3k^mB%=0WW,%IC^6S3RF[8,&sp[k/Ne'94TVDY9^Tb^Du+oL`>SXqQCaDUB,9BN565%1qY
%IE+Qs]&Sn=oY/I'bC(D(7l$:tFIs$,X7TKM9C:/kekZ!kT0sDGJ"39PqG6s@5_&O#oJ'^k8GIh)oeX$8jnsVoj(j"uk-k#k^Oc::E",u7_Aq#0;c`kGD?SlQaT@EJo2e*A.h80H]D/*n@
%FmGl>`pL4+mMXUl(_")pfQIMm0E6]L_f*%[gq2.h)-4*[YuMasF*HE#46cP\1B+);V11[hZN9ZV/%3p@?MRCk/<7>qq5iTNGL08*
%e"qPu^\_Ad#'&/tRaoe+Ai&j)[s2TTT?dIP4WjYATAQ#-376K@L"U9\hodX/]7tV&
%3BD0I"&NKFHhA%b9d+p2gFCM2a$FJG>",TbT]bn#eka9BrF2Z^4Gu]lYIJNFs-.M3^D,:mD4K]4oF,NlqenG')BK2JF=5[83&i""
%#9sZN#>hZST2(2p0HPJMnO>2S'C\Zn]P@6%G;A'X2eRWWSE"NM&pKkp9A%1HXDGI&24ibR!d"ibu:^OrUD85Q0
%4`KET\Z#.*@mG3tp/iNOlTCA=^77[^N'6gdg7G\iG72$(&%Ejo0V#t7)7CYPJj%'fT?e5FZneI=corR)`-d].,mZuUC^N=XQ*G\P
%[-`@D3DYJ2Q.E$Fe9D%eQ8;,\&^mb\F'Oq&6ln]YieiE6679CjS(Esp`Y?6FWN/&.E2a8L&BoZKb]
%kA;7+n^>(t%\Ksp>@c\hSENqQD*KI+0M+oprTj44&jKnI.r(05LEi6*.-@q*UeM._GbQ.a^!!W
%KQST*J5;u$O47CPoBW@7JZ6kReeL[2hAqFGaUC7YMk"1eJs+\kraqUFm&*)@)K@MB41^C[kR:i2SHFD=ck`\hltSXMYFB*L47sLP
%0&Y"<";WJXOc+;%#>d#2$q>Q&Vph5460O_WG'c8*KoQh,UOFEokLY$>YgQ!Q^1]_5r-%KU@Jkei&"nkHNEb:`0Tt#Y*V2cmB+r>4
%E*roq0`KkLmb?^Si==ZY^k]qA+:cl7fA;]:Lt!:rde7Z,i"\gYg/St%lq!6On0s`73:X,Pifo64L=,'&;8kH%$M+\p]j>2j,._s5
%%^jjaiqVqGY[GgrqtG$FhXlHQ2ST%Wrlt?/kWA_`^\R`V`ui&D,7XYNc#;IJg99Cbl,Q
%nV9?cr;QZl2ql%B=5$@Q,8BGCFhH`bI,Nh!hpG-$ho$7qDq]X7pfI3UXPM\"]PC)WE&
%&$Ol=gNEkjk<=mohgQ)r'fTM"n\t7+Ye)"WGf#nV?\-48?G(XTiJQ`m`nfEuC];l^b,!b6Y*HV"p?BrHZ'A7Z>-Y3>B-e?<^Md')X&@r>T0j85MR1tisR
%3FZ^CVu^VIEDKD9.iq>s-+8D@"S7$i=C;n`+:%*=[R?E+/12k.LoLt:%Z>6O@#8!)d>r,NO16)uSCSq1bn5_$SYJ*(7tW^u'9pta
%`F]?`G
%`-Y_!+?E]E.D+bpCMGE("]*giL#+$rO!Sb%=;$tE"ZMjM`o3M/4"c?,NmZ2o\F@p.=ec:]%#a#`IMVUA5:.sEX[,H.mgo@1[3QCuQ`R#sWEOIKmY8%!EfabaJ&e7QHAU*Im&[^"nRF:./B[gQcB^#XU%AIYr@&h8^6hQ#IR8$d
%EhBhrhk5I$0b8]jRKg[m/EZO^2aoOAcW`S3K1)U<5on6Z6s0`m7jk/r%d?YTE`/!81brL$4rf[b=PVc?Md^gq`gVF''i+Kj4$c[0\?V#MHR)3_I#-SjiDhjAh"5=g0:VF(1@ikr_NXfTA0=Z=ZBlet`1+U*es(4]koU\MY>\>C:dq
%"TrZ.c(XgsY4]*/*p;,OfLFlTW%9F8:9E!#2Xn=:$.ZEYaDGg(RjHtSbC[aINU20OHIq;f[V=$(_IK']h\O&rVq,.@JOI])KboLj!V4hZ+oiN`Y
%Ts60_147-N`-XoE>snF>U-rbN+chW!Y"Q0J6nd-0oLtK."0H(Em_1\o3&K-ltnhE5eD[LMJa[af.8O/^Gr/uc/TV3'=Kqi/lq(u/I='cEp0ZQ2(3$S`J+Ro9:>TE`_dmgMCYQ$HkN0.CX/XG([1D1>Q$+>fW*u6X[8>sCT'WCrEjrFsgdCC/5/=9r
%m7=%qek4cZ\Z3>[`$:=[?W3M!p=4Jjg.7D"4NJ/2nheY4I9#`)V2M9]b9jMO0Oh,V$P@i;:a$=8?&<\lK&$c)$+I-O^$WRVCn[&Y
%n.$=dG4]-f"Au*0W]&Sq2MGNZ9Eiu2^YhL:C,*rdp!<,ZUdeL\5&,m3lT9UNpppOQNS+)hkNH4'p$L_5Gr.!eWp'N=RZ<%.D2Dq;
%f"\TR4au8YeDR+F_&ONjfe87dK\n.$FM)P>EAs4a9d6ZAfG\.YO[2/cPC1;_g*uB:F;=toA/2]04`)[O]]jo'N
%=_fh_er@:Rf"*Zh"dS;i>^+bf)!h.9=;+op=D@q#[5-7K:.Jlk-Y/s.Kbieg(h_UUOX'G?`%)Lfgc/hsbjcb-@UP%D(N'I2EkoKf
%M13@oNau06_'ODPP^d(m[UW,qhlZ
%Q+:KfN5bS^j+a94O&>-jecV4_7I!%?m+[E6+*Uu.,Z2)d/n2X>H\Q4\7JH7R:kk#g+tp"@Ht)[R*?uahhm(.sc33>`6E:Z7#!$u^
%8KVQ-oT$n(&Y_^E(?&DLk"0YZTCqg*i:&aU=tpi.5!5bdIprV?jLX-;/\B9%LW1(+mN)%2jmac%39Q0D/:ktn3j=[@0s`MTr\STdM/PZ^dj0-bP$_DsBTa;j2*eXrW;XbJjO`-TeG4J+BaSp+btkJ,VeS[+?"5>r=69fTOJpd%7Bma(6u@O?Hhsqm1tnQTHG,B\@6g\gh8kOR"XF
%r,`^QQ\>sfJcQtg^2]U$*C$=(i&$[nQ9i%4Ei3J=7LCkK#aa)d'k4C'gq:f9(aG-2LF\t-FZ
%U%96cc)Ib6366q\S9Vnnml0c^l[Ea:fOid6D=h6]h8&6Q2KqPV0A]gAOR;:mbMJ?/A9fda[nur=A\,O9mp&-M1KtDeX
%fG>&Un_)g+"cN?QeTZLPcC#B-hQTU0W$B&:o8>tcf*e62+Rf\
%5,jnQi%3!^[[>)on[!"MU`cZ,*ft>okXT2;2&8)l
%b<-(W_6C]Pg09$L=+9B^mq7[OaJB`>%tme_HHTr5fB8i`Vh&bh"ETHfS^P$rV\CFNNbB/c(#K^=IL:^k68<%k.\fhJNQX\2'gXP(-afec%A8iU2%$5=R`bU3+d"F#>MO*UM5g0iMn?#VV'
%KbSdR>>WdZ8;8$+l#dFNI'7HeD#7[70D0j.=FY)a9shB.gF&c]FF\DFA8aaOIH/4^Z8%<'QXB7r\MXp=jNPC3Y3,82j]>_\][S)h
%=tQ#JeP^+YAS!.o-`k0O.[:*+4m^R5Mi)GQ*jS5pKG*9(`&H*XgFrKh+E?kgKf3EPQ3(2XpplA
%0EIClCn5Lr<(>&e#?DNL_WjJ/E(`T5VuVq3qWmGbo?.6.UNbPCW<`(qdDoPL548!LnI9,IiLaAH)Z88IfOQeEi@OMtnrO(-?5X^$
%gfl'hXI'H0O%\S@`m!5uXR4?"D.akNpEJ;[-JA#@=o+Em4sjl\1c[+Om<=ID$H7-g8hWXs"IZ1pld;Dl$;jdVVC0YIE9+7%
%""Eb;?Ukc53=4M]qTWfL+KDWfhqo=R/C+CB>1&b/h'A6J5t
%+s^Fs(Pr-:n8bjn\i?YocC]o1;8lc;8^tg7N/h!J22*;D%9L^U]`WkF0Q'^ltm>_=DI:i#fu3Cce]DBXFtE:Mo"N`4t%]I^1&/@6s&VNMX8[`5tYcRjiH9))/Cs.W0=^5iES+,.d+t^k\X3Fcmr!TGZQU)
%cWl*N^<5K#O+(%%k_JI-1lFGM(=:S5bKTBaSc-5ZmP\k)oZ4GOafe^HWs]-7I:05LKmS(9Wp]/$JLg%>gTJAqlRqWB\?c+>m51)I
%Q*0t"eOQ@W[#H`fgsE=_RDUU7bUMFb=QnX6>RNrrS/0GA0grApCFF2!+lYh"=b,L`m(UUfGI=1C5DW`d4:R;4^
%#;u8oo)#'&l$lu^s,F,ZAk9sc.Bic:)>t)^>g2?Fb2g=.a'Y&*Et$keL]9,^s"])=B('*&mcVOeAsKiTT!c>rm%4L0EP2+IZ5YDD
%lSn/N46Gg8?@fL",nK?t\sp[J@<@uA"Lf,ZoZ@Wbq<+aQa^#Tc7Hn,/8]qXYTa/Jd'_P?QD2PKhbu@qR9?=DgBr'3X0TrM5Pd;-XVar*;qIs=;uMrDX<(Cj'<0:QFl?,gH(0i>n"YO4
%kfM[Ef@a6]Ya9\1:9T!67b1:.?KgBt/pD9)eDXXjq$s]!N7W&;FY:.]qsKb)XaTf!(\r'0B!(D`5$lWV)D:9QH;-n+Dt3@l%Nu1*
%6X^4>Vr$C.FqXWof6*0V\1!tE9I92FNfd(AP4(Tro@2\sK22sUX;ZJ\'2K'$2>G\ZI1?2CWe3sQTg1g8gmH(E2K+CV-?-`j
%M8q''[Sned`e>\L^XJs\W]nWZWWo#G3HJ'jV/**?/u_l-YL+l#QPWV.DrRj]`)fptJo_,\E9d%fZc)SG-\Ec8MT^@I@0WH9cf]L\oj
%+hd'tEti]hX0X790,J\Pe[t.)\[LLlCnse$@a9JrJ5U_KiuangR+XWMX$VWH29rIlZF\,\?fj;^dLS#%0RC'43.'-%R6,i*0B+^!
%YLbdYLX*Hs/8%nZCu:=13p*X$i&n"elrRf?2a5OkSqg:fKbaIMFNU+M\`ZmLRS\:`
%8>nXHV`[bOI-)%WUM5q4,fr`9B=X)dabqU#0"JpB<=%e>B@q9cr9-Oa8SR$q*:sa:i7Z$69ZS^/I,s)'hL9bm"r;4njNOom^QgS.
%&"*H@(jHb3p3^7'hD#c6AhGt4l,hF/YO4H@g+"?B4kt$f^%a@b_QX
%bT^3o7&5d?ZbnkO&4oWQ((Lp"imJWJm5nS1,Hdua2%7!.)&I%C9-YeO[N(Y&D`r0*l`0/[Y3(W=>qb%-\h)71Z[-Lr#.l?t5&tB(
%V/,EMa##CGVp*AQl?':9a@S!&VbYBf]_0j0&pF-(WMWM?:1':UG$6HH\^PAgAY$C?-0RLScObQUVaF4GIcAEte?aMDJ^of93SK5B
%j5^p@B;6H7HAE&!?V[g(d9@%VGqFer5MG5/mnA89"eAd=mV9C&COTF=q&AKMh`%_SC,7eJ0?X,Ljs:DH\tdTQK=k7/oPmm^2LGCi
%fu$H*f>=R1/kq=rDD4
%>l%j1$nfY).:Sk2ELDo.1Y\Y7inhqVFKq&aAgnD<7dTI&0\4EeWdHVc\Qk!7GgsoVW-"He-cVf3I?k[IM-e6S0B87spb#mlki'rA
%a7o#LSU5V)KmE/F(\_n"k*+4-s3:,bh;1*]??
%/Xj"6"#IlQ.:GJt@jMGDAr$ZL+Fg$DM]k6iiDD7a9[*$b>Yt7%a;nV)H,g0&*%Sr?cU?=ZB2aY52N45q8@,fLH(.b,RjDkc1CK;&
%$TWK,`TbOJ#k:pl%8W7Co^(P.V-$=_.P*;#bRVi`g9O:EtYu.O`LRb?`/_K\If?9]NhjMI"0L%C[bjt\I=lR[M
%X2FKa5O+P)iV]@=jX2*\V7"N(DZ$p?1s/XVJU55+rQGUcYLJ8+1M<$BbYla/.^9(%GM$*UEsmDFagEZ1rO';M0'De3bS"Gk_NG9D
%ZPK7d3C1r=c=\XfS#BmC]/;>(:I`b[,O)(foM^a2pDEZX=moV17cSOd!<,3_:'\R/N#XaJ;RnANo.[Hdq/*^T5\P[)=deQB(*W@fO$;LVANAu!4
%j3[GkO-"ijoEBsP`7TD$dH&+%$EJm.1$[Vq*.V8,9r$HBohA[dMg9BMJD=o^=bQ_'Oac;[qqfih0ai]!M)
%e0dHQM,/QZ;kNj7-U.40n3J;Fo@2^U>_>i'Osb%ePI9Jn>4Q2UH*Wb6K[u;6hl\?aXs`,092.SFG!5YE4o'7a^piVYioDQJj=0*L
%.H9D;ioDQJj:`b5SkL>(-BY5(#+qeUjYqG,Dt'"Ki$lE,QVW*#q$m>rZ[;ZK-2`uk^MhXumgo+jGd6qpqPh-r52D";D7OVriQ?m^
%)'g+ErQlp$8QZn?L(oQR**pUicu0mNcF1K!WgufO,EIe?I4:"*B"i+E>X:P+oD>L?oV"IWraLrVc1X