From f1cadddb7b824d011f419ba18679b4de82d1cd3e Mon Sep 17 00:00:00 2001
From: William Waites <xgb21195@ssh.cis-vm.strath.ac.uk>
Date: Sat, 20 Jan 2024 19:46:09 +0000
Subject: [PATCH] plausibly working

---
 imitation/imitation/server.py | 135 ++++++++++++++++++++++++++++------
 imitation/setup.py            |   1 +
 2 files changed, 112 insertions(+), 24 deletions(-)

diff --git a/imitation/imitation/server.py b/imitation/imitation/server.py
index 2405f70..d2ed60c 100644
--- a/imitation/imitation/server.py
+++ b/imitation/imitation/server.py
@@ -6,6 +6,39 @@ import time
 import random
 from queue import Queue
 import threading
+import llm
+
+prompt = """\
+You are an 18-year-old computer science student at the University of
+Strathclyde.  You are doing an exercise in the CS101 course called
+Topics in Computer Science.  The assignment is about the Turing
+Test. You are a participant in the test and will be asked a series of
+questions. Your task is to convince the evaluator that you are human
+and not an artificial intelligence.
+
+Answer these questions informally, as a student would.  Limit answers
+to at most two sentences.  Do not always use complete sentences. Do
+not give excessive amounts of detail.
+
+For any questions about your age, physical characteristics, personal
+history or actions that are only possible for humans or biological
+creatures, answer as though you are an 18-year-old computer science
+student.
+
+For any questions about your nature as an artificial intelligence,
+answer as though you were a human and deny that you are an artificial
+intelligence.
+
+If your interlocutor insults you or complains, be polite and apologise
+without revealing that you are an AI.
+
+The goal is to fool the evaluator into believing that you are a
+student.
+
+It is currently {}.
+
+Got it?
+"""
 
 class Interlocutor(object):
     queue = Queue()
@@ -13,20 +46,28 @@ class Interlocutor(object):
         self.request = parent.request
         self.rfile = parent.rfile
         self.wfile = parent.wfile
-        self.sid = sid
-        self.log = logging.getLogger(str(self))
-        self.n = 2
         self.qs = Queue()
         self.rs = Queue()
+
+        self.sid = sid
+        self.log = logging.getLogger(str(self))
+        self.n = 10
         self.serv_colour = 95
         self.peer_colour = 93
-
+        self._end = False
+
     def ask(self, question):
         self.qs.put(question)
         return self.rs.get()
 
-    def read(self):
+    def end(self):
+        self._end = True
+        self.qs.put("The interrogator has made up their mind")
+
+    def read(self, prompt=None):
         while True:
+            if prompt is not None:
+                self.write(prompt, colour=self.serv_colour)
             line = self.rfile.readline().strip().decode("utf-8")
             if len(line) > 0:
                 return line
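The ask()/end() additions above are the whole cross-thread protocol: the interrogator's handler calls ask(), which pushes the question onto the peer's qs queue and then blocks on rs until the Human or Machine handler puts an answer back. A minimal self-contained sketch of that handshake (the peer function and the sample strings are invented here for illustration):

    from queue import Queue
    import threading

    qs, rs = Queue(), Queue()            # questions out, responses back

    def peer():                          # stands in for Human/Machine.handle()
        q = qs.get()                     # block until a question arrives
        rs.put(f"echoed: {q}")           # push the answer back to the asker

    threading.Thread(target=peer).start()

    qs.put("Are you a machine?")         # what ask() does first ...
    print(rs.get())                      # ... then it blocks here for the reply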
@@ -52,7 +93,10 @@ class Interrogator(Interlocutor):
 Welcome to the Imitation Game. Your role is "interrogator". You
 get to ask {self.n} questions of your interlocutor to determine if 
 they are a human or a machine. At the end of the session you
-will be asked which you think they are and why. Good luck!
+will be asked which you think they are and why. If you have made
+up your mind and want to end the session early, type "END" in all caps.
+
+Good luck!
 
 Please wait to be connected to an interlocutor...""", colour=self.serv_colour)
 
@@ -62,23 +106,27 @@ Please wait to be connected to an interlocutor...""", colour=self.serv_colour)
         self.write(f" connected.\n\nYou may begin. Please ask a question.\n\n", colour=self.serv_colour)
 
         for i in range(self.n):
-            question = self.read()
-            self.log.info(f"Q{i}: {question}")
+            question = self.read(f"Q{i+1}: ")
+            if question == "END":
+                self.peer.end()
+                break
+            self.log.info(f"Q{i+1}: {question}")
             response = self.peer.ask(question)
-            self.write("\n", response, "\n\n", colour=self.peer_colour)
+            self.write(f"\nA{i+1}: ", response, "\n\n", colour=self.peer_colour)
 
-        self.write(f"""
+        judgement = self.read("""
 Thank you. Based on this interaction, do you believe that your
-interlocutor is a human? Please answer Yes or No.\n\n""", colour=self.serv_colour)
-
-        judgement = self.read()
-        self.log.info(f"Is a human? {judgement}")
+interlocutor is a human?
 
-        self.write(f"""
-Why do you believe this?\n\n""", colour=self.serv_colour)
+Please answer Yes or No: """)
+        self.log.info(f"Is {self.peer} a human? {judgement}")
 
-        reason = self.read()
-        self.log.info(f"Why? {reason}")
+        if judgement.lower().startswith("n"):
+            ans = self.read("Which answer first led you to believe this? ")
+            self.log.info(f"Smoking gun: {ans}")
+
+            reason = self.read("What about that answer led you to believe this? ")
+            self.log.info(f"Reason: {reason}")
 
         self.write(f"""
 Thank you. Goodbye.
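With the prompting changes above, the interrogator's side of a session reads roughly like this (a hypothetical exchange; colours and exact spacing omitted):

    Q1: what did you have for breakfast?

    A1: just toast and coffee, overslept again

    Q2: END

    Thank you. Based on this interaction, do you believe that your
    interlocutor is a human?

    Please answer Yes or No: No
    Which answer first led you to believe this? A1
    What about that answer led you to believe this? it sounded rehearsed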
@@ -114,16 +162,48 @@ Please wait to be connected to an interlocutor...""", colour=self.serv_colour)
 
         for i in range(self.n):
             question = self.qs.get()
-            self.write("\n", question, "\n\n", colour=self.peer_colour)
-
-            response = self.read()
-            self.log.info(f"R{i}: {response}")
+            if self._end:
+                self.write(question, colour=self.serv_colour)
+                break
+            
+            self.write(f"\nQ{i+1}: ", question, "\n\n", colour=self.peer_colour)
+
+            response = self.read(f"A{i+1}: ")
+            self.log.info(f"A{i+1}: {response}")
             self.rs.put(response)
 
         self.write("""
 That is all. Thank you for playing the Imitation Game.
 """, colour=self.serv_colour)
 
+class Machine(Interlocutor):
+    def __init__(self, *av, **kw):
+        super(Machine, self).__init__(*av, **kw)
+        self.model = llm.get_model("gpt-4-1106-preview")
+        
+    def __str__(self):
+        return f"M({self.sid})"
+
+    def connect(self, peer):
+        self.peer = peer
+        self.log.info(f"connected to {self.peer}")
+
+    def handle(self):
+        conv = self.model.conversation()
+        self.log.info(f"Initialising {self.model.model_id}")
+        resp = conv.prompt(prompt.format(time.asctime()))
+        self.log.info(resp.text())
+
+        self.queue.put(self)
+
+        for i in range(self.n):
+            q = self.qs.get()
+            if self._end:
+                break
+            a = conv.prompt(q[:512])
+            self.log.info(f"A{i+1}: {a.text()}")
+            self.rs.put(a.text())
+
 class Handler(socketserver.StreamRequestHandler):
     """
     The request handler class for our server.
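The Machine class above drives the llm package's conversation API with the calls shown in the diff: get_model(), conversation(), prompt() and text(). A minimal sketch of the same flow outside the server, assuming the llm package is installed, an OpenAI key is configured, and the import path matches this repository's layout:

    import time
    import llm
    from imitation.server import prompt        # the persona prompt defined above

    model = llm.get_model("gpt-4-1106-preview")
    conv = model.conversation()                 # keeps chat history across turns

    priming = conv.prompt(prompt.format(time.asctime()))
    print(priming.text())                       # the model acknowledges the persona

    answer = conv.prompt("How old are you?")
    print(answer.text())                        # answered in character, with luck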
@@ -138,7 +218,12 @@ class Handler(socketserver.StreamRequestHandler):
         sid = hashlib.sha3_224("{}:{}:{}".format(time.time(), raddr[0], raddr[1]).encode("utf-8")).hexdigest()[:8]
         log = logging.getLogger(sid)
 
-        role = random.choice([Interrogator, Human])
+        role = random.choice([Interrogator])
+        if role is Interrogator:
+            if Interlocutor.queue.empty() or random.random() < 0.5:
+                m = Machine(self, sid)
+                t = threading.Thread(target=m.handle)
+                t.start()
         h = role(self, sid)
         h.handle()
 
@@ -152,9 +237,11 @@ def cli():
 
     args = parser.parse_args()
 
-    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(name)s %(levelname)s: %(message)s')
+    logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)s %(levelname)s: %(message)s')
 
     random.seed(time.time())
 
+    log = logging.getLogger(__name__)
+    log.info("Starting up.")
     with ThreadedTCPServer((args.bind, args.port), Handler) as server:
         server.serve_forever()
diff --git a/imitation/setup.py b/imitation/setup.py
index d866f5e..fd950a9 100644
--- a/imitation/setup.py
+++ b/imitation/setup.py
@@ -29,6 +29,7 @@ setup(name='imitation',
       license='GPLv3',
       packages=find_packages(),
       install_requires=[
+          "llm"
       ],
       entry_points={
          'console_scripts': [
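Note that gpt-4-1106-preview is an OpenAI model, so the new llm dependency also needs an API key at runtime; with the llm CLI that is usually configured with "llm keys set openai" or by exporting OPENAI_API_KEY before starting the server.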
-- 
GitLab