/*
%  NomicMUD: A MUD server written in Prolog
%  Maintainer: Douglas Miles
%  Dec 13, 2035
%
%  Bits and pieces:
%
%    LogicMOO, Inform7, FROLOG, Guncho, PrologMUD and Marty's Prolog Adventure Prototype
%
%  Copyright (C) 2004 Marty White under the GNU GPL
%  Sept 20, 1999 - Douglas Miles
%  July 10, 1996 - John Eikenberry
%
%  Logicmoo Project changes:
%
%  Main file.
%
*/

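% each_live_agent(+NewGoal, +S0, -S2): run NewGoal for every live agent,
% threading the world state through. The two variants below filter the
% agent list by a sense modality or by an arbitrary precondition.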
each_live_agent(NewGoal, S0, S2) :-
  get_live_agents(List, S0),
  apply_all(List, NewGoal, S0, S2).

each_sensing_agent(Sense, NewGoal, S0, S2) :-
  dmust((get_sensing_objects(Sense, List, S0),
         List \== [],
         %dbug(each_sensing_agent(Sense)=(List=NewGoal)),
         apply_all(List, NewGoal, S0, S2))).

each_agent(Precond, NewGoal, S0, S2) :-
  get_some_agents(Precond, List, S0),
  apply_all(List, NewGoal, S0, S2).


% -----------------------------------------------------------------------------
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%  CODE FILE SECTION
:- nop(ensure_loaded('adv_agent_model')).
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Protocol:
%   Agent: request(Action, Action_Id)
%   Simulation: respond(Action_Id, LogicalResponse/Percept, EnglishResponse)
%   Action(Verb, ...)
%   failure(Reason)
%   moved(Spatial, obj, from, how, to)
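%
% An illustrative exchange (the action terms here are made up, not fixed by
% the engine): an agent might issue request(take(sword), id42), and the
% simulation might answer respond(id42, moved(spatial, sword, floor, take,
% player), 'Taken.') on success, or respond(id42, failure(too_heavy), ...)
% on failure.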

% -----------------------------------------------------------------------------
% The state of an Agent is stored in its memory.
% Agent memory is stored as a list in reverse chronological order, implicitly
%   ordering and timestamping everything.
% Types of memories:
%   inst(A)         - identity of agent (?)
%   timestamp(T)    - agent may add a new timestamp whenever a sequence point
%                     is desired.
%   [percept]       - received perceptions.
%   model(Spatial, [...]) - Agent's internal model of the Spatial world.
%                     Model is a collection of timestamped relations.
%   goals([...])    - states the agent would like to achieve, or
%                     acts the agent would like to be able to do.
%   plan(S, O, B, L) - plans for achieving goals.
%   affect(...)     - Agent's current affect.
% Multiple plans, goals, models, affects, etc. may be stored, for introspection
%   about previous internal states.
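%
% A sketch of what one agent's memory list might look like (the concrete
% values are invented for illustration, newest entries first):
%   [ goals([reach(kitchen)]),
%     model(spatial, [h(spatial, in, player, hall, 3)]),
%     timestamp(3),
%     inst(player)
%   ]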

% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%  CODE FILE SECTION
:- nop(ensure_loaded('adv_agent_goal')).
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

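% add_goal(+Goal, +Mem0, -Mem2): push Goal onto the front of the agent's
% goal list; add_goals/3 does the same for a list of goals at once.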
add_goal(Goal, Mem0, Mem2) :-
  bugout('adding goal ~w~n', [Goal], planner),
  forget(goals(OldGoals), Mem0, Mem1),
  append([Goal], OldGoals, NewGoals),
  memorize(goals(NewGoals), Mem1, Mem2).

add_goals(Goals, Mem0, Mem2) :-
  forget(goals(OldGoals), Mem0, Mem1),
  append(Goals, OldGoals, NewGoals),
  memorize(goals(NewGoals), Mem1, Mem2).

% The pseudo-action 'a' asks an autonomous agent to choose its own next
% goal action; any other action is appended to the agent's todo queue.
add_todo(Auto, Mem0, Mem3) :- Auto = a,
  member(inst(Agent), Mem0),
  autonomous_decide_goal_action(Agent, Mem0, Mem3), !.
add_todo(Action, Mem0, Mem2) :-
  forget(todo(OldToDo), Mem0, Mem1),
  append(OldToDo, [Action], NewToDo),
  memorize(todo(NewToDo), Mem1, Mem2).

add_todo_all([], Mem0, Mem0).
add_todo_all([Action|Rest], Mem0, Mem2) :-
  add_todo(Action, Mem0, Mem1),
  add_todo_all(Rest, Mem1, Mem2).
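
% For example (illustrative only; the action terms are invented), queueing
% two actions at once:
%   ?- add_todo_all([look, goto(north)], Mem0, Mem).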



% -----------------------------------------------------------------------------

% do_introspect(Query, Answer, Memory)
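% Answers introspective queries (path/2, whereis/2, whois/2, whatis/2)
% against the agent's remembered world model.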
do_introspect(path(Spatial, There), Answer, Memory) :-
  thought(inst(Agent), Memory),
  thought_model(Spatial, ModelData, Memory),
  in_model(h(Spatial, _How, Agent, Here, _T), ModelData),
  find_path(Spatial, Here, There, Route, ModelData),
  Answer = ['Model is', ModelData, '\nShortest path is', Route].

do_introspect(whereis(Spatial, X), Answer, Memory) :-
  remember_whereis(Spatial, X, Answer, Memory).
do_introspect(whereis(Spatial, X), Answer, Memory) :-
  thought(inst(Agent), Memory),
  sensory_model(Sense, Spatial),
  Answer = [subj(Agent), person('don\'t', 'doesn\'t'),
            'recall ever ', ing(Sense), ' a "', X, '".'].

do_introspect(whois(Spatial, X), Answer, Memory) :-
  remember_whereis(Spatial, X, Answer, Memory).
do_introspect(whois(_Spatial, X), [X, is, X, '.'], _Memory).

do_introspect(whatis(Spatial, X), Answer, Memory) :-
  remember_whereis(Spatial, X, Answer, Memory).
do_introspect(whatis(_Spatial, X), [X, is, X, '.'], _Memory).



remember_whereis(Spatial, Thing, Answer, Memory) :-
  thought(inst(Agent), Memory),
  thought_model(Spatial, ModelData, Memory),
  in_model(h(Spatial, How, Thing, Where, T), ModelData),
  How \= exit(_),
  Answer = ['At time', T, subj(Agent), 'saw the', Thing, How, the, Where, '.'].
remember_whereis(Spatial, Here, Answer, Memory) :-
  thought(inst(Agent), Memory),
  thought_model(Spatial, ModelData, Memory),
  in_model(h(Spatial, _How, Agent, Here, _T), ModelData),
  Answer = 'Right here.'.
remember_whereis(Spatial, There, Answer, Memory) :-
  thought(inst(Agent), Memory),
  thought_model(Spatial, ModelData, Memory),
  in_model(h(Spatial, _How, Agent, Here, _T), ModelData),
  find_path(Spatial, Here, There, Route, ModelData),
  Answer = ['To get to the', There, ', ', Route].
remember_whereis(Spatial, There, Answer, Memory) :-
  thought_model(Spatial, ModelData, Memory),
  ( in_model(h(Spatial, exit(_), _, There, _T), ModelData)
  ; in_model(h(Spatial, exit(_), There, _, _T), ModelData)
  ),
  Answer = 'Can''t get there from here.'.


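% console_decide_action(+Agent, +Mem0, -Mem1): prompt on the agent's
% console, read and parse one line of input (an empty line means 'wait'),
% and queue the resulting action on the agent's todo list.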
console_decide_action(Agent, Mem0, Mem1) :-
  thought(timestamp(T0), Mem0),
  %dbug(read_pending_codes(In,Codes,Found,Missing)),
  repeat,
  notrace((ttyflush,
    player_format('[~p: ~p] ==> ', [T0, Agent]), ttyflush,
    agent_to_input(Agent, In),
    dmust(is_stream(In)),
    readtokens(In, [], Words0),
    read_pending_input(In, _, []),
    (Words0 == [] -> Words = [wait] ; Words = Words0))),
  parse(Words, Action, Mem0),
  !,
  % Echo the parsed action unless it is just the raw token list.
  (Action =.. Words ; player_format('~w~n', [Action])),
  add_todo(Action, Mem0, Mem1), ttyflush, !.


% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%  CODE FILE SECTION
:- dbug(ensure_loaded('adv_agents')).
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

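% decide_action(+Agent, +Mem0, -Mem1): choose the agent's next action.
% Clauses are tried in order, dispatching on the agent's inherited role
% (telnet, console, autonomous, memorize), with a logging fallback.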
% Telnet client
decide_action(Agent, Mem0, Mem1) :-
  notrace(thought(inherit(telnet, t), Mem0)), !,
  dmust(telnet_decide_action(Agent, Mem0, Mem1)).

% Stdin client
decide_action(Agent, Mem0, Mem1) :-
  notrace(thought(inherit(console, t), Mem0)), !,
  current_input(In), % agent_to_input(Agent,In),
  (tracing -> catch(sleep(3), _, (nortrace, notrace, break)) ; true),
  wait_for_input([In, user_input], Found, 0.1),
  % read_pending_codes(In,Codes,Missing),
  (Found == [] -> Mem0 = Mem1
  ; (notrace, console_decide_action(Agent, Mem0, Mem1))).

% Autonomous
decide_action(Agent, Mem0, Mem3) :-
  thought(inherit(autonomous, t), Mem0),
  maybe_autonomous_decide_goal_action(Agent, Mem0, Mem3).

decide_action(_Agent, Mem, Mem) :-
  thought(inherit(memorize, t), Mem), !.  % Recorders don't decide much.
decide_action(Agent, Mem0, Mem0) :-
  bugout('decide_action(~w) FAILED!~n', [Agent], general).

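% Agents run in two passes per tick: pass 1 consumes percepts and decides,
% pass 2 would run any remaining todos (currently a no-op, see below).
% Each pass has a FAILED fallback clause so one broken agent cannot stall
% the simulation loop.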
run_agent_pass_1(Agent, S0, S) :-
  with_agent_console(Agent, run_agent_pass_1_0(Agent, S0, S)).
run_agent_pass_1(Agent, S0, S0) :-
  bugout('run_agent_pass_1(~w) FAILED!~n', [Agent], general).

run_agent_pass_2(Agent, S0, S) :-
  with_agent_console(Agent, run_agent_pass_2_0(Agent, S0, S)).
run_agent_pass_2(Agent, S0, S0) :-
  bugout('run_agent_pass_2(~w) FAILED!~n', [Agent], general).


/*
with_agent_console(Agent, Goal) :-
   adv:console_info(Id, Alias, InStream, OutStream, Host, Peer, Agent),
   nop(adv:console_info(Id, Alias, InStream, OutStream, Host, Peer, Agent)),
   current_input(WasIn),
   InStream \== WasIn, !,
   setup_call_cleanup(set_input(InStream), with_agent_console(Agent, Goal), set_input(WasIn)).
*/
with_agent_console(Agent, Goal) :-
  setup_call_cleanup(
     asserta(adv:current_agent(Agent), E),
     Goal,
     erase(E)), !.

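% run_agent_pass_1_0(+Agent, +S0, -S): one perception/decision cycle.
% Percepts are drained from the agent's queue, folded into memory, a new
% action is decided, and then the agent's todo list is executed against
% the world state.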
run_agent_pass_1_0(Agent, S0, S) :-
  must_input_state(S0),
  %dmust((
  undeclare(memories(Agent, Mem0), S0, S1),
  undeclare(perceptq(Agent, PerceptQ), S1, S2),
  thought(timestamp(T0), Mem0),
  % Advance the clock only if there was something to perceive.
  (PerceptQ == [] -> (T1 = T0, Mem0 = Mem1)
  ; (T1 is T0 + 1, memorize(timestamp(T1), Mem0, Mem1))),
  process_percept_list(Agent, PerceptQ, T1, Mem1, Mem2),
  memorize_list(PerceptQ, Mem2, Mem3),
  decide_action(Agent, Mem3, Mem4),
  declare(memories(Agent, Mem4), S2, S3),
  declare(perceptq(Agent, []), S3, S4),
  % dbug(timestamp(Agent, T1)),
  apply_first_arg_state(Agent, do_todo(), S4, S),
  % pprint(S, general),
  notrace(must_output_state(S)), !.

% Pass 2 is currently short-circuited to a no-op; the second clause is kept
% for when deferred todos should run in a separate pass.
run_agent_pass_2_0(_Agent, S0, S0) :- !.
run_agent_pass_2_0(Agent, S0, S) :-
  must_input_state(S0),
  apply_first_arg_state(Agent, do_todo(), S0, S),
  notrace(must_output_state(S)), !.

% --------