import java.awt.*;
import java.io.*;
import AgentWorld.*;

// The 'main' class for running the 'Agent World' of CS 540.
//
//   - copyrighted 1997, 1998 by Jude Shavlik
//     (for educational use only)
//

// This is the HW2 (Decision-Tree Learning) version of RunAgentWorld.

public class RunAgentWorld 
{
  public static void main(String[] args)
  { boolean barrenWorld = false; // For debugging purposes, it can be helpful
                                 // to set this to true and have only your own
                                 // player out there.

    boolean sparseWorld = true;  // For slower computers, have fewer objects
                                 // in the world (the dimensions of the
                                 // world can't be changed).

    boolean collectingExamples = true; // As explained below, first you collect
                                       // training examples, then you restart
                                       // and build a decision tree with them.

    try 
    { // Need an AgentWindow instance with which to communicate.
      // This argument indicates whether the game is being displayed.
      // (You might wish to turn this off in the machine-learning HW
      // to conserve CPU cycles for training.)
      AgentWindow aw = new AgentWindow(true);

      // *************************************************************
      // *************************************************************
      //
      // In HW2 you need to:
      //
      //   1) Collect some training examples
      //
      //   2) In a SEPARATE RUN, use the collected training examples
      //      to induce (i.e., learn) some decision trees that'll
      //      control your HW2 agent.

      // This newly added ExampleCollector class is used to collect
      // training examples for later use in the creation of a decision tree.
      // Notice you specify the name of the file where training examples
      // should be stored.  (Explicitly naming this file seems to provide
      // the most flexibility; e.g., you might wish to experiment with
      // a 'good' and a 'bad' training set and want to collect both data sets
      // at the same time.)

      // Here's an overview of how examples are collected:
      //
      //   When in SINGLE-STEPPING mode, the ExampleCollector agents
      //   are drawn as squares.  Each square is internally divided into
      //   a 3x3 grid (i.e., like a tic-tac-toe board).  You mouse-click on
      //   the various directions (N, NE, E, ..., W, NW, and 'standStill'
      //   [the center cell]) to indicate whether a move in that direction is:
      //
      //        GOOD - the first click turns the cell GREEN (for GO or GoodMove)
      //        BAD  - another click turns the cell RED (for STOP or BadMove)
      //        INDIFFERENT - clicking again returns the cell to WHITE,
      //                      which means moving in this direction is
      //                      neither good nor bad.
      //
      //   (A minimal sketch of this click cycle appears in the
      //   nextLabelColor method at the bottom of this class.)
      //
      //   It is fine to consider only the four compass directions (N, E, W, S),
      //   but there is no harm in recommending for or against other directions;
      //   if you later train decision trees only for N, E, W, S (which is
      //   what is recommended), these other training examples will simply be ignored.
      //
      //   The FIRST ExampleCollector agent you create will, when NOT in
      //   SINGLE-STEPPING mode, move toward the mouse, provided a mouse
      //   button is being held down.  Otherwise it does a random walk.
      //   This design lets you steer the agent wherever you wish in order
      //   to provide some good training examples.

      // If you exit and then restart the Agent World (and don't change
      // the collectingExamples flag), new examples will be APPENDED to your
      // previous examples file.  If you don't want that to be the case,
      // then simply delete the examples file before calling RunAgentWorld.
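      // As a minimal sketch (optional; the 'freshExamplesFile' flag below is
      // hypothetical, not part of the assignment), the deletion can also be
      // done programmatically at the start of a run:
      boolean freshExamplesFile = false; // set true to discard old examples
      if (collectingExamples && freshExamplesFile)
      { File oldExamples = new File("examples.txt");
        if (oldExamples.exists() && oldExamples.delete())
          Utils.println("Deleted old examples file; collecting a fresh training set.");
      }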

      // DO NOT EDIT THE TRAINING SET FILE, AS THAT IS VERY LIKELY TO CAUSE
      // PROBLEMS WHEN THE BUILT-IN CODE LATER READS THE EXAMPLES BACK IN.
      // (If you wish to edit the examples file, that's ok, but do so at your
      // own risk and be sure you understand the structure of that file.)

      // Be aware that during grading we'll create our own set(s) of
      // training examples and will provide them to your ID3 player.
      // So don't make any alterations to the representation of training
      // examples.  (It is fine if your code re-represents the training
      // examples internally by, say, summarizing information, but your
      // code must do that summarization itself, as opposed to you manually
      // editing the training-examples file.)
      
      // The examples file will be dumped in the directory from which you're
      // running Java.
      if (collectingExamples)
      {
        aw.addPlayer(new ExampleCollector(aw, true, "examples.txt"),
                     "Student1", Color.white);
        // To simplify the collection of examples, you can create examples
        // for several agents simultaneously.  Be aware that you don't
        // need to train every ExampleCollector agent on every step.
        // Try to collect 'educational' training examples, striving to
        // have a good representation of the possible "scenes" an agent
        // will experience.
        aw.addPlayer(new ExampleCollector(aw, true, "examples.txt"),
                     "Student2", Color.white);
        aw.addPlayer(new ExampleCollector(aw, true, "examples.txt"),
                     "Student3", Color.white);
        aw.addPlayer(new ExampleCollector(aw, true, "examples.txt"),
                     "Student4", Color.white);
        aw.addPlayer(new ExampleCollector(aw, true, "examples.txt"),
                     "Student5", Color.white);

        // After you've built your training set, SET collectingExamples = false,
        // REPLACE ShavlikID3player BELOW SO THAT IT USES YOUR OWN
        // DECISION-TREE-LEARNER PLAYER, AND RESTART THE AGENT WORLD.
        // Be sure to specify the same file name as used to store your training
        // examples.  (You'll probably wish to leave the simulator configured so
        // that you still have to manually push the START button; this way
        // you can make sure your decision trees have been constructed
        // before the simulator starts.)
      }
      else
      { // Use YOUR ID3 player here, of course.
        aw.addPlayer(new ShavlikID3player(aw, true, "examples.txt"),
                     "ID3 Player", Color.white);
      }

      // *************************************************************
      // *************************************************************

      // We'll use a simplified world as our 'testbed' in this HW
      // in order to keep the number of examples needed reasonable,
      // though feel free to play around with a richer environment.
      // You might wish to start by setting barrenWorld = true and
      // seeing if you can teach the agent to avoid the walls.
      // That should produce a pretty simple decision tree (one
      // interior node, for each of the four main compass directions,
      // is all that is needed; see the classifyDirection sketch at
      // the bottom of this class).
      // 

      // Create a malicious player that chases other players.
      if (!barrenWorld) aw.addAssassin("The Jackal");
 
      // Add some players whose scores aren't reported on the score board.
      if (!barrenWorld && !sparseWorld) aw.addAnonymousAssassins(1);
      if (!barrenWorld && !sparseWorld) aw.addAnonymousRandomWalkers(1);
      if (!barrenWorld && !sparseWorld) aw.addAnonymousSmartPlayers(1);
 
      // For simplicity, don't have any minerals (at least initially) in HW2.
      if (!barrenWorld && !sparseWorld) aw.addMinerals(50);
      if (!barrenWorld) aw.addVegetables(50); // 100 is the maximum allowed.

      aw.setClockPeriod(250);

      // You can request that the 'manager' wait for you to 
      // press GO before each move.
      // In this homework it is probably best to start in single-stepping mode.
      aw.setSingleStepMode(true);
    }
    
    catch(Exception e)
    {
      Utils.println("Exception encountered in main method: " + e);
      Utils.println("The following indicates the location of the problem:");
      e.printStackTrace(System.err);
      Utils.exit(-1);
    }
  }
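
  // A minimal sketch (hypothetical; ExampleCollector's real internals may
  // differ) of the click cycle described above: each mouse click on a cell
  // of an agent's 3x3 grid advances that direction's label
  // WHITE -> GREEN (GOOD) -> RED (BAD) -> WHITE (INDIFFERENT).
  static Color nextLabelColor(Color current)
  {
    if (Color.white.equals(current)) return Color.green; // first click: GOOD
    if (Color.green.equals(current)) return Color.red;   // second click: BAD
    return Color.white;                                  // third click: INDIFFERENT again
  }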

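  // A minimal sketch (hypothetical; your learner should induce this shape
  // from the data rather than hard-code it) of the one-interior-node tree
  // described above, written out for a single direction: the root tests one
  // sensed feature and each branch is a leaf label.
  static String classifyDirection(boolean nearestObjectThatWayIsWall)
  {
    return nearestObjectThatWayIsWall ? "BAD" : "INDIFFERENT"; // two leaves under one root test
  }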
}
