Skip to content

Commit

Permalink
Merge pull request #45 from CLSFramework/fix-server-side
Browse files Browse the repository at this point in the history
Fix-server-side
  • Loading branch information
naderzare authored Nov 11, 2024
2 parents 78a5a91 + 581140e commit 131506b
Show file tree
Hide file tree
Showing 16 changed files with 104 additions and 151 deletions.
3 changes: 2 additions & 1 deletion src/grpc-client/grpc_client_player.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -788,14 +788,15 @@ bool GrpcClientPlayer::GetBestPlannerAction()
<< std::endl;
return false;
}
ActionChainHolder::instance().updateBestChain(best_action.index());

auto agent = M_agent;

#ifdef DEBUG_CLIENT_PLAYER
std::cout << "best action index:" << best_action.index() << std::endl;
#endif

if (Bhv_PlannedAction().execute(agent, best_action.index()))
if (Bhv_PlannedAction().execute(agent))
{
#ifdef DEBUG_CLIENT_PLAYER
std::cout << "PlannedAction" << std::endl;
Expand Down
11 changes: 0 additions & 11 deletions src/player/planner/actgen_cross.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -59,17 +59,6 @@ ActGen_Cross::generate( std::vector< ActionStatePair > * result,

const std::vector< CooperativeAction::Ptr > & courses = CrossGenerator::instance().courses( wm );

for (auto & course : courses)
{
if (path.empty())
{
course->setParentIndex(-1);
}
else
{
course->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
}

//
// add pass course candidates
Expand Down
8 changes: 0 additions & 8 deletions src/player/planner/actgen_direct_pass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -305,14 +305,6 @@ ActGen_DirectPass::generate( std::vector< ActionStatePair > * result,
kick_step,
false,
"actgenDirect" ) );
if (path.empty())
{
action->setParentIndex(-1);
}
else
{
action->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
++s_action_count;
++generated_count;
action->setIndex( s_action_count );
Expand Down
11 changes: 0 additions & 11 deletions src/player/planner/actgen_self_pass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -69,17 +69,6 @@ ActGen_SelfPass::generate( std::vector< ActionStatePair > * result,
const std::vector< CooperativeAction::Ptr > &
cont = SelfPassGenerator::instance().courses( wm );

for (auto & course : cont)
{
if (path.empty())
{
course->setParentIndex(-1);
}
else
{
course->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
}
//
// add dribble candidates
//
Expand Down
8 changes: 0 additions & 8 deletions src/player/planner/actgen_shoot.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -108,14 +108,6 @@ ActGen_Shoot::generate( std::vector< ActionStatePair > * result,
shoot_spend_time,
1,
"shoot" ) );
if (path.empty())
{
action->setParentIndex(-1);
}
else
{
action->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}

result->push_back( ActionStatePair( action, result_state ) );
}
11 changes: 0 additions & 11 deletions src/player/planner/actgen_short_dribble.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -70,17 +70,6 @@ ActGen_ShortDribble::generate( std::vector< ActionStatePair > * result,
const std::vector< CooperativeAction::Ptr > &
cont = ShortDribbleGenerator::instance().courses( wm );

for (auto & course : cont)
{
if (path.empty())
{
course->setParentIndex(-1);
}
else
{
course->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
}
//
// add dribble candidates
//
Expand Down
8 changes: 0 additions & 8 deletions src/player/planner/actgen_simple_dribble.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -191,14 +191,6 @@ ActGen_SimpleDribble::generate( std::vector< ActionStatePair > * result,
holder_reach_step - 2,
"actgenDribble" ) );

if (path.empty())
{
action->setParentIndex(-1);
}
else
{
action->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
++s_action_count;
++generated_count;
action->setIndex( s_action_count );
Expand Down
11 changes: 0 additions & 11 deletions src/player/planner/actgen_strict_check_pass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,17 +62,6 @@ ActGen_StrictCheckPass::generate( std::vector< ActionStatePair > * result,
M_generate_direct_pass,
M_generate_lead_pass,
M_generate_through_pass );
for (auto & course : courses)
{
if (path.empty())
{
course->setParentIndex(-1);
}
else
{
course->setParentIndex(( *( path.rbegin() ) ).action().uniqueIndex());
}
}
//
// add pass course candidates
//
Expand Down
69 changes: 57 additions & 12 deletions src/player/planner/action_chain_graph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -524,6 +524,7 @@ ActionChainGraph::calculateResultBestFirstSearch( const WorldModel & wm,

auto copy_action = std::make_shared<CooperativeAction>(it->action());
auto copy_state = std::make_shared<PredictState>(it->state());
copy_action->setParentIndex(parent_index);
auto new_action_state_pair = std::shared_ptr<ActionStatePair>(new ActionStatePair(copy_action, copy_state));
M_all_results[copy_action->uniqueIndex()] = std::make_pair(new_action_state_pair, ev);

Expand Down Expand Up @@ -779,6 +780,7 @@ ActionChainGraph::write_chain_log( const std::string & pre_log_message,
for ( size_t i = 0; i < path.size(); ++i )
{
const CooperativeAction & a = path[i].action();
int unique_index = a.uniqueIndex();
const PredictState * s0;
const PredictState * s1;

Expand All @@ -798,16 +800,16 @@ ActionChainGraph::write_chain_log( const std::string & pre_log_message,
case CooperativeAction::Hold:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: hold (%s) t=%d",
i, a.description(), s1->spendTime() );
"__ %d: u%d: hold (%s) t=%d",
i, unique_index, a.description(), s1->spendTime() );
break;
}

case CooperativeAction::Dribble:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: dribble (%s[%d]) t=%d unum=%d target=(%.2f %.2f)",
i, a.description(), a.index(), s1->spendTime(),
"__ %d: u%d: dribble (%s[%d]) t=%d unum=%d target=(%.2f %.2f)",
i, unique_index, a.description(), a.index(), s1->spendTime(),
s0->ballHolderUnum(),
a.targetPoint().x, a.targetPoint().y );
break;
Expand All @@ -816,8 +818,8 @@ ActionChainGraph::write_chain_log( const std::string & pre_log_message,
case CooperativeAction::Pass:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: pass (%s[%d]) t=%d from[%d](%.2f %.2f)-to[%d](%.2f %.2f)",
i, a.description(), a.index(), s1->spendTime(),
"__ %d: u%d: pass (%s[%d]) t=%d from[%d](%.2f %.2f)-to[%d](%.2f %.2f)",
i, unique_index, a.description(), a.index(), s1->spendTime(),
s0->ballHolderUnum(),
s0->ball().pos().x, s0->ball().pos().y,
s1->ballHolderUnum(),
Expand All @@ -828,8 +830,8 @@ ActionChainGraph::write_chain_log( const std::string & pre_log_message,
case CooperativeAction::Shoot:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: shoot (%s) t=%d unum=%d",
i, a.description(), s1->spendTime(),
"__ %d: u%d: shoot (%s) t=%d unum=%d",
i, unique_index, a.description(), s1->spendTime(),
s0->ballHolderUnum() );

break;
Expand All @@ -838,18 +840,61 @@ ActionChainGraph::write_chain_log( const std::string & pre_log_message,
case CooperativeAction::Move:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: move (%s)",
i, a.description(), s1->spendTime() );
"__ %d: u%d: move (%s)",
i, unique_index, a.description(), s1->spendTime() );
break;
}

default:
{
dlog.addText( Logger::ACTION_CHAIN,
"__ %d: ???? (%s)",
i, a.description(), s1->spendTime() );
"__ %d: u%d: ???? (%s)",
i, unique_index, a.description(), s1->spendTime() );
break;
}
}
}
}

/*-------------------------------------------------------------------*/
/*!
*/
void
ActionChainGraph::updateBestChain(int unique_index)
{
std::cout<<"updateBestChain"<<std::endl;
M_result.clear();
M_best_evaluation = -std::numeric_limits< double >::max();

dlog.addText( Logger::ACTION_CHAIN,
"updateBestChain: unique_index=%d", unique_index );

std::cout<<"updateBestChain: unique_index="<<unique_index<<std::endl;
while (unique_index != -1){
if (M_all_results.find(unique_index) == M_all_results.end())
{
std::cout<<"updateBestChain: not found"<<std::endl;
return;
}
auto result = M_all_results.at(unique_index);
auto action_state_pair = result.first;
auto eval = result.second;
if (M_best_evaluation == -std::numeric_limits< double >::max())
{
M_best_evaluation = eval;
}
// push action state pair to front of the vector M_result
std::cout<<"updateBestChain: "<<unique_index<<" "<<action_state_pair->action().description()<<" parrentIndex="<<action_state_pair->action().parentIndex()<<std::endl;
M_result.insert(M_result.begin(), *action_state_pair);
unique_index = action_state_pair->action().parentIndex();
}

for (size_t i = 0; i < M_result.size(); ++i)
{
dlog.addText( Logger::ACTION_CHAIN,
"updateBestChain: %d: %s",
i, M_result[i].action().description() );
std::cout<<"updateBestChain: "<<i<<": "<<M_result[i].action().description()<<std::endl;
}
}
2 changes: 2 additions & 0 deletions src/player/planner/action_chain_graph.h
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,8 @@ class ActionChainGraph {
return M_result;
};

void updateBestChain(int unique_index);

const CooperativeAction & getFirstAction() const
{
return (*(M_result.begin())).action();
Expand Down
9 changes: 9 additions & 0 deletions src/player/planner/action_chain_holder.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -146,3 +146,12 @@ ActionChainHolder::graph() const
{
return *M_graph;
}

/*-------------------------------------------------------------------*/
/*!
  \brief delegate the best-chain update to the owned action chain graph
  \param unique_index unique index of the chain's final action
*/
void
ActionChainHolder::updateBestChain( int unique_index )
{
    M_graph->updateBestChain( unique_index );
}
2 changes: 2 additions & 0 deletions src/player/planner/action_chain_holder.h
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,8 @@ class ActionChainHolder {

void update( const rcsc::WorldModel & wm );

void updateBestChain(int unique_index);

const ActionChainGraph & graph() const;
};

Expand Down
56 changes: 28 additions & 28 deletions src/player/planner/bhv_normal_dribble.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -197,34 +197,34 @@ IntentionNormalDribble::execute( PlayerAgent * agent )
// compare the current queue with other chain action candidates
//

if ( wm.self().isKickable()
&& M_turn_step <= 0 )
{
CooperativeAction::Ptr current_action( new Dribble( wm.self().unum(),
M_target_point,
wm.ball().vel().r(),
0,
M_turn_step,
M_dash_step,
"queuedDribble" ) );
current_action->setIndex( 0 );
current_action->setFirstDashPower( ServerParam::i().maxDashPower() );

ShortDribbleGenerator::instance().setQueuedAction( wm, current_action );

ActionChainHolder::instance().update( wm );
const ActionChainGraph & search_result = ActionChainHolder::i().graph();
const CooperativeAction & first_action = search_result.getFirstAction();

if ( first_action.category() != CooperativeAction::Dribble
|| ! first_action.targetPoint().equals( current_action->targetPoint() ) )
{
agent->debugClient().addMessage( "CancelDribbleQ" );
dlog.addText( Logger::DRIBBLE,
__FILE__": (intention:execute) cancel. select other action." );
return false;
}
}
// if ( wm.self().isKickable()
// && M_turn_step <= 0 )
// {
// CooperativeAction::Ptr current_action( new Dribble( wm.self().unum(),
// M_target_point,
// wm.ball().vel().r(),
// 0,
// M_turn_step,
// M_dash_step,
// "queuedDribble" ) );
// current_action->setIndex( 0 );
// current_action->setFirstDashPower( ServerParam::i().maxDashPower() );

// ShortDribbleGenerator::instance().setQueuedAction( wm, current_action );

// ActionChainHolder::instance().update( wm );
// const ActionChainGraph & search_result = ActionChainHolder::i().graph();
// const CooperativeAction & first_action = search_result.getFirstAction();

// if ( first_action.category() != CooperativeAction::Dribble
// || ! first_action.targetPoint().equals( current_action->targetPoint() ) )
// {
// agent->debugClient().addMessage( "CancelDribbleQ" );
// dlog.addText( Logger::DRIBBLE,
// __FILE__": (intention:execute) cancel. select other action." );
// return false;
// }
// }

//
//
Expand Down
Loading

0 comments on commit 131506b

Please sign in to comment.