-rw-r--r--  tools/perf/util/intel-pt-decoder/intel-pt-decoder.c  11
-rw-r--r--  tools/perf/util/intel-pt-decoder/intel-pt-decoder.h   1
-rw-r--r--  tools/perf/util/intel-pt.c                            16
3 files changed, 17 insertions, 11 deletions
diff --git a/tools/perf/util/intel-pt-decoder/intel-pt-decoder.c b/tools/perf/util/intel-pt-decoder/intel-pt-decoder.c
index 91cba0582736..ef29f6b25e60 100644
--- a/tools/perf/util/intel-pt-decoder/intel-pt-decoder.c
+++ b/tools/perf/util/intel-pt-decoder/intel-pt-decoder.c
@@ -2814,9 +2814,18 @@ const struct intel_pt_state *intel_pt_decode(struct intel_pt_decoder *decoder)
 		}
 		if (intel_pt_sample_time(decoder->pkt_state)) {
 			intel_pt_update_sample_time(decoder);
-			if (decoder->sample_cyc)
+			if (decoder->sample_cyc) {
 				decoder->sample_tot_cyc_cnt = decoder->tot_cyc_cnt;
+				decoder->state.flags |= INTEL_PT_SAMPLE_IPC;
+				decoder->sample_cyc = false;
+			}
 		}
+		/*
+		 * When using only TSC/MTC to compute cycles, IPC can be
+		 * sampled as soon as the cycle count changes.
+		 */
+		if (!decoder->have_cyc)
+			decoder->state.flags |= INTEL_PT_SAMPLE_IPC;
 	}
 
 	decoder->state.timestamp = decoder->sample_timestamp;
diff --git a/tools/perf/util/intel-pt-decoder/intel-pt-decoder.h b/tools/perf/util/intel-pt-decoder/intel-pt-decoder.h
index 8645fc265481..b52937b03c8c 100644
--- a/tools/perf/util/intel-pt-decoder/intel-pt-decoder.h
+++ b/tools/perf/util/intel-pt-decoder/intel-pt-decoder.h
@@ -17,6 +17,7 @@
 #define INTEL_PT_ABORT_TX	(1 << 1)
 #define INTEL_PT_ASYNC		(1 << 2)
 #define INTEL_PT_FUP_IP		(1 << 3)
+#define INTEL_PT_SAMPLE_IPC	(1 << 4)
 
 enum intel_pt_sample_type {
 	INTEL_PT_BRANCH		= 1 << 0,
diff --git a/tools/perf/util/intel-pt.c b/tools/perf/util/intel-pt.c
index c9477d021687..c63a9080c1b1 100644
--- a/tools/perf/util/intel-pt.c
+++ b/tools/perf/util/intel-pt.c
@@ -1381,7 +1381,8 @@ static int intel_pt_synth_branch_sample(struct intel_pt_queue *ptq)
 		sample.branch_stack = (struct branch_stack *)&dummy_bs;
 	}
 
-	sample.cyc_cnt = ptq->ipc_cyc_cnt - ptq->last_br_cyc_cnt;
+	if (ptq->state->flags & INTEL_PT_SAMPLE_IPC)
+		sample.cyc_cnt = ptq->ipc_cyc_cnt - ptq->last_br_cyc_cnt;
 	if (sample.cyc_cnt) {
 		sample.insn_cnt = ptq->ipc_insn_cnt - ptq->last_br_insn_cnt;
 		ptq->last_br_insn_cnt = ptq->ipc_insn_cnt;
@@ -1431,7 +1432,8 @@ static int intel_pt_synth_instruction_sample(struct intel_pt_queue *ptq)
 	else
 		sample.period = ptq->state->tot_insn_cnt - ptq->last_insn_cnt;
 
-	sample.cyc_cnt = ptq->ipc_cyc_cnt - ptq->last_in_cyc_cnt;
+	if (ptq->state->flags & INTEL_PT_SAMPLE_IPC)
+		sample.cyc_cnt = ptq->ipc_cyc_cnt - ptq->last_in_cyc_cnt;
 	if (sample.cyc_cnt) {
 		sample.insn_cnt = ptq->ipc_insn_cnt - ptq->last_in_insn_cnt;
 		ptq->last_in_insn_cnt = ptq->ipc_insn_cnt;
@@ -1983,14 +1985,8 @@ static int intel_pt_sample(struct intel_pt_queue *ptq)
 
 	ptq->have_sample = false;
 
-	if (ptq->state->tot_cyc_cnt > ptq->ipc_cyc_cnt) {
-		/*
-		 * Cycle count and instruction count only go together to create
-		 * a valid IPC ratio when the cycle count changes.
-		 */
-		ptq->ipc_insn_cnt = ptq->state->tot_insn_cnt;
-		ptq->ipc_cyc_cnt = ptq->state->tot_cyc_cnt;
-	}
+	ptq->ipc_insn_cnt = ptq->state->tot_insn_cnt;
+	ptq->ipc_cyc_cnt = ptq->state->tot_cyc_cnt;
 
 	/*
 	 * Do PEBS first to allow for the possibility that the PEBS timestamp
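
Read as a whole, the diff moves the "is an IPC valid yet?" decision out of intel-pt.c and into the decoder, which now raises INTEL_PT_SAMPLE_IPC when a CYC-based cycle count has been latched (or unconditionally when cycles come only from TSC/MTC). The following is a minimal standalone sketch of that gating pattern; the struct and function names are hypothetical stand-ins, not the real perf/intel-pt definitions.

/*
 * Illustrative sketch only: counts are always copied from the decoder
 * state, but an IPC is only formed when the decoder has flagged it as
 * valid, mirroring the INTEL_PT_SAMPLE_IPC gating introduced above.
 */
#include <stdint.h>
#include <stdio.h>

#define SAMPLE_IPC (1 << 4)	/* stands in for INTEL_PT_SAMPLE_IPC */

struct decoder_state {		/* hypothetical, not struct intel_pt_state */
	unsigned int flags;
	uint64_t tot_insn_cnt;
	uint64_t tot_cyc_cnt;
};

struct sample_queue {		/* hypothetical, not struct intel_pt_queue */
	uint64_t ipc_insn_cnt, ipc_cyc_cnt;
	uint64_t last_insn_cnt, last_cyc_cnt;
};

static void maybe_sample_ipc(struct sample_queue *q,
			     const struct decoder_state *st)
{
	uint64_t insn_cnt, cyc_cnt = 0;

	/* Counters are latched unconditionally ... */
	q->ipc_insn_cnt = st->tot_insn_cnt;
	q->ipc_cyc_cnt = st->tot_cyc_cnt;

	/* ... but an IPC is only emitted when the decoder says it is valid. */
	if (st->flags & SAMPLE_IPC)
		cyc_cnt = q->ipc_cyc_cnt - q->last_cyc_cnt;

	if (cyc_cnt) {
		insn_cnt = q->ipc_insn_cnt - q->last_insn_cnt;
		q->last_insn_cnt = q->ipc_insn_cnt;
		q->last_cyc_cnt = q->ipc_cyc_cnt;
		printf("IPC %.2f\n", (double)insn_cnt / (double)cyc_cnt);
	}
}

int main(void)
{
	struct sample_queue q = { 0 };
	struct decoder_state st = {
		.flags = SAMPLE_IPC,
		.tot_insn_cnt = 4000,
		.tot_cyc_cnt = 1000,
	};

	maybe_sample_ipc(&q, &st);	/* prints "IPC 4.00" */
	return 0;
}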